From 11f631d63428159a97de897905ccc63d5b31bff7 Mon Sep 17 00:00:00 2001
From: Benjamin Guillet
Date: Wed, 13 Jan 2016 16:59:57 -0800
Subject: [PATCH 1/5] Switch to Go1.5 vendoring.

---
 Godeps/Godeps.json | 172 --
 Godeps/Readme | 5 -
 Godeps/_workspace/.gitignore | 2 -
 .../src/github.com/BurntSushi/toml/.gitignore | 5 -
 .../github.com/BurntSushi/toml/.travis.yml | 12 -
 .../toml/cmd/toml-test-decoder/COPYING | 14 -
 .../toml/cmd/toml-test-decoder/README.md | 14 -
 .../toml/cmd/toml-test-decoder/main.go | 90 -
 .../toml/cmd/toml-test-encoder/COPYING | 14 -
 .../toml/cmd/toml-test-encoder/README.md | 14 -
 .../toml/cmd/toml-test-encoder/main.go | 131 -
 .../BurntSushi/toml/cmd/tomlv/COPYING | 14 -
 .../BurntSushi/toml/cmd/tomlv/README.md | 22 -
 .../BurntSushi/toml/cmd/tomlv/main.go | 61 -
 .../github.com/bmizerany/assert/.gitignore | 7 -
 .../bmizerany/assert/example/point.go | 5 -
 .../bmizerany/assert/example/point_test.go | 13 -
 .../github.com/codegangsta/cli/.travis.yml | 6 -
 .../cli/autocomplete/bash_autocomplete | 13 -
 .../cli/autocomplete/zsh_autocomplete | 5 -
 .../src/github.com/BurntSushi/toml/.gitignore | 5 -
 .../github.com/BurntSushi/toml/.travis.yml | 12 -
 .../src/github.com/BurntSushi/toml/COMPATIBLE | 3 -
 .../src/github.com/BurntSushi/toml/COPYING | 14 -
 .../src/github.com/BurntSushi/toml/Makefile | 19 -
 .../src/github.com/BurntSushi/toml/README.md | 220 --
 .../toml/cmd/toml-test-decoder/COPYING | 14 -
 .../toml/cmd/toml-test-decoder/README.md | 14 -
 .../toml/cmd/toml-test-decoder/main.go | 90 -
 .../toml/cmd/toml-test-encoder/COPYING | 14 -
 .../toml/cmd/toml-test-encoder/README.md | 14 -
 .../toml/cmd/toml-test-encoder/main.go | 131 -
 .../BurntSushi/toml/cmd/tomlv/COPYING | 14 -
 .../BurntSushi/toml/cmd/tomlv/README.md | 22 -
 .../BurntSushi/toml/cmd/tomlv/main.go | 61 -
 .../src/github.com/BurntSushi/toml/decode.go | 492 ---
 .../github.com/BurntSushi/toml/decode_meta.go | 99 -
 .../github.com/BurntSushi/toml/decode_test.go | 949 ------
 .../src/github.com/BurntSushi/toml/doc.go | 27 -
 .../src/github.com/BurntSushi/toml/encode.go | 515 ----
 .../github.com/BurntSushi/toml/encode_test.go | 506 ---
 .../BurntSushi/toml/encoding_types.go | 19 -
 .../BurntSushi/toml/encoding_types_1.1.go | 18 -
 .../src/github.com/BurntSushi/toml/lex.go | 863 ------
 .../src/github.com/BurntSushi/toml/parse.go | 444 ---
 .../github.com/BurntSushi/toml/session.vim | 1 -
 .../github.com/BurntSushi/toml/type_check.go | 91 -
 .../github.com/BurntSushi/toml/type_fields.go | 241 --
 .../src/github.com/howeyc/gopass/nix.go | 23 -
 .../go-sawyer/hypermedia/hypermedia.go | 133 -
 .../go-sawyer/hypermedia/hypermedia_test.go | 133 -
 .../jingweno/go-sawyer/mediaheader/decoder.go | 56 -
 .../go-sawyer/mediaheader/decoder_test.go | 18 -
 .../go-sawyer/mediaheader/mediaheader.go | 9 -
 .../go-sawyer/mediatype/encode_test.go | 71 -
 .../github.com/jingweno/go-sawyer/request.go | 103 -
 .../jingweno/go-sawyer/request_test.go | 236 --
 .../github.com/jingweno/go-sawyer/response.go | 87 -
 .../github.com/jingweno/go-sawyer/sawyer.go | 89 -
 .../github.com/jingweno/go-sawyer/script/fmt | 3 -
 .../github.com/jingweno/go-sawyer/script/test | 3 -
 .../jtacoma/uritemplates/.gitignore | 22 -
 .../jtacoma/uritemplates/.gitmodules | 3 -
 .../jtacoma/uritemplates/.godocdown.md | 10 -
 .../jtacoma/uritemplates/.travis.yml | 1 -
 .../octokit/go-octokit/octokit/auth_method.go | 56 -
 .../go-octokit/octokit/auth_method_test.go | 22 -
 .../go-octokit/octokit/authorizations.go | 68 -
 .../go-octokit/octokit/authorizations_test.go | 109 -
.../octokit/go-octokit/octokit/client_test.go | 146 - .../octokit/go-octokit/octokit/commits.go | 93 - .../octokit/go-octokit/octokit/error.go | 173 -- .../octokit/go-octokit/octokit/gist.go | 88 - .../go-octokit/octokit/git_trees_test.go | 59 - .../go-octokit/octokit/octokit_test.go | 124 - .../go-octokit/octokit/pull_requests.go | 104 - .../go-octokit/octokit/pull_requests_test.go | 162 - .../octokit/go-octokit/octokit/releases.go | 79 - .../octokit/go-octokit/octokit/request.go | 67 - .../octokit/go-octokit/octokit/result.go | 61 - .../octokit/go-octokit/octokit/root.go | 90 - .../octokit/go-octokit/octokit/root_test.go | 39 - .../go-octokit/octokit/statuses_test.go | 30 - .../octokit/go-octokit/octokit/users_test.go | 113 - .../_workspace/src/gopkg.in/yaml.v1/yamlh.go | 716 ----- .../src/github.com/howeyc/gopass/LICENSE.txt | 13 - .../src/github.com/howeyc/gopass/README.md | 21 - .../src/github.com/howeyc/gopass/bsd.go | 29 - .../src/github.com/howeyc/gopass/pass.go | 44 - .../src/github.com/howeyc/gopass/win.go | 47 - .../inconshreveable/go-update/check/check.go | 209 -- .../src/github.com/jingweno/go-sawyer/LICENSE | 20 - .../github.com/jingweno/go-sawyer/README.md | 48 - .../jingweno/go-sawyer/gopack.config | 9 - .../go-sawyer/mediaheader/mediaheader.go | 9 - .../jingweno/go-sawyer/mediatype/decode.go | 51 - .../go-sawyer/mediatype/decode_test.go | 77 - .../jingweno/go-sawyer/mediatype/encode.go | 53 - .../jingweno/go-sawyer/mediatype/mediatype.go | 134 - .../go-sawyer/mediatype/mediatype_test.go | 100 - .../jingweno/go-sawyer/sawyer_test.go | 132 - .../github.com/jingweno/go-sawyer/script/fmt | 3 - .../github.com/jingweno/go-sawyer/script/test | 3 - .../jtacoma/uritemplates/.gitignore | 22 - .../jtacoma/uritemplates/.gitmodules | 3 - .../jtacoma/uritemplates/.godocdown.md | 10 - .../jtacoma/uritemplates/.travis.yml | 1 - .../github.com/jtacoma/uritemplates/LICENSE | 18 - .../github.com/jtacoma/uritemplates/README.md | 23 - .../jtacoma/uritemplates/uritemplates.go | 359 --- .../jtacoma/uritemplates/uritemplates_test.go | 245 -- .../github.com/kballard/go-shellquote/LICENSE | 19 - .../github.com/kballard/go-shellquote/README | 36 - .../kballard/go-shellquote/both_test.go | 29 - .../github.com/kballard/go-shellquote/doc.go | 3 - .../kballard/go-shellquote/quote.go | 102 - .../kballard/go-shellquote/quote_test.go | 28 - .../kballard/go-shellquote/unquote.go | 144 - .../kballard/go-shellquote/unquote_test.go | 53 - .../src/github.com/kr/binarydist/.gitignore | 1 - .../kr/binarydist/testdata/sample.new | Bin 10000 -> 0 bytes .../kr/binarydist/testdata/sample.old | Bin 11000 -> 0 bytes .../kr/binarydist/testdata/sample.patch | Bin 1090 -> 0 bytes .../src/github.com/kr/pretty/.gitignore | 4 - .../src/github.com/kr/text/colwriter/Readme | 5 - .../github.com/kr/text/colwriter/column.go | 147 - .../kr/text/colwriter/column_test.go | 90 - .../src/github.com/kr/text/mc/Readme | 9 - .../src/github.com/kr/text/mc/mc.go | 62 - .../octokit/go-octokit/octokit/client.go | 141 - .../go-octokit/octokit/commits_test.go | 58 - .../octokit/go-octokit/octokit/emojis.go | 25 - .../octokit/go-octokit/octokit/emojis_test.go | 28 - .../octokit/go-octokit/octokit/error_test.go | 140 - .../octokit/go-octokit/octokit/gists_test.go | 59 - .../octokit/go-octokit/octokit/git_trees.go | 39 - .../octokit/go-octokit/octokit/hyperlink.go | 17 - .../go-octokit/octokit/hyperlink_test.go | 23 - .../octokit/go-octokit/octokit/issues.go | 91 - .../octokit/go-octokit/octokit/issues_test.go | 145 - 
.../octokit/go-octokit/octokit/octokit.go | 11 - .../go-octokit/octokit/organizations.go | 12 - .../go-octokit/octokit/releases_test.go | 101 - .../go-octokit/octokit/repositories.go | 85 - .../go-octokit/octokit/repositories_test.go | 144 - .../octokit/go-octokit/octokit/response.go | 31 - .../octokit/go-octokit/octokit/result_test.go | 19 - .../octokit/go-octokit/octokit/statuses.go | 41 - .../octokit/go-octokit/octokit/uploads.go | 20 - .../go-octokit/octokit/uploads_test.go | 42 - .../octokit/go-octokit/octokit/users.go | 77 - .../x/crypto/ssh/terminal/terminal.go | 888 ------ .../x/crypto/ssh/terminal/terminal_test.go | 243 -- .../golang.org/x/crypto/ssh/terminal/util.go | 128 - .../x/crypto/ssh/terminal/util_bsd.go | 12 - .../x/crypto/ssh/terminal/util_linux.go | 11 - .../x/crypto/ssh/terminal/util_windows.go | 174 -- .../_workspace/src/gopkg.in/yaml.v1/LICENSE | 188 -- .../src/gopkg.in/yaml.v1/LICENSE.libyaml | 31 - .../_workspace/src/gopkg.in/yaml.v1/README.md | 128 - .../_workspace/src/gopkg.in/yaml.v1/apic.go | 742 ----- .../_workspace/src/gopkg.in/yaml.v1/decode.go | 566 ---- .../src/gopkg.in/yaml.v1/decode_test.go | 703 ----- .../src/gopkg.in/yaml.v1/emitterc.go | 1685 ---------- .../_workspace/src/gopkg.in/yaml.v1/encode.go | 265 -- .../src/gopkg.in/yaml.v1/encode_test.go | 433 --- .../src/gopkg.in/yaml.v1/parserc.go | 1096 ------- .../src/gopkg.in/yaml.v1/readerc.go | 391 --- .../src/gopkg.in/yaml.v1/resolve.go | 190 -- .../src/gopkg.in/yaml.v1/scannerc.go | 2710 ----------------- .../_workspace/src/gopkg.in/yaml.v1/sorter.go | 104 - .../src/gopkg.in/yaml.v1/suite_test.go | 12 - .../src/gopkg.in/yaml.v1/writerc.go | 89 - .../_workspace/src/gopkg.in/yaml.v1/yaml.go | 301 -- .../src/gopkg.in/yaml.v1/yamlprivateh.go | 173 -- deploy.go | 6 +- deploy_test.go | 2 +- github.go | 4 +- updater.go | 4 +- .../bitbucket.org/kardianos/osext/LICENSE | 0 .../bitbucket.org/kardianos/osext/osext.go | 0 .../kardianos/osext/osext_plan9.go | 0 .../kardianos/osext/osext_procfs.go | 0 .../kardianos/osext/osext_sysctl.go | 0 .../kardianos/osext/osext_test.go | 0 .../kardianos/osext/osext_windows.go | 0 .../p}/go-netrc/netrc/example.netrc | 0 .../p}/go-netrc/netrc/netrc.go | 0 .../p}/go-netrc/netrc/netrc_test.go | 0 .../github.com/BurntSushi/toml/COMPATIBLE | 0 .../github.com/BurntSushi/toml/COPYING | 0 .../github.com/BurntSushi/toml/Makefile | 0 .../github.com/BurntSushi/toml/README.md | 0 .../github.com/BurntSushi/toml/decode.go | 0 .../github.com/BurntSushi/toml/decode_meta.go | 0 .../github.com/BurntSushi/toml/decode_test.go | 0 .../github.com/BurntSushi/toml/doc.go | 0 .../github.com/BurntSushi/toml/encode.go | 0 .../github.com/BurntSushi/toml/encode_test.go | 0 .../BurntSushi/toml/encoding_types.go | 0 .../BurntSushi/toml/encoding_types_1.1.go | 0 .../github.com/BurntSushi/toml/lex.go | 0 .../github.com/BurntSushi/toml/parse.go | 0 .../github.com/BurntSushi/toml/session.vim | 0 .../github.com/BurntSushi/toml/type_check.go | 0 .../github.com/BurntSushi/toml/type_fields.go | 0 .../github.com/bmizerany/assert/README.md | 0 .../github.com/bmizerany/assert/assert.go | 2 +- .../bmizerany/assert/assert_test.go | 0 .../github.com/codegangsta/cli/LICENSE | 0 .../github.com/codegangsta/cli/README.md | 0 .../github.com/codegangsta/cli/app.go | 0 .../github.com/codegangsta/cli/app_test.go | 2 +- .../github.com/codegangsta/cli/cli.go | 0 .../github.com/codegangsta/cli/cli_test.go | 2 +- .../github.com/codegangsta/cli/command.go | 0 .../codegangsta/cli/command_test.go | 2 +- 
.../github.com/codegangsta/cli/context.go | 0 .../codegangsta/cli/context_test.go | 2 +- .../github.com/codegangsta/cli/flag.go | 0 .../github.com/codegangsta/cli/flag_test.go | 2 +- .../github.com/codegangsta/cli/help.go | 0 .../codegangsta/cli/helpers_test.go | 0 .../fhs/go-netrc/netrc/example.netrc | 0 .../github.com/fhs/go-netrc/netrc/netrc.go | 0 .../fhs/go-netrc/netrc/netrc_test.go | 0 .../src/github.com/bmizerany/assert/README.md | 45 + .../src/github.com/bmizerany/assert/assert.go | 77 + .../bmizerany/assert/assert_test.go | 15 + .../github.com/kballard/go-shellquote/LICENSE | 0 .../github.com/kballard/go-shellquote/README | 0 .../kballard/go-shellquote/both_test.go | 0 .../github.com/kballard/go-shellquote/doc.go | 0 .../kballard/go-shellquote/quote.go | 0 .../kballard/go-shellquote/quote_test.go | 0 .../kballard/go-shellquote/unquote.go | 0 .../kballard/go-shellquote/unquote_test.go | 0 .../src/github.com/kr/pretty/License | 0 .../src/github.com/kr/pretty/Readme | 0 .../src/github.com/kr/pretty/diff.go | 0 .../src/github.com/kr/pretty/diff_test.go | 0 .../src/github.com/kr/pretty/example_test.go | 2 +- .../src/github.com/kr/pretty/formatter.go | 2 +- .../github.com/kr/pretty/formatter_test.go | 0 .../src/github.com/kr/pretty/pretty.go | 0 .../src/github.com/kr/pretty/zero.go | 0 .../_workspace/src/github.com/kr/text/License | 0 .../_workspace/src/github.com/kr/text/Readme | 0 .../_workspace/src/github.com/kr/text/doc.go | 0 .../src/github.com/kr/text/indent.go | 0 .../src/github.com/kr/text/indent_test.go | 0 .../_workspace/src/github.com/kr/text/wrap.go | 0 .../src/github.com/kr/text/wrap_test.go | 0 .../github.com/mattn/go-colorable/README.md | 42 + .../mattn/go-colorable/colorable_others.go | 16 + .../mattn/go-colorable/colorable_windows.go | 594 ++++ .../src/github.com/mattn/go-isatty/README.md | 0 .../src/github.com/mattn/go-isatty/doc.go | 0 .../github.com/mattn/go-isatty/isatty_bsd.go | 0 .../mattn/go-isatty/isatty_linux.go | 0 .../mattn/go-isatty/isatty_windows.go | 0 .../github.com/github/hub/cmd/cmd.go | 18 +- .../github.com/github/hub/cmd/cmd_test.go | 2 +- .../github/hub/fixtures/fixtures.go | 14 + vendor/github.com/github/hub/fixtures/gh.zip | Bin 0 -> 246 bytes .../github/hub/fixtures/test_configs.go | 43 + .../github/hub/fixtures/test_repo.go | 93 + .../github.com/github/hub/git/git.go | 2 +- .../github.com/github/hub/git/git_test.go | 2 +- .../github.com/github/hub/git/ssh_config.go | 0 .../github/hub/git/ssh_config_test.go | 2 +- .../github.com/github/hub/git/url.go | 0 .../github.com/github/hub/git/url_test.go | 2 +- .../github.com/github/hub/github/branch.go | 2 +- .../github/hub/github/branch_test.go | 2 +- .../github.com/github/hub/github/client.go | 2 +- .../github/hub/github/client_test.go | 4 +- .../github.com/github/hub/github/config.go | 6 +- .../github/hub/github/config_decoder.go | 4 +- .../github/hub/github/config_encoder.go | 4 +- .../github/hub/github/config_service.go | 0 .../github/hub/github/config_service_test.go | 2 +- .../github/hub/github/crash_report.go | 6 +- .../github/hub/github/crash_report_test.go | 2 +- .../github.com/github/hub/github/editor.go | 4 +- .../github/hub/github/editor_test.go | 2 +- .../github.com/github/hub/github/hosts.go | 2 +- .../github.com/github/hub/github/http.go | 2 +- .../github.com/github/hub/github/http_test.go | 2 +- .../github.com/github/hub/github/localrepo.go | 2 +- .../github/hub/github/localrepo_test.go | 2 +- .../github.com/github/hub/github/project.go | 4 +- .../github/hub/github/project_test.go | 2 +- 
.../github.com/github/hub/github/remote.go | 2 +- .../github.com/github/hub/github/url.go | 0 .../github.com/github/hub/github/url_test.go | 2 +- .../github.com/github/hub/github/util.go | 4 +- .../github.com/github/hub/ui/ui.go | 13 +- .../github.com/github/hub/utils/utils.go | 2 +- .../github.com/github/hub/utils/utils_test.go | 2 +- .../google/go-github/github/activity.go | 0 .../go-github/github/activity_events.go | 0 .../go-github/github/activity_events_test.go | 0 .../github/activity_notifications.go | 0 .../github/activity_notifications_test.go | 0 .../google/go-github/github/activity_star.go | 0 .../go-github/github/activity_star_test.go | 0 .../go-github/github/activity_watching.go | 0 .../github/activity_watching_test.go | 0 .../github.com/google/go-github/github/doc.go | 0 .../google/go-github/github/gists.go | 0 .../google/go-github/github/gists_comments.go | 0 .../go-github/github/gists_comments_test.go | 0 .../google/go-github/github/gists_test.go | 0 .../github.com/google/go-github/github/git.go | 0 .../google/go-github/github/git_blobs.go | 0 .../google/go-github/github/git_blobs_test.go | 0 .../google/go-github/github/git_commits.go | 0 .../go-github/github/git_commits_test.go | 0 .../google/go-github/github/git_refs.go | 0 .../google/go-github/github/git_refs_test.go | 0 .../google/go-github/github/git_tags.go | 0 .../google/go-github/github/git_tags_test.go | 0 .../google/go-github/github/git_trees.go | 0 .../google/go-github/github/git_trees_test.go | 0 .../google/go-github/github/github.go | 2 +- .../google/go-github/github/github_test.go | 0 .../google/go-github/github/gitignore.go | 0 .../google/go-github/github/gitignore_test.go | 0 .../google/go-github/github/issues.go | 0 .../go-github/github/issues_assignees.go | 0 .../go-github/github/issues_assignees_test.go | 0 .../go-github/github/issues_comments.go | 0 .../go-github/github/issues_comments_test.go | 0 .../google/go-github/github/issues_events.go | 0 .../go-github/github/issues_events_test.go | 0 .../google/go-github/github/issues_labels.go | 0 .../go-github/github/issues_labels_test.go | 0 .../go-github/github/issues_milestones.go | 0 .../github/issues_milestones_test.go | 0 .../google/go-github/github/issues_test.go | 0 .../google/go-github/github/misc.go | 0 .../google/go-github/github/misc_test.go | 0 .../google/go-github/github/orgs.go | 0 .../google/go-github/github/orgs_members.go | 0 .../go-github/github/orgs_members_test.go | 0 .../google/go-github/github/orgs_teams.go | 0 .../go-github/github/orgs_teams_test.go | 0 .../google/go-github/github/orgs_test.go | 0 .../google/go-github/github/pulls.go | 0 .../google/go-github/github/pulls_comments.go | 0 .../go-github/github/pulls_comments_test.go | 0 .../google/go-github/github/pulls_test.go | 0 .../google/go-github/github/repos.go | 0 .../go-github/github/repos_collaborators.go | 0 .../github/repos_collaborators_test.go | 0 .../google/go-github/github/repos_comments.go | 0 .../go-github/github/repos_comments_test.go | 0 .../google/go-github/github/repos_commits.go | 0 .../go-github/github/repos_commits_test.go | 0 .../google/go-github/github/repos_contents.go | 0 .../go-github/github/repos_contents_test.go | 0 .../go-github/github/repos_deployments.go | 0 .../github/repos_deployments_test.go | 0 .../google/go-github/github/repos_forks.go | 0 .../go-github/github/repos_forks_test.go | 0 .../google/go-github/github/repos_hooks.go | 0 .../go-github/github/repos_hooks_test.go | 0 .../google/go-github/github/repos_keys.go | 0 
.../go-github/github/repos_keys_test.go | 0 .../google/go-github/github/repos_merging.go | 0 .../go-github/github/repos_merging_test.go | 0 .../google/go-github/github/repos_pages.go | 0 .../go-github/github/repos_pages_test.go | 0 .../google/go-github/github/repos_releases.go | 0 .../go-github/github/repos_releases_test.go | 0 .../google/go-github/github/repos_stats.go | 0 .../go-github/github/repos_stats_test.go | 0 .../google/go-github/github/repos_statuses.go | 0 .../go-github/github/repos_statuses_test.go | 0 .../google/go-github/github/repos_test.go | 0 .../google/go-github/github/search.go | 2 +- .../google/go-github/github/search_test.go | 0 .../google/go-github/github/strings.go | 0 .../google/go-github/github/strings_test.go | 0 .../google/go-github/github/timestamp.go | 0 .../google/go-github/github/timestamp_test.go | 0 .../google/go-github/github/users.go | 0 .../go-github/github/users_administration.go | 0 .../github/users_administration_test.go | 0 .../google/go-github/github/users_emails.go | 0 .../go-github/github/users_emails_test.go | 0 .../go-github/github/users_followers.go | 0 .../go-github/github/users_followers_test.go | 0 .../google/go-github/github/users_keys.go | 0 .../go-github/github/users_keys_test.go | 0 .../google/go-github/github/users_test.go | 0 .../google/go-querystring/query/encode.go | 0 .../go-querystring/query/encode_test.go | 0 .../github.com/howeyc/gopass/LICENSE.txt | 0 .../github.com/howeyc/gopass/README.md | 0 .../github.com/howeyc/gopass/bsd.go | 0 .../github.com/howeyc/gopass/nix.go | 2 +- .../github.com/howeyc/gopass/pass.go | 0 .../github.com/howeyc/gopass/win.go | 0 .../inconshreveable/go-update/LICENSE | 0 .../inconshreveable/go-update/README.md | 0 .../go-update/download/download.go | 0 .../inconshreveable/go-update/hide_noop.go | 0 .../inconshreveable/go-update/hide_windows.go | 0 .../inconshreveable/go-update/update.go | 6 +- .../inconshreveable/go-update/update_test.go | 2 +- .../github.com/jingweno/go-sawyer/LICENSE | 0 .../github.com/jingweno/go-sawyer/README.md | 0 .../jingweno/go-sawyer/gopack.config | 0 .../go-sawyer/hypermedia/hypermedia.go | 2 +- .../go-sawyer/hypermedia/hypermedia_test.go | 2 +- .../jingweno/go-sawyer/mediaheader/decoder.go | 2 +- .../go-sawyer/mediaheader/decoder_test.go | 2 +- .../go-sawyer/mediaheader/mediaheader.go | 9 + .../jingweno/go-sawyer/mediatype/decode.go | 0 .../go-sawyer/mediatype/decode_test.go | 2 +- .../jingweno/go-sawyer/mediatype/encode.go | 0 .../go-sawyer/mediatype/encode_test.go | 2 +- .../jingweno/go-sawyer/mediatype/mediatype.go | 0 .../go-sawyer/mediatype/mediatype_test.go | 2 +- .../github.com/jingweno/go-sawyer/request.go | 4 +- .../jingweno/go-sawyer/request_test.go | 4 +- .../github.com/jingweno/go-sawyer/response.go | 4 +- .../github.com/jingweno/go-sawyer/sawyer.go | 2 +- .../jingweno/go-sawyer/sawyer_test.go | 4 +- .../github.com/jtacoma/uritemplates/LICENSE | 0 .../github.com/jtacoma/uritemplates/README.md | 0 .../jtacoma/uritemplates/uritemplates.go | 0 .../jtacoma/uritemplates/uritemplates_test.go | 0 .../github.com/kr/binarydist/License | 0 .../github.com/kr/binarydist/Readme.md | 0 .../github.com/kr/binarydist/bzip2.go | 0 .../github.com/kr/binarydist/common_test.go | 0 .../github.com/kr/binarydist/diff.go | 0 .../github.com/kr/binarydist/diff_test.go | 0 .../github.com/kr/binarydist/doc.go | 0 .../github.com/kr/binarydist/encoding.go | 0 .../github.com/kr/binarydist/patch.go | 0 .../github.com/kr/binarydist/patch_test.go | 0 .../github.com/kr/binarydist/seek.go | 0 
.../github.com/kr/binarydist/sort_test.go | 0 vendor/github.com/kr/pretty/License | 21 + vendor/github.com/kr/pretty/Readme | 9 + vendor/github.com/kr/pretty/diff.go | 158 + vendor/github.com/kr/pretty/diff_test.go | 74 + vendor/github.com/kr/pretty/example_test.go | 20 + vendor/github.com/kr/pretty/formatter.go | 337 ++ vendor/github.com/kr/pretty/formatter_test.go | 261 ++ vendor/github.com/kr/pretty/pretty.go | 98 + vendor/github.com/kr/pretty/zero.go | 41 + vendor/github.com/kr/text/License | 19 + vendor/github.com/kr/text/Readme | 3 + vendor/github.com/kr/text/doc.go | 3 + vendor/github.com/kr/text/indent.go | 74 + vendor/github.com/kr/text/indent_test.go | 119 + vendor/github.com/kr/text/wrap.go | 86 + vendor/github.com/kr/text/wrap_test.go | 44 + .../github.com/mattn/go-isatty/README.md | 0 .../github.com/mattn/go-isatty/doc.go | 0 .../github.com/mattn/go-isatty/isatty_bsd.go | 0 .../mattn/go-isatty/isatty_linux.go | 0 .../mattn/go-isatty/isatty_windows.go | 0 .../octokit/go-octokit/octokit/auth_method.go | 2 +- .../go-octokit/octokit/auth_method_test.go | 2 +- .../go-octokit/octokit/authorizations.go | 2 +- .../go-octokit/octokit/authorizations_test.go | 2 +- .../octokit/go-octokit/octokit/client.go | 4 +- .../octokit/go-octokit/octokit/client_test.go | 2 +- .../octokit/go-octokit/octokit/commits.go | 2 +- .../go-octokit/octokit/commits_test.go | 2 +- .../octokit/go-octokit/octokit/emojis.go | 0 .../octokit/go-octokit/octokit/emojis_test.go | 2 +- .../octokit/go-octokit/octokit/error.go | 2 +- .../octokit/go-octokit/octokit/error_test.go | 2 +- .../octokit/go-octokit/octokit/gist.go | 2 +- .../octokit/go-octokit/octokit/gists_test.go | 2 +- .../octokit/go-octokit/octokit/git_trees.go | 0 .../go-octokit/octokit/git_trees_test.go | 2 +- .../octokit/go-octokit/octokit/hyperlink.go | 2 +- .../go-octokit/octokit/hyperlink_test.go | 2 +- .../octokit/go-octokit/octokit/issues.go | 2 +- .../octokit/go-octokit/octokit/issues_test.go | 2 +- .../octokit/go-octokit/octokit/octokit.go | 0 .../go-octokit/octokit/octokit_test.go | 2 +- .../go-octokit/octokit/organizations.go | 0 .../go-octokit/octokit/pull_requests.go | 2 +- .../go-octokit/octokit/pull_requests_test.go | 2 +- .../octokit/go-octokit/octokit/releases.go | 2 +- .../go-octokit/octokit/releases_test.go | 2 +- .../go-octokit/octokit/repositories.go | 2 +- .../go-octokit/octokit/repositories_test.go | 2 +- .../octokit/go-octokit/octokit/request.go | 4 +- .../octokit/go-octokit/octokit/response.go | 6 +- .../octokit/go-octokit/octokit/result.go | 2 +- .../octokit/go-octokit/octokit/result_test.go | 6 +- .../octokit/go-octokit/octokit/root.go | 2 +- .../octokit/go-octokit/octokit/root_test.go | 2 +- .../octokit/go-octokit/octokit/statuses.go | 2 +- .../go-octokit/octokit/statuses_test.go | 2 +- .../octokit/go-octokit/octokit/uploads.go | 0 .../go-octokit/octokit/uploads_test.go | 2 +- .../octokit/go-octokit/octokit/users.go | 2 +- .../octokit/go-octokit/octokit/users_test.go | 2 +- .../github.com/ogier/pflag/LICENSE | 0 .../github.com/ogier/pflag/README.md | 0 .../github.com/ogier/pflag/bool.go | 0 .../github.com/ogier/pflag/duration.go | 0 .../github.com/ogier/pflag/example_test.go | 2 +- .../github.com/ogier/pflag/export_test.go | 0 .../github.com/ogier/pflag/flag.go | 0 .../github.com/ogier/pflag/flag_test.go | 2 +- .../github.com/ogier/pflag/float32.go | 0 .../github.com/ogier/pflag/float64.go | 0 .../github.com/ogier/pflag/int.go | 0 .../github.com/ogier/pflag/int32.go | 0 .../github.com/ogier/pflag/int64.go | 0 
.../github.com/ogier/pflag/int8.go | 0 .../github.com/ogier/pflag/ip.go | 0 .../github.com/ogier/pflag/ipmask.go | 0 .../github.com/ogier/pflag/string.go | 0 .../github.com/ogier/pflag/uint.go | 0 .../github.com/ogier/pflag/uint16.go | 0 .../github.com/ogier/pflag/uint32.go | 0 .../github.com/ogier/pflag/uint64.go | 0 .../github.com/ogier/pflag/uint8.go | 0 .../x/crypto/ssh/terminal/terminal.go | 0 .../x/crypto/ssh/terminal/terminal_test.go | 0 .../golang.org/x/crypto/ssh/terminal/util.go | 0 .../x/crypto/ssh/terminal/util_bsd.go | 0 .../x/crypto/ssh/terminal/util_linux.go | 0 .../x/crypto/ssh/terminal/util_windows.go | 0 vendor/gopkg.in/check.v1/LICENSE | 25 + vendor/gopkg.in/check.v1/README.md | 20 + vendor/gopkg.in/check.v1/TODO | 2 + vendor/gopkg.in/check.v1/benchmark.go | 187 ++ vendor/gopkg.in/check.v1/benchmark_test.go | 91 + vendor/gopkg.in/check.v1/bootstrap_test.go | 82 + vendor/gopkg.in/check.v1/check.go | 873 ++++++ vendor/gopkg.in/check.v1/check_test.go | 207 ++ vendor/gopkg.in/check.v1/checkers.go | 458 +++ vendor/gopkg.in/check.v1/checkers_test.go | 272 ++ vendor/gopkg.in/check.v1/export_test.go | 19 + vendor/gopkg.in/check.v1/fixture_test.go | 484 +++ vendor/gopkg.in/check.v1/foundation_test.go | 335 ++ vendor/gopkg.in/check.v1/helpers.go | 231 ++ vendor/gopkg.in/check.v1/helpers_test.go | 519 ++++ vendor/gopkg.in/check.v1/printer.go | 168 + vendor/gopkg.in/check.v1/printer_test.go | 104 + vendor/gopkg.in/check.v1/reporter.go | 88 + vendor/gopkg.in/check.v1/reporter_test.go | 159 + vendor/gopkg.in/check.v1/run.go | 175 ++ vendor/gopkg.in/check.v1/run_test.go | 419 +++ .../src => vendor}/gopkg.in/yaml.v1/LICENSE | 0 .../gopkg.in/yaml.v1/LICENSE.libyaml | 0 .../src => vendor}/gopkg.in/yaml.v1/README.md | 0 .../src => vendor}/gopkg.in/yaml.v1/apic.go | 0 .../src => vendor}/gopkg.in/yaml.v1/decode.go | 0 .../gopkg.in/yaml.v1/decode_test.go | 2 +- .../gopkg.in/yaml.v1/emitterc.go | 0 .../src => vendor}/gopkg.in/yaml.v1/encode.go | 0 .../gopkg.in/yaml.v1/encode_test.go | 2 +- .../gopkg.in/yaml.v1/parserc.go | 0 .../gopkg.in/yaml.v1/readerc.go | 0 .../gopkg.in/yaml.v1/resolve.go | 0 .../gopkg.in/yaml.v1/scannerc.go | 0 .../src => vendor}/gopkg.in/yaml.v1/sorter.go | 0 .../gopkg.in/yaml.v1/suite_test.go | 0 .../gopkg.in/yaml.v1/writerc.go | 0 .../src => vendor}/gopkg.in/yaml.v1/yaml.go | 0 .../src => vendor}/gopkg.in/yaml.v1/yamlh.go | 2 +- .../gopkg.in/yaml.v1/yamlprivateh.go | 0 vendor/vendor.json | 176 ++ 578 files changed, 7562 insertions(+), 24085 deletions(-) delete mode 100644 Godeps/Godeps.json delete mode 100644 Godeps/Readme delete mode 100644 Godeps/_workspace/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING delete mode 100644 
Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md delete mode 100644 Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go delete mode 100644 Godeps/_workspace/src/github.com/bmizerany/assert/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/bmizerany/assert/example/point.go delete mode 100644 Godeps/_workspace/src/github.com/bmizerany/assert/example/point_test.go delete mode 100644 Godeps/_workspace/src/github.com/codegangsta/cli/.travis.yml delete mode 100644 Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/bash_autocomplete delete mode 100644 Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/zsh_autocomplete delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/README.md delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go delete 
mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/nix.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go delete mode 100644 
Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go delete mode 100644 Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go delete mode 100644 Godeps/_workspace/src/github.com/howeyc/gopass/LICENSE.txt delete mode 100644 Godeps/_workspace/src/github.com/howeyc/gopass/README.md delete mode 100644 Godeps/_workspace/src/github.com/howeyc/gopass/bsd.go delete mode 100644 Godeps/_workspace/src/github.com/howeyc/gopass/pass.go delete mode 100644 Godeps/_workspace/src/github.com/howeyc/gopass/win.go delete mode 100644 Godeps/_workspace/src/github.com/inconshreveable/go-update/check/check.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/LICENSE delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/README.md delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/gopack.config delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype.go delete mode 100644 
Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt delete mode 100644 Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/LICENSE delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/README.md delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates.go delete mode 100644 Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates_test.go delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/README delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go delete mode 100644 Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go delete mode 100644 Godeps/_workspace/src/github.com/kr/binarydist/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.new delete mode 100644 Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.old delete mode 100644 Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.patch delete mode 100644 Godeps/_workspace/src/github.com/kr/pretty/.gitignore delete mode 100644 Godeps/_workspace/src/github.com/kr/text/colwriter/Readme delete mode 100644 Godeps/_workspace/src/github.com/kr/text/colwriter/column.go delete mode 100644 Godeps/_workspace/src/github.com/kr/text/colwriter/column_test.go delete mode 100644 Godeps/_workspace/src/github.com/kr/text/mc/Readme delete mode 100644 Godeps/_workspace/src/github.com/kr/text/mc/mc.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go delete mode 100644 
Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/organizations.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go delete mode 100644 Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go delete mode 100644 Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal.go delete mode 100644 Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal_test.go delete mode 100644 Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util.go delete mode 100644 Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_bsd.go delete mode 100644 Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_linux.go delete mode 100644 Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_windows.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE.libyaml delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/README.md delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/apic.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/decode.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/emitterc.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/encode.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/parserc.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/readerc.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/resolve.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/scannerc.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/sorter.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/suite_test.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/writerc.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/yaml.go delete mode 100644 Godeps/_workspace/src/gopkg.in/yaml.v1/yamlprivateh.go rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/LICENSE (100%) rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/osext.go (100%) rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/osext_plan9.go (100%) rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/osext_procfs.go (100%) rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/osext_sysctl.go (100%) rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/osext_test.go (100%) rename {Godeps/_workspace/src => vendor}/bitbucket.org/kardianos/osext/osext_windows.go (100%) rename {Godeps/_workspace/src/github.com/fhs => vendor/code.google.com/p}/go-netrc/netrc/example.netrc (100%) rename 
{Godeps/_workspace/src/github.com/fhs => vendor/code.google.com/p}/go-netrc/netrc/netrc.go (100%) rename {Godeps/_workspace/src/github.com/fhs => vendor/code.google.com/p}/go-netrc/netrc/netrc_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/COMPATIBLE (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/COPYING (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/Makefile (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/decode.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/decode_meta.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/decode_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/doc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/encode.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/encode_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/encoding_types.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/encoding_types_1.1.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/lex.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/parse.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/session.vim (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/type_check.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/BurntSushi/toml/type_fields.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/bmizerany/assert/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/bmizerany/assert/assert.go (95%) rename {Godeps/_workspace/src => vendor}/github.com/bmizerany/assert/assert_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/LICENSE (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/app.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/app_test.go (99%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/cli.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/cli_test.go (96%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/command.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/command_test.go (92%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/context.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/context_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/flag.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/flag_test.go (99%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/help.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/codegangsta/cli/helpers_test.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/fhs/go-netrc/netrc/example.netrc (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/fhs/go-netrc/netrc/netrc.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/fhs/go-netrc/netrc/netrc_test.go (100%) 
create mode 100644 vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/README.md create mode 100644 vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert.go create mode 100644 vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert_test.go rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/README (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/License (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/Readme (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/diff.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/diff_test.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/example_test.go (79%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/formatter.go (99%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/formatter_test.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/pretty.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/pretty/zero.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/License (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/Readme (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/doc.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/indent.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/indent_test.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/wrap.go (100%) rename {Godeps => vendor/github.com/github/hub/Godeps}/_workspace/src/github.com/kr/text/wrap_test.go (100%) create mode 100644 vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/README.md create mode 100644 vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_others.go create mode 100644 vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_windows.go rename {Godeps/_workspace/src => 
vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/cmd/cmd.go (79%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/cmd/cmd_test.go (84%) create mode 100644 vendor/github.com/github/hub/fixtures/fixtures.go create mode 100644 vendor/github.com/github/hub/fixtures/gh.zip create mode 100644 vendor/github.com/github/hub/fixtures/test_configs.go create mode 100644 vendor/github.com/github/hub/fixtures/test_repo.go rename {Godeps/_workspace/src => vendor}/github.com/github/hub/git/git.go (98%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/git/git_test.go (95%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/git/ssh_config.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/git/ssh_config_test.go (83%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/git/url.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/git/url_test.go (96%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/branch.go (95%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/branch_test.go (90%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/client.go (99%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/client_test.go (91%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/config.go (94%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/config_decoder.go (83%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/config_encoder.go (82%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/config_service.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/config_service_test.go (96%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/crash_report.go (92%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/crash_report_test.go (96%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/editor.go (94%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/editor_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/hosts.go (90%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/http.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/http_test.go (94%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/localrepo.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/localrepo_test.go (90%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/project.go (95%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/project_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/remote.go (94%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/url.go 
(100%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/url_test.go (90%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/github/util.go (67%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/ui/ui.go (75%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/utils/utils.go (94%) rename {Godeps/_workspace/src => vendor}/github.com/github/hub/utils/utils_test.go (81%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_events.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_events_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_notifications.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_notifications_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_star.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_star_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_watching.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/activity_watching_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/doc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/gists.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/gists_comments.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/gists_comments_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/gists_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_blobs.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_blobs_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_commits.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_commits_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_refs.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_refs_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_tags.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_tags_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_trees.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/git_trees_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/github.go (99%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/github_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/gitignore.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/gitignore_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_assignees.go (100%) rename {Godeps/_workspace/src => 
vendor}/github.com/google/go-github/github/issues_assignees_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_comments.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_comments_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_events.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_events_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_labels.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_labels_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_milestones.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_milestones_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/issues_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/misc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/misc_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/orgs.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/orgs_members.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/orgs_members_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/orgs_teams.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/orgs_teams_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/orgs_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/pulls.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/pulls_comments.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/pulls_comments_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/pulls_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_collaborators.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_collaborators_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_comments.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_comments_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_commits.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_commits_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_contents.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_contents_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_deployments.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_deployments_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_forks.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_forks_test.go (100%) rename {Godeps/_workspace/src => 
vendor}/github.com/google/go-github/github/repos_hooks.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_hooks_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_keys.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_keys_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_merging.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_merging_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_pages.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_pages_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_releases.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_releases_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_stats.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_stats_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_statuses.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_statuses_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/repos_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/search.go (98%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/search_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/strings.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/strings_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/timestamp.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/timestamp_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_administration.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_administration_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_emails.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_emails_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_followers.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_followers_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_keys.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_keys_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-github/github/users_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-querystring/query/encode.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/google/go-querystring/query/encode_test.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/howeyc/gopass/LICENSE.txt (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/howeyc/gopass/README.md (100%) rename 
{Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/howeyc/gopass/bsd.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/howeyc/gopass/nix.go (78%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/howeyc/gopass/pass.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/howeyc/gopass/win.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/LICENSE (100%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/download/download.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/hide_noop.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/hide_windows.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/update.go (98%) rename {Godeps/_workspace/src => vendor}/github.com/inconshreveable/go-update/update_test.go (99%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/LICENSE (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/README.md (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/gopack.config (100%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediaheader/decoder.go (92%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go (87%) create mode 100644 vendor/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediatype/decode.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediatype/decode_test.go (94%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediatype/encode.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediatype/encode_test.go (94%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediatype/mediatype.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/request.go (91%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/request_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/response.go (89%) rename {Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/sawyer.go (95%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jingweno/go-sawyer/sawyer_test.go (94%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jtacoma/uritemplates/LICENSE (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => 
vendor}/github.com/jtacoma/uritemplates/README.md (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jtacoma/uritemplates/uritemplates.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/jtacoma/uritemplates/uritemplates_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/License (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/Readme.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/bzip2.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/common_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/diff.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/diff_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/doc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/encoding.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/patch.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/patch_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/seek.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/kr/binarydist/sort_test.go (100%) create mode 100644 vendor/github.com/kr/pretty/License create mode 100644 vendor/github.com/kr/pretty/Readme create mode 100644 vendor/github.com/kr/pretty/diff.go create mode 100644 vendor/github.com/kr/pretty/diff_test.go create mode 100644 vendor/github.com/kr/pretty/example_test.go create mode 100644 vendor/github.com/kr/pretty/formatter.go create mode 100644 vendor/github.com/kr/pretty/formatter_test.go create mode 100644 vendor/github.com/kr/pretty/pretty.go create mode 100644 vendor/github.com/kr/pretty/zero.go create mode 100644 vendor/github.com/kr/text/License create mode 100644 vendor/github.com/kr/text/Readme create mode 100644 vendor/github.com/kr/text/doc.go create mode 100644 vendor/github.com/kr/text/indent.go create mode 100644 vendor/github.com/kr/text/indent_test.go create mode 100644 vendor/github.com/kr/text/wrap.go create mode 100644 vendor/github.com/kr/text/wrap_test.go rename {Godeps/_workspace/src => vendor}/github.com/mattn/go-isatty/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/mattn/go-isatty/doc.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/mattn/go-isatty/isatty_bsd.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/mattn/go-isatty/isatty_linux.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/mattn/go-isatty/isatty_windows.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/auth_method.go (93%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/auth_method_test.go (87%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/authorizations.go (94%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/authorizations_test.go (97%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/client.go (95%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/client_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/commits.go (96%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => 
vendor}/github.com/octokit/go-octokit/octokit/commits_test.go (95%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/emojis.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/emojis_test.go (89%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/error.go (98%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/error_test.go (98%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/gist.go (96%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/gists_test.go (95%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/git_trees.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/git_trees_test.go (95%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/hyperlink.go (73%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/hyperlink_test.go (87%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/issues.go (96%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/issues_test.go (98%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/octokit.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/octokit_test.go (97%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/organizations.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/pull_requests.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/pull_requests_test.go (98%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/releases.go (96%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/releases_test.go (97%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/repositories.go (96%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/repositories_test.go (98%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/request.go (90%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/response.go (65%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/result.go (91%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/result_test.go (60%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/root.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/root_test.go (91%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => 
vendor}/github.com/octokit/go-octokit/octokit/statuses.go (90%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/statuses_test.go (91%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/uploads.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/uploads_test.go (92%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/users.go (96%) rename {Godeps/_workspace/src => vendor}/github.com/octokit/go-octokit/octokit/users_test.go (97%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/LICENSE (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/README.md (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/bool.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/duration.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/example_test.go (96%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/export_test.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/flag.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/flag_test.go (99%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/float32.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/float64.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/int.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/int32.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/int64.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/int8.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/ip.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/ipmask.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/string.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/uint.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/uint16.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/uint32.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/uint64.go (100%) rename {Godeps/_workspace/src => vendor}/github.com/ogier/pflag/uint8.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/golang.org/x/crypto/ssh/terminal/terminal.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/golang.org/x/crypto/ssh/terminal/terminal_test.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/golang.org/x/crypto/ssh/terminal/util.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/golang.org/x/crypto/ssh/terminal/util_bsd.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/golang.org/x/crypto/ssh/terminal/util_linux.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/golang.org/x/crypto/ssh/terminal/util_windows.go (100%) create mode 100644 vendor/gopkg.in/check.v1/LICENSE create mode 100644 vendor/gopkg.in/check.v1/README.md create mode 100644 vendor/gopkg.in/check.v1/TODO create mode 100644 vendor/gopkg.in/check.v1/benchmark.go create mode 100644 
vendor/gopkg.in/check.v1/benchmark_test.go create mode 100644 vendor/gopkg.in/check.v1/bootstrap_test.go create mode 100644 vendor/gopkg.in/check.v1/check.go create mode 100644 vendor/gopkg.in/check.v1/check_test.go create mode 100644 vendor/gopkg.in/check.v1/checkers.go create mode 100644 vendor/gopkg.in/check.v1/checkers_test.go create mode 100644 vendor/gopkg.in/check.v1/export_test.go create mode 100644 vendor/gopkg.in/check.v1/fixture_test.go create mode 100644 vendor/gopkg.in/check.v1/foundation_test.go create mode 100644 vendor/gopkg.in/check.v1/helpers.go create mode 100644 vendor/gopkg.in/check.v1/helpers_test.go create mode 100644 vendor/gopkg.in/check.v1/printer.go create mode 100644 vendor/gopkg.in/check.v1/printer_test.go create mode 100644 vendor/gopkg.in/check.v1/reporter.go create mode 100644 vendor/gopkg.in/check.v1/reporter_test.go create mode 100644 vendor/gopkg.in/check.v1/run.go create mode 100644 vendor/gopkg.in/check.v1/run_test.go rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/LICENSE (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/LICENSE.libyaml (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/README.md (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/apic.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/decode.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/decode_test.go (99%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/emitterc.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/encode.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/encode_test.go (99%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/parserc.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/readerc.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/resolve.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/scannerc.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/sorter.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/suite_test.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/writerc.go (100%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/yaml.go (100%) rename {Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/yamlh.go (99%) rename {Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src => vendor}/gopkg.in/yaml.v1/yamlprivateh.go (100%) create mode 100644 vendor/vendor.json diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json deleted file mode 100644 index 474f634..0000000 --- a/Godeps/Godeps.json +++ /dev/null @@ -1,172 +0,0 @@ -{ - "ImportPath": "github.com/remind101/deploy", - "GoVersion": "go1.4.1", - "Packages": [ - "./..." 
- ], - "Deps": [ - { - "ImportPath": "bitbucket.org/kardianos/osext", - "Comment": "null-15", - "Rev": "44140c5fc69ecf1102c5ef451d73cd98ef59b178" - }, - { - "ImportPath": "github.com/BurntSushi/toml", - "Comment": "v0.1.0-9-g3883ac1", - "Rev": "3883ac1ce943878302255f538fce319d23226223" - }, - { - "ImportPath": "github.com/bmizerany/assert", - "Comment": "release.r60-6-ge17e998", - "Rev": "e17e99893cb6509f428e1728281c2ad60a6b31e3" - }, - { - "ImportPath": "github.com/codegangsta/cli", - "Comment": "1.2.0-95-g9b2bd2b", - "Rev": "9b2bd2b3489748d4d0a204fa4eb2ee9e89e0ebc6" - }, - { - "ImportPath": "github.com/fhs/go-netrc/netrc", - "Rev": "4422b68c9c934b03e8e53ef18c8c8714542def7e" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/cmd", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/git", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/github", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/ui", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/github/hub/utils", - "Comment": "v2.2.0-21-gfb118b0", - "Rev": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" - }, - { - "ImportPath": "github.com/google/go-github/github", - "Rev": "7ea4ee6d222607c11ea86e99a6f6723beeae785d" - }, - { - "ImportPath": "github.com/google/go-querystring/query", - "Rev": "ec0a78e0f4db229b7897be36596a8944230b857a" - }, - { - "ImportPath": 
"github.com/howeyc/gopass", - "Rev": "62ab5a80502a82291f265e6980d72310b8f480d5" - }, - { - "ImportPath": "github.com/inconshreveable/go-update", - "Rev": "221d034a558b4c21b0624b2a450c076913854a57" - }, - { - "ImportPath": "github.com/jingweno/go-sawyer", - "Rev": "1999ae5763d678f3ce1112cf1fda7c7e9cf2aadf" - }, - { - "ImportPath": "github.com/jtacoma/uritemplates", - "Comment": "0.1-14-g0a85813", - "Rev": "0a85813ecac22e3cbe916ab9480b33f2f4a06b2e" - }, - { - "ImportPath": "github.com/kballard/go-shellquote", - "Rev": "e5c918b80c17694cbc49aab32a759f9a40067f5d" - }, - { - "ImportPath": "github.com/kr/binarydist", - "Rev": "9955b0ab8708602d411341e55fffd7e0700f86bd" - }, - { - "ImportPath": "github.com/kr/pretty", - "Comment": "go.weekly.2011-12-22-24-gf31442d", - "Rev": "f31442d60e51465c69811e2107ae978868dbea5c" - }, - { - "ImportPath": "github.com/kr/text", - "Rev": "6807e777504f54ad073ecef66747de158294b639" - }, - { - "ImportPath": "github.com/mattn/go-isatty", - "Rev": "6152ce208cfa13d58f065348a3312b4160fb98d1" - }, - { - "ImportPath": "github.com/octokit/go-octokit/octokit", - "Comment": "v0.4.0-97-g6909930", - "Rev": "69099306b45af55301f9328f52d48338274f8d7d" - }, - { - "ImportPath": "github.com/ogier/pflag", - "Rev": "e4f7d00f344b0954fa3791a8527d10ba7334eceb" - }, - { - "ImportPath": "golang.org/x/crypto/ssh/terminal", - "Comment": "null-236", - "Rev": "69e2a90ed92d03812364aeb947b7068dc42e561e" - }, - { - "ImportPath": "gopkg.in/yaml.v1", - "Rev": "9f9df34309c04878acc86042b16630b0f696e1de" - } - ] -} diff --git a/Godeps/Readme b/Godeps/Readme deleted file mode 100644 index 4cdaa53..0000000 --- a/Godeps/Readme +++ /dev/null @@ -1,5 +0,0 @@ -This directory tree is generated automatically by godep. - -Please do not edit. - -See https://github.com/tools/godep for more information. diff --git a/Godeps/_workspace/.gitignore b/Godeps/_workspace/.gitignore deleted file mode 100644 index f037d68..0000000 --- a/Godeps/_workspace/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/pkg -/bin diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore b/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore deleted file mode 100644 index 0cd3800..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -TAGS -tags -.*.swp -tomlcheck/tomlcheck -toml.test diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml b/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml deleted file mode 100644 index 43caf6d..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: go -go: - - 1.1 - - 1.2 - - tip -install: - - go install ./... - - go get github.com/BurntSushi/toml-test -script: - - export PATH="$PATH:$HOME/gopath/bin" - - make test - diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. 
You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md deleted file mode 100644 index 24421eb..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Implements the TOML test suite interface - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for my -[toml parser written in Go](https://github.com/BurntSushi/toml). -In particular, it maps TOML data on `stdin` to a JSON format on `stdout`. - - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) - diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go deleted file mode 100644 index 176be2a..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go +++ /dev/null @@ -1,90 +0,0 @@ -// Command toml-test-decoder satisfies the toml-test interface for testing -// TOML decoders. Namely, it accepts TOML on stdin and outputs JSON on stdout. -package main - -import ( - "encoding/json" - "flag" - "fmt" - "log" - "os" - "path" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil { - log.Fatalf("Error decoding TOML: %s", err) - } - - typedTmp := translate(tmp) - if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil { - log.Fatalf("Error encoding JSON: %s", err) - } -} - -func translate(tomlData interface{}) interface{} { - switch orig := tomlData.(type) { - case map[string]interface{}: - typed := make(map[string]interface{}, len(orig)) - for k, v := range orig { - typed[k] = translate(v) - } - return typed - case []map[string]interface{}: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []interface{}: - typed := make([]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v) - } - - // We don't really need to tag arrays, but let's be future proof. - // (If TOML ever supports tuples, we'll need this.) 
- return tag("array", typed) - case time.Time: - return tag("datetime", orig.Format("2006-01-02T15:04:05Z")) - case bool: - return tag("bool", fmt.Sprintf("%v", orig)) - case int64: - return tag("integer", fmt.Sprintf("%d", orig)) - case float64: - return tag("float", fmt.Sprintf("%v", orig)) - case string: - return tag("string", orig) - } - - panic(fmt.Sprintf("Unknown type: %T", tomlData)) -} - -func tag(typeName string, data interface{}) map[string]interface{} { - return map[string]interface{}{ - "type": typeName, - "value": data, - } -} diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md deleted file mode 100644 index 45a603f..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Implements the TOML test suite interface for TOML encoders - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for the -[TOML encoder](https://github.com/BurntSushi/toml). -In particular, it maps JSON data on `stdin` to a TOML format on `stdout`. - - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) - diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go deleted file mode 100644 index 2336020..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go +++ /dev/null @@ -1,131 +0,0 @@ -// Command toml-test-encoder satisfies the toml-test interface for testing -// TOML encoders. Namely, it accepts JSON on stdin and outputs TOML on stdout. 
-package main - -import ( - "encoding/json" - "flag" - "log" - "os" - "path" - "strconv" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil { - log.Fatalf("Error decoding JSON: %s", err) - } - - tomlData := translate(tmp) - if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil { - log.Fatalf("Error encoding TOML: %s", err) - } -} - -func translate(typedJson interface{}) interface{} { - switch v := typedJson.(type) { - case map[string]interface{}: - if len(v) == 2 && in("type", v) && in("value", v) { - return untag(v) - } - m := make(map[string]interface{}, len(v)) - for k, v2 := range v { - m[k] = translate(v2) - } - return m - case []interface{}: - tabArray := make([]map[string]interface{}, len(v)) - for i := range v { - if m, ok := translate(v[i]).(map[string]interface{}); ok { - tabArray[i] = m - } else { - log.Fatalf("JSON arrays may only contain objects. This " + - "corresponds to only tables being allowed in " + - "TOML table arrays.") - } - } - return tabArray - } - log.Fatalf("Unrecognized JSON format '%T'.", typedJson) - panic("unreachable") -} - -func untag(typed map[string]interface{}) interface{} { - t := typed["type"].(string) - v := typed["value"] - switch t { - case "string": - return v.(string) - case "integer": - v := v.(string) - n, err := strconv.Atoi(v) - if err != nil { - log.Fatalf("Could not parse '%s' as integer: %s", v, err) - } - return n - case "float": - v := v.(string) - f, err := strconv.ParseFloat(v, 64) - if err != nil { - log.Fatalf("Could not parse '%s' as float64: %s", v, err) - } - return f - case "datetime": - v := v.(string) - t, err := time.Parse("2006-01-02T15:04:05Z", v) - if err != nil { - log.Fatalf("Could not parse '%s' as a datetime: %s", v, err) - } - return t - case "bool": - v := v.(string) - switch v { - case "true": - return true - case "false": - return false - } - log.Fatalf("Could not parse '%s' as a boolean.", v) - case "array": - v := v.([]interface{}) - array := make([]interface{}, len(v)) - for i := range v { - if m, ok := v[i].(map[string]interface{}); ok { - array[i] = untag(m) - } else { - log.Fatalf("Arrays may only contain other arrays or "+ - "primitive values, but found a '%T'.", m) - } - } - return array - } - log.Fatalf("Unrecognized tag type '%s'.", t) - panic("unreachable") -} - -func in(key string, m map[string]interface{}) bool { - _, ok := m[key] - return ok -} diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. 
- diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md deleted file mode 100644 index 5df0dc3..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# TOML Validator - -If Go is installed, it's simple to try it out: - -```bash -go get github.com/BurntSushi/toml/cmd/tomlv -tomlv some-toml-file.toml -``` - -You can see the types of every key in a TOML file with: - -```bash -tomlv -types some-toml-file.toml -``` - -At the moment, only one error message is reported at a time. Error messages -include line numbers. No output means that the files given are valid TOML, or -there is a bug in `tomlv`. - -Compatible with TOML version -[v0.1.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.1.0.md) - diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go b/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go deleted file mode 100644 index 9a47a6e..0000000 --- a/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go +++ /dev/null @@ -1,61 +0,0 @@ -// Command tomlv validates TOML documents and prints each key's type. -package main - -import ( - "flag" - "fmt" - "log" - "os" - "path" - "strings" - "text/tabwriter" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" -) - -var ( - flagTypes = false -) - -func init() { - log.SetFlags(0) - - flag.BoolVar(&flagTypes, "types", flagTypes, - "When set, the types of every defined key will be shown.") - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s toml-file [ toml-file ... ]\n", - path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() < 1 { - flag.Usage() - } - for _, f := range flag.Args() { - var tmp interface{} - md, err := toml.DecodeFile(f, &tmp) - if err != nil { - log.Fatalf("Error in '%s': %s", f, err) - } - if flagTypes { - printTypes(md) - } - } -} - -func printTypes(md toml.MetaData) { - tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - for _, key := range md.Keys() { - fmt.Fprintf(tabw, "%s%s\t%s\n", - strings.Repeat(" ", len(key)-1), key, md.Type(key...)) - } - tabw.Flush() -} diff --git a/Godeps/_workspace/src/github.com/bmizerany/assert/.gitignore b/Godeps/_workspace/src/github.com/bmizerany/assert/.gitignore deleted file mode 100644 index b6fadf4..0000000 --- a/Godeps/_workspace/src/github.com/bmizerany/assert/.gitignore +++ /dev/null @@ -1,7 +0,0 @@ -_go_.* -_gotest_.* -_obj -_test -_testmain.go -*.out -*.[568] diff --git a/Godeps/_workspace/src/github.com/bmizerany/assert/example/point.go b/Godeps/_workspace/src/github.com/bmizerany/assert/example/point.go deleted file mode 100644 index 15789fe..0000000 --- a/Godeps/_workspace/src/github.com/bmizerany/assert/example/point.go +++ /dev/null @@ -1,5 +0,0 @@ -package point - -type Point struct { - X, Y int -} diff --git a/Godeps/_workspace/src/github.com/bmizerany/assert/example/point_test.go b/Godeps/_workspace/src/github.com/bmizerany/assert/example/point_test.go deleted file mode 100644 index 34e791a..0000000 --- a/Godeps/_workspace/src/github.com/bmizerany/assert/example/point_test.go +++ /dev/null @@ -1,13 +0,0 @@ -package point - -import ( - "testing" - "assert" -) - -func TestAsserts(t *testing.T) { - p1 := Point{1, 1} - p2 := Point{2, 1} - - assert.Equal(t, p1, p2) -} diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/.travis.yml 
b/Godeps/_workspace/src/github.com/codegangsta/cli/.travis.yml deleted file mode 100644 index baf46ab..0000000 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/.travis.yml +++ /dev/null @@ -1,6 +0,0 @@ -language: go -go: 1.1 - -script: -- go vet ./... -- go test -v ./... diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/bash_autocomplete b/Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/bash_autocomplete deleted file mode 100644 index 9b55dd9..0000000 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/bash_autocomplete +++ /dev/null @@ -1,13 +0,0 @@ -#! /bin/bash - -_cli_bash_autocomplete() { - local cur prev opts base - COMPREPLY=() - cur="${COMP_WORDS[COMP_CWORD]}" - prev="${COMP_WORDS[COMP_CWORD-1]}" - opts=$( ${COMP_WORDS[@]:0:$COMP_CWORD} --generate-bash-completion ) - COMPREPLY=( $(compgen -W "${opts}" -- ${cur}) ) - return 0 - } - - complete -F _cli_bash_autocomplete $PROG \ No newline at end of file diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/zsh_autocomplete b/Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/zsh_autocomplete deleted file mode 100644 index 5430a18..0000000 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/autocomplete/zsh_autocomplete +++ /dev/null @@ -1,5 +0,0 @@ -autoload -U compinit && compinit -autoload -U bashcompinit && bashcompinit - -script_dir=$(dirname $0) -source ${script_dir}/bash_autocomplete diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore deleted file mode 100644 index 0cd3800..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.gitignore +++ /dev/null @@ -1,5 +0,0 @@ -TAGS -tags -.*.swp -tomlcheck/tomlcheck -toml.test diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml deleted file mode 100644 index 43caf6d..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/.travis.yml +++ /dev/null @@ -1,12 +0,0 @@ -language: go -go: - - 1.1 - - 1.2 - - tip -install: - - go install ./... 
- - go get github.com/BurntSushi/toml-test -script: - - export PATH="$PATH:$HOME/gopath/bin" - - make test - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE deleted file mode 100644 index 21e0938..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE +++ /dev/null @@ -1,3 +0,0 @@ -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile deleted file mode 100644 index 3600848..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -install: - go install ./... - -test: install - go test -v - toml-test toml-test-decoder - toml-test -encoder toml-test-encoder - -fmt: - gofmt -w *.go */*.go - colcheck *.go */*.go - -tags: - find ./ -name '*.go' -print0 | xargs -0 gotags > TAGS - -push: - git push origin master - git push github master - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/README.md b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/README.md deleted file mode 100644 index e861c0c..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/README.md +++ /dev/null @@ -1,220 +0,0 @@ -## TOML parser and encoder for Go with reflection - -TOML stands for Tom's Obvious, Minimal Language. This Go package provides a -reflection interface similar to Go's standard library `json` and `xml` -packages. This package also supports the `encoding.TextUnmarshaler` and -`encoding.TextMarshaler` interfaces so that you can define custom data -representations. (There is an example of this below.) 
- -Spec: https://github.com/mojombo/toml - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Documentation: http://godoc.org/github.com/BurntSushi/toml - -Installation: - -```bash -go get github.com/BurntSushi/toml -``` - -Try the toml validator: - -```bash -go get github.com/BurntSushi/toml/cmd/tomlv -tomlv some-toml-file.toml -``` - -[![Build status](https://api.travis-ci.org/BurntSushi/toml.png)](https://travis-ci.org/BurntSushi/toml) - - -### Testing - -This package passes all tests in -[toml-test](https://github.com/BurntSushi/toml-test) for both the decoder -and the encoder. - -### Examples - -This package works similarly to how the Go standard library handles `XML` -and `JSON`. Namely, data is loaded into Go values via reflection. - -For the simplest example, consider some TOML file as just a list of keys -and values: - -```toml -Age = 25 -Cats = [ "Cauchy", "Plato" ] -Pi = 3.14 -Perfection = [ 6, 28, 496, 8128 ] -DOB = 1987-07-05T05:45:00Z -``` - -Which could be defined in Go as: - -```go -type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time // requires `import time` -} -``` - -And then decoded with: - -```go -var conf Config -if _, err := toml.Decode(tomlData, &conf); err != nil { - // handle error -} -``` - -You can also use struct tags if your struct field name doesn't map to a TOML -key value directly: - -```toml -some_key_NAME = "wat" -``` - -```go -type TOML struct { - ObscureKey string `toml:"some_key_NAME"` -} -``` - -### Using the `encoding.TextUnmarshaler` interface - -Here's an example that automatically parses duration strings into -`time.Duration` values: - -```toml -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -``` - -Which can be decoded with: - -```go -type song struct { - Name string - Duration duration -} -type songs struct { - Song []song -} -var favorites songs -if _, err := toml.Decode(blob, &favorites); err != nil { - log.Fatal(err) -} - -for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) -} -``` - -And you'll also need a `duration` type that satisfies the -`encoding.TextUnmarshaler` interface: - -```go -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} -``` - -### More complex usage - -Here's an example of how to load the example from the official spec page: - -```toml -# This is a TOML document. Boom. - -title = "TOML Example" - -[owner] -name = "Tom Preston-Werner" -organization = "GitHub" -bio = "GitHub Cofounder & CEO\nLikes tater tots and beer." -dob = 1979-05-27T07:32:00Z # First class dates? Why not? - -[database] -server = "192.168.1.1" -ports = [ 8001, 8001, 8002 ] -connection_max = 5000 -enabled = true - -[servers] - - # You can indent as you please. Tabs or spaces. TOML don't care. 
- [servers.alpha] - ip = "10.0.0.1" - dc = "eqdc10" - - [servers.beta] - ip = "10.0.0.2" - dc = "eqdc10" - -[clients] -data = [ ["gamma", "delta"], [1, 2] ] # just an update to make sure parsers support it - -# Line breaks are OK when inside arrays -hosts = [ - "alpha", - "omega" -] -``` - -And the corresponding Go types are: - -```go -type tomlConfig struct { - Title string - Owner ownerInfo - DB database `toml:"database"` - Servers map[string]server - Clients clients -} - -type ownerInfo struct { - Name string - Org string `toml:"organization"` - Bio string - DOB time.Time -} - -type database struct { - Server string - Ports []int - ConnMax int `toml:"connection_max"` - Enabled bool -} - -type server struct { - IP string - DC string -} - -type clients struct { - Data [][]interface{} - Hosts []string -} -``` - -Note that a case insensitive match will be tried if an exact match can't be -found. - -A working example of the above can be found in `_examples/example.{go,toml}`. - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md deleted file mode 100644 index 24421eb..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Implements the TOML test suite interface - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for my -[toml parser written in Go](https://github.com/BurntSushi/toml). -In particular, it maps TOML data on `stdin` to a JSON format on `stdout`. - - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go deleted file mode 100644 index 176be2a..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-decoder/main.go +++ /dev/null @@ -1,90 +0,0 @@ -// Command toml-test-decoder satisfies the toml-test interface for testing -// TOML decoders. Namely, it accepts TOML on stdin and outputs JSON on stdout. 
-package main - -import ( - "encoding/json" - "flag" - "fmt" - "log" - "os" - "path" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < toml-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if _, err := toml.DecodeReader(os.Stdin, &tmp); err != nil { - log.Fatalf("Error decoding TOML: %s", err) - } - - typedTmp := translate(tmp) - if err := json.NewEncoder(os.Stdout).Encode(typedTmp); err != nil { - log.Fatalf("Error encoding JSON: %s", err) - } -} - -func translate(tomlData interface{}) interface{} { - switch orig := tomlData.(type) { - case map[string]interface{}: - typed := make(map[string]interface{}, len(orig)) - for k, v := range orig { - typed[k] = translate(v) - } - return typed - case []map[string]interface{}: - typed := make([]map[string]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v).(map[string]interface{}) - } - return typed - case []interface{}: - typed := make([]interface{}, len(orig)) - for i, v := range orig { - typed[i] = translate(v) - } - - // We don't really need to tag arrays, but let's be future proof. - // (If TOML ever supports tuples, we'll need this.) - return tag("array", typed) - case time.Time: - return tag("datetime", orig.Format("2006-01-02T15:04:05Z")) - case bool: - return tag("bool", fmt.Sprintf("%v", orig)) - case int64: - return tag("integer", fmt.Sprintf("%d", orig)) - case float64: - return tag("float", fmt.Sprintf("%v", orig)) - case string: - return tag("string", orig) - } - - panic(fmt.Sprintf("Unknown type: %T", tomlData)) -} - -func tag(typeName string, data interface{}) map[string]interface{} { - return map[string]interface{}{ - "type": typeName, - "value": data, - } -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. 
- diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md deleted file mode 100644 index 45a603f..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Implements the TOML test suite interface for TOML encoders - -This is an implementation of the interface expected by -[toml-test](https://github.com/BurntSushi/toml-test) for the -[TOML encoder](https://github.com/BurntSushi/toml). -In particular, it maps JSON data on `stdin` to a TOML format on `stdout`. - - -Compatible with TOML version -[v0.2.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.2.0.md) - -Compatible with `toml-test` version -[v0.2.0](https://github.com/BurntSushi/toml-test/tree/v0.2.0) - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go deleted file mode 100644 index 2336020..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/toml-test-encoder/main.go +++ /dev/null @@ -1,131 +0,0 @@ -// Command toml-test-encoder satisfies the toml-test interface for testing -// TOML encoders. Namely, it accepts JSON on stdin and outputs TOML on stdout. -package main - -import ( - "encoding/json" - "flag" - "log" - "os" - "path" - "strconv" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" -) - -func init() { - log.SetFlags(0) - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s < json-file\n", path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() != 0 { - flag.Usage() - } - - var tmp interface{} - if err := json.NewDecoder(os.Stdin).Decode(&tmp); err != nil { - log.Fatalf("Error decoding JSON: %s", err) - } - - tomlData := translate(tmp) - if err := toml.NewEncoder(os.Stdout).Encode(tomlData); err != nil { - log.Fatalf("Error encoding TOML: %s", err) - } -} - -func translate(typedJson interface{}) interface{} { - switch v := typedJson.(type) { - case map[string]interface{}: - if len(v) == 2 && in("type", v) && in("value", v) { - return untag(v) - } - m := make(map[string]interface{}, len(v)) - for k, v2 := range v { - m[k] = translate(v2) - } - return m - case []interface{}: - tabArray := make([]map[string]interface{}, len(v)) - for i := range v { - if m, ok := translate(v[i]).(map[string]interface{}); ok { - tabArray[i] = m - } else { - log.Fatalf("JSON arrays may only contain objects. 
This " + - "corresponds to only tables being allowed in " + - "TOML table arrays.") - } - } - return tabArray - } - log.Fatalf("Unrecognized JSON format '%T'.", typedJson) - panic("unreachable") -} - -func untag(typed map[string]interface{}) interface{} { - t := typed["type"].(string) - v := typed["value"] - switch t { - case "string": - return v.(string) - case "integer": - v := v.(string) - n, err := strconv.Atoi(v) - if err != nil { - log.Fatalf("Could not parse '%s' as integer: %s", v, err) - } - return n - case "float": - v := v.(string) - f, err := strconv.ParseFloat(v, 64) - if err != nil { - log.Fatalf("Could not parse '%s' as float64: %s", v, err) - } - return f - case "datetime": - v := v.(string) - t, err := time.Parse("2006-01-02T15:04:05Z", v) - if err != nil { - log.Fatalf("Could not parse '%s' as a datetime: %s", v, err) - } - return t - case "bool": - v := v.(string) - switch v { - case "true": - return true - case "false": - return false - } - log.Fatalf("Could not parse '%s' as a boolean.", v) - case "array": - v := v.([]interface{}) - array := make([]interface{}, len(v)) - for i := range v { - if m, ok := v[i].(map[string]interface{}); ok { - array[i] = untag(m) - } else { - log.Fatalf("Arrays may only contain other arrays or "+ - "primitive values, but found a '%T'.", m) - } - } - return array - } - log.Fatalf("Unrecognized tag type '%s'.", t) - panic("unreachable") -} - -func in(key string, m map[string]interface{}) bool { - _, ok := m[key] - return ok -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING deleted file mode 100644 index 5a8e332..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/COPYING +++ /dev/null @@ -1,14 +0,0 @@ - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - Version 2, December 2004 - - Copyright (C) 2004 Sam Hocevar - - Everyone is permitted to copy and distribute verbatim or modified - copies of this license document, and changing it is allowed as long - as the name is changed. - - DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE - TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION - - 0. You just DO WHAT THE FUCK YOU WANT TO. - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md deleted file mode 100644 index 5df0dc3..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/README.md +++ /dev/null @@ -1,22 +0,0 @@ -# TOML Validator - -If Go is installed, it's simple to try it out: - -```bash -go get github.com/BurntSushi/toml/cmd/tomlv -tomlv some-toml-file.toml -``` - -You can see the types of every key in a TOML file with: - -```bash -tomlv -types some-toml-file.toml -``` - -At the moment, only one error message is reported at a time. Error messages -include line numbers. No output means that the files given are valid TOML, or -there is a bug in `tomlv`. 
- -Compatible with TOML version -[v0.1.0](https://github.com/mojombo/toml/blob/master/versions/toml-v0.1.0.md) - diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go deleted file mode 100644 index 9a47a6e..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/cmd/tomlv/main.go +++ /dev/null @@ -1,61 +0,0 @@ -// Command tomlv validates TOML documents and prints each key's type. -package main - -import ( - "flag" - "fmt" - "log" - "os" - "path" - "strings" - "text/tabwriter" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" -) - -var ( - flagTypes = false -) - -func init() { - log.SetFlags(0) - - flag.BoolVar(&flagTypes, "types", flagTypes, - "When set, the types of every defined key will be shown.") - - flag.Usage = usage - flag.Parse() -} - -func usage() { - log.Printf("Usage: %s toml-file [ toml-file ... ]\n", - path.Base(os.Args[0])) - flag.PrintDefaults() - - os.Exit(1) -} - -func main() { - if flag.NArg() < 1 { - flag.Usage() - } - for _, f := range flag.Args() { - var tmp interface{} - md, err := toml.DecodeFile(f, &tmp) - if err != nil { - log.Fatalf("Error in '%s': %s", f, err) - } - if flagTypes { - printTypes(md) - } - } -} - -func printTypes(md toml.MetaData) { - tabw := tabwriter.NewWriter(os.Stdout, 0, 0, 2, ' ', 0) - for _, key := range md.Keys() { - fmt.Fprintf(tabw, "%s%s\t%s\n", - strings.Repeat(" ", len(key)-1), key, md.Type(key...)) - } - tabw.Flush() -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go deleted file mode 100644 index 6c7d398..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go +++ /dev/null @@ -1,492 +0,0 @@ -package toml - -import ( - "fmt" - "io" - "io/ioutil" - "math" - "reflect" - "strings" - "time" -) - -var e = fmt.Errorf - -// Unmarshaler is the interface implemented by objects that can unmarshal a -// TOML description of themselves. -type Unmarshaler interface { - UnmarshalTOML(interface{}) error -} - -// Unmarshal decodes the contents of `p` in TOML format into a pointer `v`. -func Unmarshal(p []byte, v interface{}) error { - _, err := Decode(string(p), v) - return err -} - -// Primitive is a TOML value that hasn't been decoded into a Go value. -// When using the various `Decode*` functions, the type `Primitive` may -// be given to any value, and its decoding will be delayed. -// -// A `Primitive` value can be decoded using the `PrimitiveDecode` function. -// -// The underlying representation of a `Primitive` value is subject to change. -// Do not rely on it. -// -// N.B. Primitive values are still parsed, so using them will only avoid -// the overhead of reflection. They can be useful when you don't know the -// exact type of TOML data until run time. -type Primitive struct { - undecoded interface{} - context Key -} - -// DEPRECATED! -// -// Use MetaData.PrimitiveDecode instead. 
-func PrimitiveDecode(primValue Primitive, v interface{}) error { - md := MetaData{decoded: make(map[string]bool)} - return md.unify(primValue.undecoded, rvalue(v)) -} - -// PrimitiveDecode is just like the other `Decode*` functions, except it -// decodes a TOML value that has already been parsed. Valid primitive values -// can *only* be obtained from values filled by the decoder functions, -// including this method. (i.e., `v` may contain more `Primitive` -// values.) -// -// Meta data for primitive values is included in the meta data returned by -// the `Decode*` functions with one exception: keys returned by the Undecoded -// method will only reflect keys that were decoded. Namely, any keys hidden -// behind a Primitive will be considered undecoded. Executing this method will -// update the undecoded keys in the meta data. (See the example.) -func (md *MetaData) PrimitiveDecode(primValue Primitive, v interface{}) error { - md.context = primValue.context - defer func() { md.context = nil }() - return md.unify(primValue.undecoded, rvalue(v)) -} - -// Decode will decode the contents of `data` in TOML format into a pointer -// `v`. -// -// TOML hashes correspond to Go structs or maps. (Dealer's choice. They can be -// used interchangeably.) -// -// TOML arrays of tables correspond to either a slice of structs or a slice -// of maps. -// -// TOML datetimes correspond to Go `time.Time` values. -// -// All other TOML types (float, string, int, bool and array) correspond -// to the obvious Go types. -// -// An exception to the above rules is if a type implements the -// encoding.TextUnmarshaler interface. In this case, any primitive TOML value -// (floats, strings, integers, booleans and datetimes) will be converted to -// a byte string and given to the value's UnmarshalText method. See the -// Unmarshaler example for a demonstration with time duration strings. -// -// Key mapping -// -// TOML keys can map to either keys in a Go map or field names in a Go -// struct. The special `toml` struct tag may be used to map TOML keys to -// struct fields that don't match the key name exactly. (See the example.) -// A case insensitive match to struct names will be tried if an exact match -// can't be found. -// -// The mapping between TOML values and Go values is loose. That is, there -// may exist TOML values that cannot be placed into your representation, and -// there may be parts of your representation that do not correspond to -// TOML values. This loose mapping can be made stricter by using the IsDefined -// and/or Undecoded methods on the MetaData returned. -// -// This decoder will not handle cyclic types. If a cyclic type is passed, -// `Decode` will not terminate. -func Decode(data string, v interface{}) (MetaData, error) { - p, err := parse(data) - if err != nil { - return MetaData{}, err - } - md := MetaData{ - p.mapping, p.types, p.ordered, - make(map[string]bool, len(p.ordered)), nil, - } - return md, md.unify(p.mapping, rvalue(v)) -} - -// DecodeFile is just like Decode, except it will automatically read the -// contents of the file at `fpath` and decode it for you. -func DecodeFile(fpath string, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadFile(fpath) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// DecodeReader is just like Decode, except it will consume all bytes -// from the reader and decode it for you. 
-func DecodeReader(r io.Reader, v interface{}) (MetaData, error) { - bs, err := ioutil.ReadAll(r) - if err != nil { - return MetaData{}, err - } - return Decode(string(bs), v) -} - -// unify performs a sort of type unification based on the structure of `rv`, -// which is the client representation. -// -// Any type mismatch produces an error. Finding a type that we don't know -// how to handle produces an unsupported type error. -func (md *MetaData) unify(data interface{}, rv reflect.Value) error { - - // Special case. Look for a `Primitive` value. - if rv.Type() == reflect.TypeOf((*Primitive)(nil)).Elem() { - // Save the undecoded data and the key context into the primitive - // value. - context := make(Key, len(md.context)) - copy(context, md.context) - rv.Set(reflect.ValueOf(Primitive{ - undecoded: data, - context: context, - })) - return nil - } - - // Special case. Unmarshaler Interface support. - if rv.CanAddr() { - if v, ok := rv.Addr().Interface().(Unmarshaler); ok { - return v.UnmarshalTOML(data) - } - } - - // Special case. Handle time.Time values specifically. - // TODO: Remove this code when we decide to drop support for Go 1.1. - // This isn't necessary in Go 1.2 because time.Time satisfies the encoding - // interfaces. - if rv.Type().AssignableTo(rvalue(time.Time{}).Type()) { - return md.unifyDatetime(data, rv) - } - - // Special case. Look for a value satisfying the TextUnmarshaler interface. - if v, ok := rv.Interface().(TextUnmarshaler); ok { - return md.unifyText(data, v) - } - // BUG(burntsushi) - // The behavior here is incorrect whenever a Go type satisfies the - // encoding.TextUnmarshaler interface but also corresponds to a TOML - // hash or array. In particular, the unmarshaler should only be applied - // to primitive TOML values. But at this point, it will be applied to - // all kinds of values and produce an incorrect error whenever those values - // are hashes or arrays (including arrays of tables). - - k := rv.Kind() - - // laziness - if k >= reflect.Int && k <= reflect.Uint64 { - return md.unifyInt(data, rv) - } - switch k { - case reflect.Ptr: - elem := reflect.New(rv.Type().Elem()) - err := md.unify(data, reflect.Indirect(elem)) - if err != nil { - return err - } - rv.Set(elem) - return nil - case reflect.Struct: - return md.unifyStruct(data, rv) - case reflect.Map: - return md.unifyMap(data, rv) - case reflect.Array: - return md.unifyArray(data, rv) - case reflect.Slice: - return md.unifySlice(data, rv) - case reflect.String: - return md.unifyString(data, rv) - case reflect.Bool: - return md.unifyBool(data, rv) - case reflect.Interface: - // we only support empty interfaces. 
- if rv.NumMethod() > 0 { - return e("Unsupported type '%s'.", rv.Kind()) - } - return md.unifyAnything(data, rv) - case reflect.Float32: - fallthrough - case reflect.Float64: - return md.unifyFloat64(data, rv) - } - return e("Unsupported type '%s'.", rv.Kind()) -} - -func (md *MetaData) unifyStruct(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - return mismatch(rv, "map", mapping) - } - - for key, datum := range tmap { - var f *field - fields := cachedTypeFields(rv.Type()) - for i := range fields { - ff := &fields[i] - if ff.name == key { - f = ff - break - } - if f == nil && strings.EqualFold(ff.name, key) { - f = ff - } - } - if f != nil { - subv := rv - for _, i := range f.index { - subv = indirect(subv.Field(i)) - } - if isUnifiable(subv) { - md.decoded[md.context.add(key).String()] = true - md.context = append(md.context, key) - if err := md.unify(datum, subv); err != nil { - return e("Type mismatch for '%s.%s': %s", - rv.Type().String(), f.name, err) - } - md.context = md.context[0 : len(md.context)-1] - } else if f.name != "" { - // Bad user! No soup for you! - return e("Field '%s.%s' is unexported, and therefore cannot "+ - "be loaded with reflection.", rv.Type().String(), f.name) - } - } - } - return nil -} - -func (md *MetaData) unifyMap(mapping interface{}, rv reflect.Value) error { - tmap, ok := mapping.(map[string]interface{}) - if !ok { - return badtype("map", mapping) - } - if rv.IsNil() { - rv.Set(reflect.MakeMap(rv.Type())) - } - for k, v := range tmap { - md.decoded[md.context.add(k).String()] = true - md.context = append(md.context, k) - - rvkey := indirect(reflect.New(rv.Type().Key())) - rvval := reflect.Indirect(reflect.New(rv.Type().Elem())) - if err := md.unify(v, rvval); err != nil { - return err - } - md.context = md.context[0 : len(md.context)-1] - - rvkey.SetString(k) - rv.SetMapIndex(rvkey, rvval) - } - return nil -} - -func (md *MetaData) unifyArray(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - return badtype("slice", data) - } - sliceLen := datav.Len() - if sliceLen != rv.Len() { - return e("expected array length %d; got TOML array of length %d", - rv.Len(), sliceLen) - } - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySlice(data interface{}, rv reflect.Value) error { - datav := reflect.ValueOf(data) - if datav.Kind() != reflect.Slice { - return badtype("slice", data) - } - sliceLen := datav.Len() - if rv.IsNil() { - rv.Set(reflect.MakeSlice(rv.Type(), sliceLen, sliceLen)) - } - return md.unifySliceArray(datav, rv) -} - -func (md *MetaData) unifySliceArray(data, rv reflect.Value) error { - sliceLen := data.Len() - for i := 0; i < sliceLen; i++ { - v := data.Index(i).Interface() - sliceval := indirect(rv.Index(i)) - if err := md.unify(v, sliceval); err != nil { - return err - } - } - return nil -} - -func (md *MetaData) unifyDatetime(data interface{}, rv reflect.Value) error { - if _, ok := data.(time.Time); ok { - rv.Set(reflect.ValueOf(data)) - return nil - } - return badtype("time.Time", data) -} - -func (md *MetaData) unifyString(data interface{}, rv reflect.Value) error { - if s, ok := data.(string); ok { - rv.SetString(s) - return nil - } - return badtype("string", data) -} - -func (md *MetaData) unifyFloat64(data interface{}, rv reflect.Value) error { - if num, ok := data.(float64); ok { - switch rv.Kind() { - case reflect.Float32: - fallthrough - case reflect.Float64: - rv.SetFloat(num) - default: - 
panic("bug") - } - return nil - } - return badtype("float", data) -} - -func (md *MetaData) unifyInt(data interface{}, rv reflect.Value) error { - if num, ok := data.(int64); ok { - if rv.Kind() >= reflect.Int && rv.Kind() <= reflect.Int64 { - switch rv.Kind() { - case reflect.Int, reflect.Int64: - // No bounds checking necessary. - case reflect.Int8: - if num < math.MinInt8 || num > math.MaxInt8 { - return e("Value '%d' is out of range for int8.", num) - } - case reflect.Int16: - if num < math.MinInt16 || num > math.MaxInt16 { - return e("Value '%d' is out of range for int16.", num) - } - case reflect.Int32: - if num < math.MinInt32 || num > math.MaxInt32 { - return e("Value '%d' is out of range for int32.", num) - } - } - rv.SetInt(num) - } else if rv.Kind() >= reflect.Uint && rv.Kind() <= reflect.Uint64 { - unum := uint64(num) - switch rv.Kind() { - case reflect.Uint, reflect.Uint64: - // No bounds checking necessary. - case reflect.Uint8: - if num < 0 || unum > math.MaxUint8 { - return e("Value '%d' is out of range for uint8.", num) - } - case reflect.Uint16: - if num < 0 || unum > math.MaxUint16 { - return e("Value '%d' is out of range for uint16.", num) - } - case reflect.Uint32: - if num < 0 || unum > math.MaxUint32 { - return e("Value '%d' is out of range for uint32.", num) - } - } - rv.SetUint(unum) - } else { - panic("unreachable") - } - return nil - } - return badtype("integer", data) -} - -func (md *MetaData) unifyBool(data interface{}, rv reflect.Value) error { - if b, ok := data.(bool); ok { - rv.SetBool(b) - return nil - } - return badtype("boolean", data) -} - -func (md *MetaData) unifyAnything(data interface{}, rv reflect.Value) error { - rv.Set(reflect.ValueOf(data)) - return nil -} - -func (md *MetaData) unifyText(data interface{}, v TextUnmarshaler) error { - var s string - switch sdata := data.(type) { - case TextMarshaler: - text, err := sdata.MarshalText() - if err != nil { - return err - } - s = string(text) - case fmt.Stringer: - s = sdata.String() - case string: - s = sdata - case bool: - s = fmt.Sprintf("%v", sdata) - case int64: - s = fmt.Sprintf("%d", sdata) - case float64: - s = fmt.Sprintf("%f", sdata) - default: - return badtype("primitive (string-like)", data) - } - if err := v.UnmarshalText([]byte(s)); err != nil { - return err - } - return nil -} - -// rvalue returns a reflect.Value of `v`. All pointers are resolved. -func rvalue(v interface{}) reflect.Value { - return indirect(reflect.ValueOf(v)) -} - -// indirect returns the value pointed to by a pointer. -// Pointers are followed until the value is not a pointer. -// New values are allocated for each nil pointer. -// -// An exception to this rule is if the value satisfies an interface of -// interest to us (like encoding.TextUnmarshaler). -func indirect(v reflect.Value) reflect.Value { - if v.Kind() != reflect.Ptr { - if v.CanAddr() { - pv := v.Addr() - if _, ok := pv.Interface().(TextUnmarshaler); ok { - return pv - } - } - return v - } - if v.IsNil() { - v.Set(reflect.New(v.Type().Elem())) - } - return indirect(reflect.Indirect(v)) -} - -func isUnifiable(rv reflect.Value) bool { - if rv.CanSet() { - return true - } - if _, ok := rv.Interface().(TextUnmarshaler); ok { - return true - } - return false -} - -func badtype(expected string, data interface{}) error { - return e("Expected %s but found '%T'.", expected, data) -} - -func mismatch(user reflect.Value, expected string, data interface{}) error { - return e("Type mismatch for %s. 
Expected %s but found '%T'.", - user.Type().String(), expected, data) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go deleted file mode 100644 index c811445..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go +++ /dev/null @@ -1,99 +0,0 @@ -package toml - -import "strings" - -// MetaData allows access to meta information about TOML data that may not -// be inferrable via reflection. In particular, whether a key has been defined -// and the TOML type of a key. -type MetaData struct { - mapping map[string]interface{} - types map[string]tomlType - keys []Key - decoded map[string]bool - context Key // Used only during decoding. -} - -// IsDefined returns true if the key given exists in the TOML data. The key -// should be specified hierarchially. e.g., -// -// // access the TOML key 'a.b.c' -// IsDefined("a", "b", "c") -// -// IsDefined will return false if an empty key given. Keys are case sensitive. -func (md *MetaData) IsDefined(key ...string) bool { - if len(key) == 0 { - return false - } - - var hash map[string]interface{} - var ok bool - var hashOrVal interface{} = md.mapping - for _, k := range key { - if hash, ok = hashOrVal.(map[string]interface{}); !ok { - return false - } - if hashOrVal, ok = hash[k]; !ok { - return false - } - } - return true -} - -// Type returns a string representation of the type of the key specified. -// -// Type will return the empty string if given an empty key or a key that -// does not exist. Keys are case sensitive. -func (md *MetaData) Type(key ...string) string { - fullkey := strings.Join(key, ".") - if typ, ok := md.types[fullkey]; ok { - return typ.typeString() - } - return "" -} - -// Key is the type of any TOML key, including key groups. Use (MetaData).Keys -// to get values of this type. -type Key []string - -func (k Key) String() string { - return strings.Join(k, ".") -} - -func (k Key) add(piece string) Key { - newKey := make(Key, len(k)+1) - copy(newKey, k) - newKey[len(k)] = piece - return newKey -} - -// Keys returns a slice of every key in the TOML data, including key groups. -// Each key is itself a slice, where the first element is the top of the -// hierarchy and the last is the most specific. -// -// The list will have the same order as the keys appeared in the TOML data. -// -// All keys returned are non-empty. -func (md *MetaData) Keys() []Key { - return md.keys -} - -// Undecoded returns all keys that have not been decoded in the order in which -// they appear in the original TOML document. -// -// This includes keys that haven't been decoded because of a Primitive value. -// Once the Primitive value is decoded, the keys will be considered decoded. -// -// Also note that decoding into an empty interface will result in no decoding, -// and so no keys will be considered decoded. -// -// In this sense, the Undecoded keys correspond to keys in the TOML document -// that do not have a concrete type in your representation. 
-func (md *MetaData) Undecoded() []Key { - undecoded := make([]Key, 0, len(md.keys)) - for _, key := range md.keys { - if !md.decoded[key.String()] { - undecoded = append(undecoded, key) - } - } - return undecoded -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go deleted file mode 100644 index 6405250..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go +++ /dev/null @@ -1,949 +0,0 @@ -package toml - -import ( - "fmt" - "log" - "reflect" - "testing" - "time" -) - -func init() { - log.SetFlags(0) -} - -func TestDecodeSimple(t *testing.T) { - var testSimple = ` -age = 250 -andrew = "gallant" -kait = "brady" -now = 1987-07-05T05:45:00Z -yesOrNo = true -pi = 3.14 -colors = [ - ["red", "green", "blue"], - ["cyan", "magenta", "yellow", "black"], -] - -[My.Cats] -plato = "cat 1" -cauchy = "cat 2" -` - - type cats struct { - Plato string - Cauchy string - } - type simple struct { - Age int - Colors [][]string - Pi float64 - YesOrNo bool - Now time.Time - Andrew string - Kait string - My map[string]cats - } - - var val simple - _, err := Decode(testSimple, &val) - if err != nil { - t.Fatal(err) - } - - now, err := time.Parse("2006-01-02T15:04:05", "1987-07-05T05:45:00") - if err != nil { - panic(err) - } - var answer = simple{ - Age: 250, - Andrew: "gallant", - Kait: "brady", - Now: now, - YesOrNo: true, - Pi: 3.14, - Colors: [][]string{ - {"red", "green", "blue"}, - {"cyan", "magenta", "yellow", "black"}, - }, - My: map[string]cats{ - "Cats": cats{Plato: "cat 1", Cauchy: "cat 2"}, - }, - } - if !reflect.DeepEqual(val, answer) { - t.Fatalf("Expected\n-----\n%#v\n-----\nbut got\n-----\n%#v\n", - answer, val) - } -} - -func TestDecodeEmbedded(t *testing.T) { - type Dog struct{ Name string } - type Age int - - tests := map[string]struct { - input string - decodeInto interface{} - wantDecoded interface{} - }{ - "embedded struct": { - input: `Name = "milton"`, - decodeInto: &struct{ Dog }{}, - wantDecoded: &struct{ Dog }{Dog{"milton"}}, - }, - "embedded non-nil pointer to struct": { - input: `Name = "milton"`, - decodeInto: &struct{ *Dog }{}, - wantDecoded: &struct{ *Dog }{&Dog{"milton"}}, - }, - "embedded nil pointer to struct": { - input: ``, - decodeInto: &struct{ *Dog }{}, - wantDecoded: &struct{ *Dog }{nil}, - }, - "embedded int": { - input: `Age = -5`, - decodeInto: &struct{ Age }{}, - wantDecoded: &struct{ Age }{-5}, - }, - } - - for label, test := range tests { - _, err := Decode(test.input, test.decodeInto) - if err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(test.wantDecoded, test.decodeInto) { - t.Errorf("%s: want decoded == %+v, got %+v", - label, test.wantDecoded, test.decodeInto) - } - } -} - -func TestTableArrays(t *testing.T) { - var tomlTableArrays = ` -[[albums]] -name = "Born to Run" - - [[albums.songs]] - name = "Jungleland" - - [[albums.songs]] - name = "Meeting Across the River" - -[[albums]] -name = "Born in the USA" - - [[albums.songs]] - name = "Glory Days" - - [[albums.songs]] - name = "Dancing in the Dark" -` - - type Song struct { - Name string - } - - type Album struct { - Name string - Songs []Song - } - - type Music struct { - Albums []Album - } - - expected := Music{[]Album{ - {"Born to Run", []Song{{"Jungleland"}, {"Meeting Across the River"}}}, - {"Born in the USA", []Song{{"Glory Days"}, {"Dancing in the 
Dark"}}}, - }} - var got Music - if _, err := Decode(tomlTableArrays, &got); err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(expected, got) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, got) - } -} - -// Case insensitive matching tests. -// A bit more comprehensive than needed given the current implementation, -// but implementations change. -// Probably still missing demonstrations of some ugly corner cases regarding -// case insensitive matching and multiple fields. -func TestCase(t *testing.T) { - var caseToml = ` -tOpString = "string" -tOpInt = 1 -tOpFloat = 1.1 -tOpBool = true -tOpdate = 2006-01-02T15:04:05Z -tOparray = [ "array" ] -Match = "i should be in Match only" -MatcH = "i should be in MatcH only" -once = "just once" -[nEst.eD] -nEstedString = "another string" -` - - type InsensitiveEd struct { - NestedString string - } - - type InsensitiveNest struct { - Ed InsensitiveEd - } - - type Insensitive struct { - TopString string - TopInt int - TopFloat float64 - TopBool bool - TopDate time.Time - TopArray []string - Match string - MatcH string - Once string - OncE string - Nest InsensitiveNest - } - - tme, err := time.Parse(time.RFC3339, time.RFC3339[:len(time.RFC3339)-5]) - if err != nil { - panic(err) - } - expected := Insensitive{ - TopString: "string", - TopInt: 1, - TopFloat: 1.1, - TopBool: true, - TopDate: tme, - TopArray: []string{"array"}, - MatcH: "i should be in MatcH only", - Match: "i should be in Match only", - Once: "just once", - OncE: "", - Nest: InsensitiveNest{ - Ed: InsensitiveEd{NestedString: "another string"}, - }, - } - var got Insensitive - if _, err := Decode(caseToml, &got); err != nil { - t.Fatal(err) - } - if !reflect.DeepEqual(expected, got) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, got) - } -} - -func TestPointers(t *testing.T) { - type Object struct { - Type string - Description string - } - - type Dict struct { - NamedObject map[string]*Object - BaseObject *Object - Strptr *string - Strptrs []*string - } - s1, s2, s3 := "blah", "abc", "def" - expected := &Dict{ - Strptr: &s1, - Strptrs: []*string{&s2, &s3}, - NamedObject: map[string]*Object{ - "foo": {"FOO", "fooooo!!!"}, - "bar": {"BAR", "ba-ba-ba-ba-barrrr!!!"}, - }, - BaseObject: &Object{"BASE", "da base"}, - } - - ex1 := ` -Strptr = "blah" -Strptrs = ["abc", "def"] - -[NamedObject.foo] -Type = "FOO" -Description = "fooooo!!!" - -[NamedObject.bar] -Type = "BAR" -Description = "ba-ba-ba-ba-barrrr!!!" 
- -[BaseObject] -Type = "BASE" -Description = "da base" -` - dict := new(Dict) - _, err := Decode(ex1, dict) - if err != nil { - t.Errorf("Decode error: %v", err) - } - if !reflect.DeepEqual(expected, dict) { - t.Fatalf("\n%#v\n!=\n%#v\n", expected, dict) - } -} - -type sphere struct { - Center [3]float64 - Radius float64 -} - -func TestDecodeSimpleArray(t *testing.T) { - var s1 sphere - if _, err := Decode(`center = [0.0, 1.5, 0.0]`, &s1); err != nil { - t.Fatal(err) - } -} - -func TestDecodeArrayWrongSize(t *testing.T) { - var s1 sphere - if _, err := Decode(`center = [0.1, 2.3]`, &s1); err == nil { - t.Fatal("Expected array type mismatch error") - } -} - -func TestDecodeLargeIntoSmallInt(t *testing.T) { - type table struct { - Value int8 - } - var tab table - if _, err := Decode(`value = 500`, &tab); err == nil { - t.Fatal("Expected integer out-of-bounds error.") - } -} - -func TestDecodeSizedInts(t *testing.T) { - type table struct { - U8 uint8 - U16 uint16 - U32 uint32 - U64 uint64 - U uint - I8 int8 - I16 int16 - I32 int32 - I64 int64 - I int - } - answer := table{1, 1, 1, 1, 1, -1, -1, -1, -1, -1} - toml := ` - u8 = 1 - u16 = 1 - u32 = 1 - u64 = 1 - u = 1 - i8 = -1 - i16 = -1 - i32 = -1 - i64 = -1 - i = -1 - ` - var tab table - if _, err := Decode(toml, &tab); err != nil { - t.Fatal(err.Error()) - } - if answer != tab { - t.Fatalf("Expected %#v but got %#v", answer, tab) - } -} - -func TestUnmarshaler(t *testing.T) { - - var tomlBlob = ` -[dishes.hamboogie] -name = "Hamboogie with fries" -price = 10.99 - -[[dishes.hamboogie.ingredients]] -name = "Bread Bun" - -[[dishes.hamboogie.ingredients]] -name = "Lettuce" - -[[dishes.hamboogie.ingredients]] -name = "Real Beef Patty" - -[[dishes.hamboogie.ingredients]] -name = "Tomato" - -[dishes.eggsalad] -name = "Egg Salad with rice" -price = 3.99 - -[[dishes.eggsalad.ingredients]] -name = "Egg" - -[[dishes.eggsalad.ingredients]] -name = "Mayo" - -[[dishes.eggsalad.ingredients]] -name = "Rice" -` - m := &menu{} - if _, err := Decode(tomlBlob, m); err != nil { - log.Fatal(err) - } - - if len(m.Dishes) != 2 { - t.Log("two dishes should be loaded with UnmarshalTOML()") - t.Errorf("expected %d but got %d", 2, len(m.Dishes)) - } - - eggSalad := m.Dishes["eggsalad"] - if _, ok := interface{}(eggSalad).(dish); !ok { - t.Errorf("expected a dish") - } - - if eggSalad.Name != "Egg Salad with rice" { - t.Errorf("expected the dish to be named 'Egg Salad with rice'") - } - - if len(eggSalad.Ingredients) != 3 { - t.Log("dish should be loaded with UnmarshalTOML()") - t.Errorf("expected %d but got %d", 3, len(eggSalad.Ingredients)) - } - - found := false - for _, i := range eggSalad.Ingredients { - if i.Name == "Rice" { - found = true - break - } - } - if !found { - t.Error("Rice was not loaded in UnmarshalTOML()") - } - - // test on a value - must be passed as * - o := menu{} - if _, err := Decode(tomlBlob, &o); err != nil { - log.Fatal(err) - } - -} - -type menu struct { - Dishes map[string]dish -} - -func (m *menu) UnmarshalTOML(p interface{}) error { - m.Dishes = make(map[string]dish) - data, _ := p.(map[string]interface{}) - dishes := data["dishes"].(map[string]interface{}) - for n, v := range dishes { - if d, ok := v.(map[string]interface{}); ok { - nd := dish{} - nd.UnmarshalTOML(d) - m.Dishes[n] = nd - } else { - return fmt.Errorf("not a dish") - } - } - return nil -} - -type dish struct { - Name string - Price float32 - Ingredients []ingredient -} - -func (d *dish) UnmarshalTOML(p interface{}) error { - data, _ := p.(map[string]interface{}) - d.Name, 
_ = data["name"].(string) - d.Price, _ = data["price"].(float32) - ingredients, _ := data["ingredients"].([]map[string]interface{}) - for _, e := range ingredients { - n, _ := interface{}(e).(map[string]interface{}) - name, _ := n["name"].(string) - i := ingredient{name} - d.Ingredients = append(d.Ingredients, i) - } - return nil -} - -type ingredient struct { - Name string -} - -func ExampleMetaData_PrimitiveDecode() { - var md MetaData - var err error - - var tomlBlob = ` -ranking = ["Springsteen", "J Geils"] - -[bands.Springsteen] -started = 1973 -albums = ["Greetings", "WIESS", "Born to Run", "Darkness"] - -[bands.J Geils] -started = 1970 -albums = ["The J. Geils Band", "Full House", "Blow Your Face Out"] -` - - type band struct { - Started int - Albums []string - } - type classics struct { - Ranking []string - Bands map[string]Primitive - } - - // Do the initial decode. Reflection is delayed on Primitive values. - var music classics - if md, err = Decode(tomlBlob, &music); err != nil { - log.Fatal(err) - } - - // MetaData still includes information on Primitive values. - fmt.Printf("Is `bands.Springsteen` defined? %v\n", - md.IsDefined("bands", "Springsteen")) - - // Decode primitive data into Go values. - for _, artist := range music.Ranking { - // A band is a primitive value, so we need to decode it to get a - // real `band` value. - primValue := music.Bands[artist] - - var aBand band - if err = md.PrimitiveDecode(primValue, &aBand); err != nil { - log.Fatal(err) - } - fmt.Printf("%s started in %d.\n", artist, aBand.Started) - } - // Check to see if there were any fields left undecoded. - // Note that this won't be empty before decoding the Primitive value! - fmt.Printf("Undecoded: %q\n", md.Undecoded()) - - // Output: - // Is `bands.Springsteen` defined? true - // Springsteen started in 1973. - // J Geils started in 1970. - // Undecoded: [] -} - -func ExampleDecode() { - var tomlBlob = ` -# Some comments. -[alpha] -ip = "10.0.0.1" - - [alpha.config] - Ports = [ 8001, 8002 ] - Location = "Toronto" - Created = 1987-07-05T05:45:00Z - -[beta] -ip = "10.0.0.2" - - [beta.config] - Ports = [ 9001, 9002 ] - Location = "New Jersey" - Created = 1887-01-05T05:55:00Z -` - - type serverConfig struct { - Ports []int - Location string - Created time.Time - } - - type server struct { - IP string `toml:"ip"` - Config serverConfig `toml:"config"` - } - - type servers map[string]server - - var config servers - if _, err := Decode(tomlBlob, &config); err != nil { - log.Fatal(err) - } - - for _, name := range []string{"alpha", "beta"} { - s := config[name] - fmt.Printf("Server: %s (ip: %s) in %s created on %s\n", - name, s.IP, s.Config.Location, - s.Config.Created.Format("2006-01-02")) - fmt.Printf("Ports: %v\n", s.Config.Ports) - } - - // Output: - // Server: alpha (ip: 10.0.0.1) in Toronto created on 1987-07-05 - // Ports: [8001 8002] - // Server: beta (ip: 10.0.0.2) in New Jersey created on 1887-01-05 - // Ports: [9001 9002] -} - -type duration struct { - time.Duration -} - -func (d *duration) UnmarshalText(text []byte) error { - var err error - d.Duration, err = time.ParseDuration(string(text)) - return err -} - -// Example Unmarshaler shows how to decode TOML strings into your own -// custom data type. 
-func Example_unmarshaler() { - blob := ` -[[song]] -name = "Thunder Road" -duration = "4m49s" - -[[song]] -name = "Stairway to Heaven" -duration = "8m03s" -` - type song struct { - Name string - Duration duration - } - type songs struct { - Song []song - } - var favorites songs - if _, err := Decode(blob, &favorites); err != nil { - log.Fatal(err) - } - - // Code to implement the TextUnmarshaler interface for `duration`: - // - // type duration struct { - // time.Duration - // } - // - // func (d *duration) UnmarshalText(text []byte) error { - // var err error - // d.Duration, err = time.ParseDuration(string(text)) - // return err - // } - - for _, s := range favorites.Song { - fmt.Printf("%s (%s)\n", s.Name, s.Duration) - } - // Output: - // Thunder Road (4m49s) - // Stairway to Heaven (8m3s) -} - -// Example StrictDecoding shows how to detect whether there are keys in the -// TOML document that weren't decoded into the value given. This is useful -// for returning an error to the user if they've included extraneous fields -// in their configuration. -func Example_strictDecoding() { - var blob = ` -key1 = "value1" -key2 = "value2" -key3 = "value3" -` - type config struct { - Key1 string - Key3 string - } - - var conf config - md, err := Decode(blob, &conf) - if err != nil { - log.Fatal(err) - } - fmt.Printf("Undecoded keys: %q\n", md.Undecoded()) - // Output: - // Undecoded keys: ["key2"] -} - -// Example UnmarshalTOML shows how to implement a struct type that knows how to -// unmarshal itself. The struct must take full responsibility for mapping the -// values passed into the struct. The method may be used with interfaces in a -// struct in cases where the actual type is not known until the data is examined. -func Example_unmarshalTOML() { - - var blob = ` -[[parts]] -type = "valve" -id = "valve-1" -size = 1.2 -rating = 4 - -[[parts]] -type = "valve" -id = "valve-2" -size = 2.1 -rating = 5 - -[[parts]] -type = "pipe" -id = "pipe-1" -length = 2.1 -diameter = 12 - -[[parts]] -type = "cable" -id = "cable-1" -length = 12 -rating = 3.1 -` - o := &order{} - err := Unmarshal([]byte(blob), o) - if err != nil { - log.Fatal(err) - } - - fmt.Println(len(o.parts)) - - for _, part := range o.parts { - fmt.Println(part.Name()) - } - - // Code to implement UmarshalJSON. - - // type order struct { - // // NOTE `order.parts` is a private slice of type `part` which is an - // // interface and may only be loaded from toml using the UnmarshalTOML() - // // method of the Umarshaler interface. - // parts parts - // } - - // func (o *order) UnmarshalTOML(data interface{}) error { - - // // NOTE the example below contains detailed type casting to show how - // // the 'data' is retrieved. In operational use, a type cast wrapper - // // may be prefered e.g. 
- // // - // // func AsMap(v interface{}) (map[string]interface{}, error) { - // // return v.(map[string]interface{}) - // // } - // // - // // resulting in: - // // d, _ := AsMap(data) - // // - - // d, _ := data.(map[string]interface{}) - // parts, _ := d["parts"].([]map[string]interface{}) - - // for _, p := range parts { - - // typ, _ := p["type"].(string) - // id, _ := p["id"].(string) - - // // detect the type of part and handle each case - // switch p["type"] { - // case "valve": - - // size := float32(p["size"].(float64)) - // rating := int(p["rating"].(int64)) - - // valve := &valve{ - // Type: typ, - // ID: id, - // Size: size, - // Rating: rating, - // } - - // o.parts = append(o.parts, valve) - - // case "pipe": - - // length := float32(p["length"].(float64)) - // diameter := int(p["diameter"].(int64)) - - // pipe := &pipe{ - // Type: typ, - // ID: id, - // Length: length, - // Diameter: diameter, - // } - - // o.parts = append(o.parts, pipe) - - // case "cable": - - // length := int(p["length"].(int64)) - // rating := float32(p["rating"].(float64)) - - // cable := &cable{ - // Type: typ, - // ID: id, - // Length: length, - // Rating: rating, - // } - - // o.parts = append(o.parts, cable) - - // } - // } - - // return nil - // } - - // type parts []part - - // type part interface { - // Name() string - // } - - // type valve struct { - // Type string - // ID string - // Size float32 - // Rating int - // } - - // func (v *valve) Name() string { - // return fmt.Sprintf("VALVE: %s", v.ID) - // } - - // type pipe struct { - // Type string - // ID string - // Length float32 - // Diameter int - // } - - // func (p *pipe) Name() string { - // return fmt.Sprintf("PIPE: %s", p.ID) - // } - - // type cable struct { - // Type string - // ID string - // Length int - // Rating float32 - // } - - // func (c *cable) Name() string { - // return fmt.Sprintf("CABLE: %s", c.ID) - // } - - // Output: - // 4 - // VALVE: valve-1 - // VALVE: valve-2 - // PIPE: pipe-1 - // CABLE: cable-1 - -} - -type order struct { - // NOTE `order.parts` is a private slice of type `part` which is an - // interface and may only be loaded from toml using the UnmarshalTOML() - // method of the Umarshaler interface. - parts parts -} - -func (o *order) UnmarshalTOML(data interface{}) error { - - // NOTE the example below contains detailed type casting to show how - // the 'data' is retrieved. In operational use, a type cast wrapper - // may be prefered e.g. 
- // - // func AsMap(v interface{}) (map[string]interface{}, error) { - // return v.(map[string]interface{}) - // } - // - // resulting in: - // d, _ := AsMap(data) - // - - d, _ := data.(map[string]interface{}) - parts, _ := d["parts"].([]map[string]interface{}) - - for _, p := range parts { - - typ, _ := p["type"].(string) - id, _ := p["id"].(string) - - // detect the type of part and handle each case - switch p["type"] { - case "valve": - - size := float32(p["size"].(float64)) - rating := int(p["rating"].(int64)) - - valve := &valve{ - Type: typ, - ID: id, - Size: size, - Rating: rating, - } - - o.parts = append(o.parts, valve) - - case "pipe": - - length := float32(p["length"].(float64)) - diameter := int(p["diameter"].(int64)) - - pipe := &pipe{ - Type: typ, - ID: id, - Length: length, - Diameter: diameter, - } - - o.parts = append(o.parts, pipe) - - case "cable": - - length := int(p["length"].(int64)) - rating := float32(p["rating"].(float64)) - - cable := &cable{ - Type: typ, - ID: id, - Length: length, - Rating: rating, - } - - o.parts = append(o.parts, cable) - - } - } - - return nil -} - -type parts []part - -type part interface { - Name() string -} - -type valve struct { - Type string - ID string - Size float32 - Rating int -} - -func (v *valve) Name() string { - return fmt.Sprintf("VALVE: %s", v.ID) -} - -type pipe struct { - Type string - ID string - Length float32 - Diameter int -} - -func (p *pipe) Name() string { - return fmt.Sprintf("PIPE: %s", p.ID) -} - -type cable struct { - Type string - ID string - Length int - Rating float32 -} - -func (c *cable) Name() string { - return fmt.Sprintf("CABLE: %s", c.ID) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go deleted file mode 100644 index fe26800..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go +++ /dev/null @@ -1,27 +0,0 @@ -/* -Package toml provides facilities for decoding and encoding TOML configuration -files via reflection. There is also support for delaying decoding with -the Primitive type, and querying the set of keys in a TOML document with the -MetaData type. - -The specification implemented: https://github.com/mojombo/toml - -The sub-command github.com/BurntSushi/toml/cmd/tomlv can be used to verify -whether a file is a valid TOML document. It can also be used to print the -type of each key in a TOML document. - -Testing - -There are two important types of tests used for this package. The first is -contained inside '*_test.go' files and uses the standard Go unit testing -framework. These tests are primarily devoted to holistically testing the -decoder and encoder. - -The second type of testing is used to verify the implementation's adherence -to the TOML specification. These tests have been factored into their own -project: https://github.com/BurntSushi/toml-test - -The reason the tests are in a separate project is so that they can be used by -any implementation of TOML. Namely, it is language agnostic. 
-*/ -package toml diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go deleted file mode 100644 index 3618713..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go +++ /dev/null @@ -1,515 +0,0 @@ -package toml - -import ( - "bufio" - "errors" - "fmt" - "io" - "reflect" - "sort" - "strconv" - "strings" - "time" -) - -type tomlEncodeError struct{ error } - -var ( - errArrayMixedElementTypes = errors.New( - "can't encode array with mixed element types") - errArrayNilElement = errors.New( - "can't encode array with nil element") - errNonString = errors.New( - "can't encode a map with non-string key type") - errAnonNonStruct = errors.New( - "can't encode an anonymous field that is not a struct") - errArrayNoTable = errors.New( - "TOML array element can't contain a table") - errNoKey = errors.New( - "top-level values must be a Go map or struct") - errAnything = errors.New("") // used in testing -) - -var quotedReplacer = strings.NewReplacer( - "\t", "\\t", - "\n", "\\n", - "\r", "\\r", - "\"", "\\\"", - "\\", "\\\\", -) - -// Encoder controls the encoding of Go values to a TOML document to some -// io.Writer. -// -// The indentation level can be controlled with the Indent field. -type Encoder struct { - // A single indentation level. By default it is two spaces. - Indent string - - // hasWritten is whether we have written any output to w yet. - hasWritten bool - w *bufio.Writer -} - -// NewEncoder returns a TOML encoder that encodes Go values to the io.Writer -// given. By default, a single indentation level is 2 spaces. -func NewEncoder(w io.Writer) *Encoder { - return &Encoder{ - w: bufio.NewWriter(w), - Indent: " ", - } -} - -// Encode writes a TOML representation of the Go value to the underlying -// io.Writer. If the value given cannot be encoded to a valid TOML document, -// then an error is returned. -// -// The mapping between Go values and TOML values should be precisely the same -// as for the Decode* functions. Similarly, the TextMarshaler interface is -// supported by encoding the resulting bytes as strings. (If you want to write -// arbitrary binary data then you will need to use something like base64 since -// TOML does not have any binary types.) -// -// When encoding TOML hashes (i.e., Go maps or structs), keys without any -// sub-hashes are encoded first. -// -// If a Go map is encoded, then its keys are sorted alphabetically for -// deterministic output. More control over this behavior may be provided if -// there is demand for it. -// -// Encoding Go values without a corresponding TOML representation---like map -// types with non-string keys---will cause an error to be returned. Similarly -// for mixed arrays/slices, arrays/slices with nil elements, embedded -// non-struct types and nested slices containing maps or structs. -// (e.g., [][]map[string]string is not allowed but []map[string]string is OK -// and so is []map[string][]string.) 
-func (enc *Encoder) Encode(v interface{}) error { - rv := eindirect(reflect.ValueOf(v)) - if err := enc.safeEncode(Key([]string{}), rv); err != nil { - return err - } - return enc.w.Flush() -} - -func (enc *Encoder) safeEncode(key Key, rv reflect.Value) (err error) { - defer func() { - if r := recover(); r != nil { - if terr, ok := r.(tomlEncodeError); ok { - err = terr.error - return - } - panic(r) - } - }() - enc.encode(key, rv) - return nil -} - -func (enc *Encoder) encode(key Key, rv reflect.Value) { - // Special case. Time needs to be in ISO8601 format. - // Special case. If we can marshal the type to text, then we used that. - // Basically, this prevents the encoder for handling these types as - // generic structs (or whatever the underlying type of a TextMarshaler is). - switch rv.Interface().(type) { - case time.Time, TextMarshaler: - enc.keyEqElement(key, rv) - return - } - - k := rv.Kind() - switch k { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64, - reflect.Float32, reflect.Float64, reflect.String, reflect.Bool: - enc.keyEqElement(key, rv) - case reflect.Array, reflect.Slice: - if typeEqual(tomlArrayHash, tomlTypeOfGo(rv)) { - enc.eArrayOfTables(key, rv) - } else { - enc.keyEqElement(key, rv) - } - case reflect.Interface: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Map: - if rv.IsNil() { - return - } - enc.eTable(key, rv) - case reflect.Ptr: - if rv.IsNil() { - return - } - enc.encode(key, rv.Elem()) - case reflect.Struct: - enc.eTable(key, rv) - default: - panic(e("Unsupported type for key '%s': %s", key, k)) - } -} - -// eElement encodes any value that can be an array element (primitives and -// arrays). -func (enc *Encoder) eElement(rv reflect.Value) { - switch v := rv.Interface().(type) { - case time.Time: - // Special case time.Time as a primitive. Has to come before - // TextMarshaler below because time.Time implements - // encoding.TextMarshaler, but we need to always use UTC. - enc.wf(v.In(time.FixedZone("UTC", 0)).Format("2006-01-02T15:04:05Z")) - return - case TextMarshaler: - // Special case. Use text marshaler if it's available for this value. - if s, err := v.MarshalText(); err != nil { - encPanic(err) - } else { - enc.writeQuoted(string(s)) - } - return - } - switch rv.Kind() { - case reflect.Bool: - enc.wf(strconv.FormatBool(rv.Bool())) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - enc.wf(strconv.FormatInt(rv.Int(), 10)) - case reflect.Uint, reflect.Uint8, reflect.Uint16, - reflect.Uint32, reflect.Uint64: - enc.wf(strconv.FormatUint(rv.Uint(), 10)) - case reflect.Float32: - enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 32))) - case reflect.Float64: - enc.wf(floatAddDecimal(strconv.FormatFloat(rv.Float(), 'f', -1, 64))) - case reflect.Array, reflect.Slice: - enc.eArrayOrSliceElement(rv) - case reflect.Interface: - enc.eElement(rv.Elem()) - case reflect.String: - enc.writeQuoted(rv.String()) - default: - panic(e("Unexpected primitive type: %s", rv.Kind())) - } -} - -// By the TOML spec, all floats must have a decimal with at least one -// number on either side. 
-func floatAddDecimal(fstr string) string { - if !strings.Contains(fstr, ".") { - return fstr + ".0" - } - return fstr -} - -func (enc *Encoder) writeQuoted(s string) { - enc.wf("\"%s\"", quotedReplacer.Replace(s)) -} - -func (enc *Encoder) eArrayOrSliceElement(rv reflect.Value) { - length := rv.Len() - enc.wf("[") - for i := 0; i < length; i++ { - elem := rv.Index(i) - enc.eElement(elem) - if i != length-1 { - enc.wf(", ") - } - } - enc.wf("]") -} - -func (enc *Encoder) eArrayOfTables(key Key, rv reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - panicIfInvalidKey(key, true) - for i := 0; i < rv.Len(); i++ { - trv := rv.Index(i) - if isNil(trv) { - continue - } - enc.newline() - enc.wf("%s[[%s]]", enc.indentStr(key), key.String()) - enc.newline() - enc.eMapOrStruct(key, trv) - } -} - -func (enc *Encoder) eTable(key Key, rv reflect.Value) { - if len(key) == 1 { - // Output an extra new line between top-level tables. - // (The newline isn't written if nothing else has been written though.) - enc.newline() - } - if len(key) > 0 { - panicIfInvalidKey(key, true) - enc.wf("%s[%s]", enc.indentStr(key), key.String()) - enc.newline() - } - enc.eMapOrStruct(key, rv) -} - -func (enc *Encoder) eMapOrStruct(key Key, rv reflect.Value) { - switch rv := eindirect(rv); rv.Kind() { - case reflect.Map: - enc.eMap(key, rv) - case reflect.Struct: - enc.eStruct(key, rv) - default: - panic("eTable: unhandled reflect.Value Kind: " + rv.Kind().String()) - } -} - -func (enc *Encoder) eMap(key Key, rv reflect.Value) { - rt := rv.Type() - if rt.Key().Kind() != reflect.String { - encPanic(errNonString) - } - - // Sort keys so that we have deterministic output. And write keys directly - // underneath this key first, before writing sub-structs or sub-maps. - var mapKeysDirect, mapKeysSub []string - for _, mapKey := range rv.MapKeys() { - k := mapKey.String() - if typeIsHash(tomlTypeOfGo(rv.MapIndex(mapKey))) { - mapKeysSub = append(mapKeysSub, k) - } else { - mapKeysDirect = append(mapKeysDirect, k) - } - } - - var writeMapKeys = func(mapKeys []string) { - sort.Strings(mapKeys) - for _, mapKey := range mapKeys { - mrv := rv.MapIndex(reflect.ValueOf(mapKey)) - if isNil(mrv) { - // Don't write anything for nil fields. - continue - } - enc.encode(key.add(mapKey), mrv) - } - } - writeMapKeys(mapKeysDirect) - writeMapKeys(mapKeysSub) -} - -func (enc *Encoder) eStruct(key Key, rv reflect.Value) { - // Write keys for fields directly under this key first, because if we write - // a field that creates a new table, then all keys under it will be in that - // table (not the one we're writing here). - rt := rv.Type() - var fieldsDirect, fieldsSub [][]int - var addFields func(rt reflect.Type, rv reflect.Value, start []int) - addFields = func(rt reflect.Type, rv reflect.Value, start []int) { - for i := 0; i < rt.NumField(); i++ { - f := rt.Field(i) - // skip unexporded fields - if f.PkgPath != "" { - continue - } - frv := rv.Field(i) - if f.Anonymous { - frv := eindirect(frv) - t := frv.Type() - if t.Kind() != reflect.Struct { - encPanic(errAnonNonStruct) - } - addFields(t, frv, f.Index) - } else if typeIsHash(tomlTypeOfGo(frv)) { - fieldsSub = append(fieldsSub, append(start, f.Index...)) - } else { - fieldsDirect = append(fieldsDirect, append(start, f.Index...)) - } - } - } - addFields(rt, rv, nil) - - var writeFields = func(fields [][]int) { - for _, fieldIndex := range fields { - sft := rt.FieldByIndex(fieldIndex) - sf := rv.FieldByIndex(fieldIndex) - if isNil(sf) { - // Don't write anything for nil fields. 
- continue - } - - keyName := sft.Tag.Get("toml") - if keyName == "-" { - continue - } - if keyName == "" { - keyName = sft.Name - } - enc.encode(key.add(keyName), sf) - } - } - writeFields(fieldsDirect) - writeFields(fieldsSub) -} - -// tomlTypeName returns the TOML type name of the Go value's type. It is used to -// determine whether the types of array elements are mixed (which is forbidden). -// If the Go value is nil, then it is illegal for it to be an array element, and -// valueIsNil is returned as true. - -// Returns the TOML type of a Go value. The type may be `nil`, which means -// no concrete TOML type could be found. -func tomlTypeOfGo(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() { - return nil - } - switch rv.Kind() { - case reflect.Bool: - return tomlBool - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, - reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, - reflect.Uint64: - return tomlInteger - case reflect.Float32, reflect.Float64: - return tomlFloat - case reflect.Array, reflect.Slice: - if typeEqual(tomlHash, tomlArrayType(rv)) { - return tomlArrayHash - } else { - return tomlArray - } - case reflect.Ptr, reflect.Interface: - return tomlTypeOfGo(rv.Elem()) - case reflect.String: - return tomlString - case reflect.Map: - return tomlHash - case reflect.Struct: - switch rv.Interface().(type) { - case time.Time: - return tomlDatetime - case TextMarshaler: - return tomlString - default: - return tomlHash - } - default: - panic("unexpected reflect.Kind: " + rv.Kind().String()) - } -} - -// tomlArrayType returns the element type of a TOML array. The type returned -// may be nil if it cannot be determined (e.g., a nil slice or a zero length -// slize). This function may also panic if it finds a type that cannot be -// expressed in TOML (such as nil elements, heterogeneous arrays or directly -// nested arrays of tables). -func tomlArrayType(rv reflect.Value) tomlType { - if isNil(rv) || !rv.IsValid() || rv.Len() == 0 { - return nil - } - firstType := tomlTypeOfGo(rv.Index(0)) - if firstType == nil { - encPanic(errArrayNilElement) - } - - rvlen := rv.Len() - for i := 1; i < rvlen; i++ { - elem := rv.Index(i) - switch elemType := tomlTypeOfGo(elem); { - case elemType == nil: - encPanic(errArrayNilElement) - case !typeEqual(firstType, elemType): - encPanic(errArrayMixedElementTypes) - } - } - // If we have a nested array, then we must make sure that the nested - // array contains ONLY primitives. - // This checks arbitrarily nested arrays. 
- if typeEqual(firstType, tomlArray) || typeEqual(firstType, tomlArrayHash) { - nest := tomlArrayType(eindirect(rv.Index(0))) - if typeEqual(nest, tomlHash) || typeEqual(nest, tomlArrayHash) { - encPanic(errArrayNoTable) - } - } - return firstType -} - -func (enc *Encoder) newline() { - if enc.hasWritten { - enc.wf("\n") - } -} - -func (enc *Encoder) keyEqElement(key Key, val reflect.Value) { - if len(key) == 0 { - encPanic(errNoKey) - } - panicIfInvalidKey(key, false) - enc.wf("%s%s = ", enc.indentStr(key), key[len(key)-1]) - enc.eElement(val) - enc.newline() -} - -func (enc *Encoder) wf(format string, v ...interface{}) { - if _, err := fmt.Fprintf(enc.w, format, v...); err != nil { - encPanic(err) - } - enc.hasWritten = true -} - -func (enc *Encoder) indentStr(key Key) string { - return strings.Repeat(enc.Indent, len(key)-1) -} - -func encPanic(err error) { - panic(tomlEncodeError{err}) -} - -func eindirect(v reflect.Value) reflect.Value { - switch v.Kind() { - case reflect.Ptr, reflect.Interface: - return eindirect(v.Elem()) - default: - return v - } -} - -func isNil(rv reflect.Value) bool { - switch rv.Kind() { - case reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: - return rv.IsNil() - default: - return false - } -} - -func panicIfInvalidKey(key Key, hash bool) { - if hash { - for _, k := range key { - if !isValidTableName(k) { - encPanic(e("Key '%s' is not a valid table name. Table names "+ - "cannot contain '[', ']' or '.'.", key.String())) - } - } - } else { - if !isValidKeyName(key[len(key)-1]) { - encPanic(e("Key '%s' is not a name. Key names "+ - "cannot contain whitespace.", key.String())) - } - } -} - -func isValidTableName(s string) bool { - if len(s) == 0 { - return false - } - for _, r := range s { - if r == '[' || r == ']' || r == '.' { - return false - } - } - return true -} - -func isValidKeyName(s string) bool { - if len(s) == 0 { - return false - } - return true -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go deleted file mode 100644 index 74a5ee5..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go +++ /dev/null @@ -1,506 +0,0 @@ -package toml - -import ( - "bytes" - "fmt" - "log" - "net" - "testing" - "time" -) - -func TestEncodeRoundTrip(t *testing.T) { - type Config struct { - Age int - Cats []string - Pi float64 - Perfection []int - DOB time.Time - Ipaddress net.IP - } - - var inputs = Config{ - 13, - []string{"one", "two", "three"}, - 3.145, - []int{11, 2, 3, 4}, - time.Now(), - net.ParseIP("192.168.59.254"), - } - - var firstBuffer bytes.Buffer - e := NewEncoder(&firstBuffer) - err := e.Encode(inputs) - if err != nil { - t.Fatal(err) - } - var outputs Config - if _, err := Decode(firstBuffer.String(), &outputs); err != nil { - log.Printf("Could not decode:\n-----\n%s\n-----\n", - firstBuffer.String()) - t.Fatal(err) - } - - // could test each value individually, but I'm lazy - var secondBuffer bytes.Buffer - e2 := NewEncoder(&secondBuffer) - err = e2.Encode(outputs) - if err != nil { - t.Fatal(err) - } - if firstBuffer.String() != secondBuffer.String() { - t.Error( - firstBuffer.String(), - "\n\n is not identical to\n\n", - secondBuffer.String()) - } -} - -// XXX(burntsushi) -// I think these tests probably should be removed. They are good, but they -// ought to be obsolete by toml-test. 
-func TestEncode(t *testing.T) { - type Embedded struct { - Int int `toml:"_int"` - } - type NonStruct int - - date := time.Date(2014, 5, 11, 20, 30, 40, 0, time.FixedZone("IST", 3600)) - dateStr := "2014-05-11T19:30:40Z" - - tests := map[string]struct { - input interface{} - wantOutput string - wantError error - }{ - "bool field": { - input: struct { - BoolTrue bool - BoolFalse bool - }{true, false}, - wantOutput: "BoolTrue = true\nBoolFalse = false\n", - }, - "int fields": { - input: struct { - Int int - Int8 int8 - Int16 int16 - Int32 int32 - Int64 int64 - }{1, 2, 3, 4, 5}, - wantOutput: "Int = 1\nInt8 = 2\nInt16 = 3\nInt32 = 4\nInt64 = 5\n", - }, - "uint fields": { - input: struct { - Uint uint - Uint8 uint8 - Uint16 uint16 - Uint32 uint32 - Uint64 uint64 - }{1, 2, 3, 4, 5}, - wantOutput: "Uint = 1\nUint8 = 2\nUint16 = 3\nUint32 = 4" + - "\nUint64 = 5\n", - }, - "float fields": { - input: struct { - Float32 float32 - Float64 float64 - }{1.5, 2.5}, - wantOutput: "Float32 = 1.5\nFloat64 = 2.5\n", - }, - "string field": { - input: struct{ String string }{"foo"}, - wantOutput: "String = \"foo\"\n", - }, - "string field and unexported field": { - input: struct { - String string - unexported int - }{"foo", 0}, - wantOutput: "String = \"foo\"\n", - }, - "datetime field in UTC": { - input: struct{ Date time.Time }{date}, - wantOutput: fmt.Sprintf("Date = %s\n", dateStr), - }, - "datetime field as primitive": { - // Using a map here to fail if isStructOrMap() returns true for - // time.Time. - input: map[string]interface{}{ - "Date": date, - "Int": 1, - }, - wantOutput: fmt.Sprintf("Date = %s\nInt = 1\n", dateStr), - }, - "array fields": { - input: struct { - IntArray0 [0]int - IntArray3 [3]int - }{[0]int{}, [3]int{1, 2, 3}}, - wantOutput: "IntArray0 = []\nIntArray3 = [1, 2, 3]\n", - }, - "slice fields": { - input: struct{ IntSliceNil, IntSlice0, IntSlice3 []int }{ - nil, []int{}, []int{1, 2, 3}, - }, - wantOutput: "IntSlice0 = []\nIntSlice3 = [1, 2, 3]\n", - }, - "datetime slices": { - input: struct{ DatetimeSlice []time.Time }{ - []time.Time{date, date}, - }, - wantOutput: fmt.Sprintf("DatetimeSlice = [%s, %s]\n", - dateStr, dateStr), - }, - "nested arrays and slices": { - input: struct { - SliceOfArrays [][2]int - ArrayOfSlices [2][]int - SliceOfArraysOfSlices [][2][]int - ArrayOfSlicesOfArrays [2][][2]int - SliceOfMixedArrays [][2]interface{} - ArrayOfMixedSlices [2][]interface{} - }{ - [][2]int{{1, 2}, {3, 4}}, - [2][]int{{1, 2}, {3, 4}}, - [][2][]int{ - { - {1, 2}, {3, 4}, - }, - { - {5, 6}, {7, 8}, - }, - }, - [2][][2]int{ - { - {1, 2}, {3, 4}, - }, - { - {5, 6}, {7, 8}, - }, - }, - [][2]interface{}{ - {1, 2}, {"a", "b"}, - }, - [2][]interface{}{ - {1, 2}, {"a", "b"}, - }, - }, - wantOutput: `SliceOfArrays = [[1, 2], [3, 4]] -ArrayOfSlices = [[1, 2], [3, 4]] -SliceOfArraysOfSlices = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] -ArrayOfSlicesOfArrays = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] -SliceOfMixedArrays = [[1, 2], ["a", "b"]] -ArrayOfMixedSlices = [[1, 2], ["a", "b"]] -`, - }, - "empty slice": { - input: struct{ Empty []interface{} }{[]interface{}{}}, - wantOutput: "Empty = []\n", - }, - "(error) slice with element type mismatch (string and integer)": { - input: struct{ Mixed []interface{} }{[]interface{}{1, "a"}}, - wantError: errArrayMixedElementTypes, - }, - "(error) slice with element type mismatch (integer and float)": { - input: struct{ Mixed []interface{} }{[]interface{}{1, 2.5}}, - wantError: errArrayMixedElementTypes, - }, - "slice with elems of differing Go types, same TOML types": 
{ - input: struct { - MixedInts []interface{} - MixedFloats []interface{} - }{ - []interface{}{ - int(1), int8(2), int16(3), int32(4), int64(5), - uint(1), uint8(2), uint16(3), uint32(4), uint64(5), - }, - []interface{}{float32(1.5), float64(2.5)}, - }, - wantOutput: "MixedInts = [1, 2, 3, 4, 5, 1, 2, 3, 4, 5]\n" + - "MixedFloats = [1.5, 2.5]\n", - }, - "(error) slice w/ element type mismatch (one is nested array)": { - input: struct{ Mixed []interface{} }{ - []interface{}{1, []interface{}{2}}, - }, - wantError: errArrayMixedElementTypes, - }, - "(error) slice with 1 nil element": { - input: struct{ NilElement1 []interface{} }{[]interface{}{nil}}, - wantError: errArrayNilElement, - }, - "(error) slice with 1 nil element (and other non-nil elements)": { - input: struct{ NilElement []interface{} }{ - []interface{}{1, nil}, - }, - wantError: errArrayNilElement, - }, - "simple map": { - input: map[string]int{"a": 1, "b": 2}, - wantOutput: "a = 1\nb = 2\n", - }, - "map with interface{} value type": { - input: map[string]interface{}{"a": 1, "b": "c"}, - wantOutput: "a = 1\nb = \"c\"\n", - }, - "map with interface{} value type, some of which are structs": { - input: map[string]interface{}{ - "a": struct{ Int int }{2}, - "b": 1, - }, - wantOutput: "b = 1\n\n[a]\n Int = 2\n", - }, - "nested map": { - input: map[string]map[string]int{ - "a": {"b": 1}, - "c": {"d": 2}, - }, - wantOutput: "[a]\n b = 1\n\n[c]\n d = 2\n", - }, - "nested struct": { - input: struct{ Struct struct{ Int int } }{ - struct{ Int int }{1}, - }, - wantOutput: "[Struct]\n Int = 1\n", - }, - "nested struct and non-struct field": { - input: struct { - Struct struct{ Int int } - Bool bool - }{struct{ Int int }{1}, true}, - wantOutput: "Bool = true\n\n[Struct]\n Int = 1\n", - }, - "2 nested structs": { - input: struct{ Struct1, Struct2 struct{ Int int } }{ - struct{ Int int }{1}, struct{ Int int }{2}, - }, - wantOutput: "[Struct1]\n Int = 1\n\n[Struct2]\n Int = 2\n", - }, - "deeply nested structs": { - input: struct { - Struct1, Struct2 struct{ Struct3 *struct{ Int int } } - }{ - struct{ Struct3 *struct{ Int int } }{&struct{ Int int }{1}}, - struct{ Struct3 *struct{ Int int } }{nil}, - }, - wantOutput: "[Struct1]\n [Struct1.Struct3]\n Int = 1" + - "\n\n[Struct2]\n", - }, - "nested struct with nil struct elem": { - input: struct { - Struct struct{ Inner *struct{ Int int } } - }{ - struct{ Inner *struct{ Int int } }{nil}, - }, - wantOutput: "[Struct]\n", - }, - "nested struct with no fields": { - input: struct { - Struct struct{ Inner struct{} } - }{ - struct{ Inner struct{} }{struct{}{}}, - }, - wantOutput: "[Struct]\n [Struct.Inner]\n", - }, - "struct with tags": { - input: struct { - Struct struct { - Int int `toml:"_int"` - } `toml:"_struct"` - Bool bool `toml:"_bool"` - }{ - struct { - Int int `toml:"_int"` - }{1}, true, - }, - wantOutput: "_bool = true\n\n[_struct]\n _int = 1\n", - }, - "embedded struct": { - input: struct{ Embedded }{Embedded{1}}, - wantOutput: "_int = 1\n", - }, - "embedded *struct": { - input: struct{ *Embedded }{&Embedded{1}}, - wantOutput: "_int = 1\n", - }, - "nested embedded struct": { - input: struct { - Struct struct{ Embedded } `toml:"_struct"` - }{struct{ Embedded }{Embedded{1}}}, - wantOutput: "[_struct]\n _int = 1\n", - }, - "nested embedded *struct": { - input: struct { - Struct struct{ *Embedded } `toml:"_struct"` - }{struct{ *Embedded }{&Embedded{1}}}, - wantOutput: "[_struct]\n _int = 1\n", - }, - "array of tables": { - input: struct { - Structs []*struct{ Int int } `toml:"struct"` - }{ - 
[]*struct{ Int int }{{1}, {3}}, - }, - wantOutput: "[[struct]]\n Int = 1\n\n[[struct]]\n Int = 3\n", - }, - "array of tables order": { - input: map[string]interface{}{ - "map": map[string]interface{}{ - "zero": 5, - "arr": []map[string]int{ - map[string]int{ - "friend": 5, - }, - }, - }, - }, - wantOutput: "[map]\n zero = 5\n\n [[map.arr]]\n friend = 5\n", - }, - "(error) top-level slice": { - input: []struct{ Int int }{{1}, {2}, {3}}, - wantError: errNoKey, - }, - "(error) slice of slice": { - input: struct { - Slices [][]struct{ Int int } - }{ - [][]struct{ Int int }{{{1}}, {{2}}, {{3}}}, - }, - wantError: errArrayNoTable, - }, - "(error) map no string key": { - input: map[int]string{1: ""}, - wantError: errNonString, - }, - "(error) anonymous non-struct": { - input: struct{ NonStruct }{5}, - wantError: errAnonNonStruct, - }, - "(error) empty key name": { - input: map[string]int{"": 1}, - wantError: errAnything, - }, - "(error) empty map name": { - input: map[string]interface{}{ - "": map[string]int{"v": 1}, - }, - wantError: errAnything, - }, - } - for label, test := range tests { - encodeExpected(t, label, test.input, test.wantOutput, test.wantError) - } -} - -func TestEncodeNestedTableArrays(t *testing.T) { - type song struct { - Name string `toml:"name"` - } - type album struct { - Name string `toml:"name"` - Songs []song `toml:"songs"` - } - type springsteen struct { - Albums []album `toml:"albums"` - } - value := springsteen{ - []album{ - {"Born to Run", - []song{{"Jungleland"}, {"Meeting Across the River"}}}, - {"Born in the USA", - []song{{"Glory Days"}, {"Dancing in the Dark"}}}, - }, - } - expected := `[[albums]] - name = "Born to Run" - - [[albums.songs]] - name = "Jungleland" - - [[albums.songs]] - name = "Meeting Across the River" - -[[albums]] - name = "Born in the USA" - - [[albums.songs]] - name = "Glory Days" - - [[albums.songs]] - name = "Dancing in the Dark" -` - encodeExpected(t, "nested table arrays", value, expected, nil) -} - -func TestEncodeArrayHashWithNormalHashOrder(t *testing.T) { - type Alpha struct { - V int - } - type Beta struct { - V int - } - type Conf struct { - V int - A Alpha - B []Beta - } - - val := Conf{ - V: 1, - A: Alpha{2}, - B: []Beta{{3}}, - } - expected := "V = 1\n\n[A]\n V = 2\n\n[[B]]\n V = 3\n" - encodeExpected(t, "array hash with normal hash order", val, expected, nil) -} - -func encodeExpected( - t *testing.T, label string, val interface{}, wantStr string, wantErr error, -) { - var buf bytes.Buffer - enc := NewEncoder(&buf) - err := enc.Encode(val) - if err != wantErr { - if wantErr != nil { - if wantErr == errAnything && err != nil { - return - } - t.Errorf("%s: want Encode error %v, got %v", label, wantErr, err) - } else { - t.Errorf("%s: Encode failed: %s", label, err) - } - } - if err != nil { - return - } - if got := buf.String(); wantStr != got { - t.Errorf("%s: want\n-----\n%q\n-----\nbut got\n-----\n%q\n-----\n", - label, wantStr, got) - } -} - -func ExampleEncoder_Encode() { - date, _ := time.Parse(time.RFC822, "14 Mar 10 18:00 UTC") - var config = map[string]interface{}{ - "date": date, - "counts": []int{1, 1, 2, 3, 5, 8}, - "hash": map[string]string{ - "key1": "val1", - "key2": "val2", - }, - } - buf := new(bytes.Buffer) - if err := NewEncoder(buf).Encode(config); err != nil { - log.Fatal(err) - } - fmt.Println(buf.String()) - - // Output: - // counts = [1, 1, 2, 3, 5, 8] - // date = 2010-03-14T18:00:00Z - // - // [hash] - // key1 = "val1" - // key2 = "val2" -} diff --git 
a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go deleted file mode 100644 index 140c44c..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go +++ /dev/null @@ -1,19 +0,0 @@ -// +build go1.2 - -package toml - -// In order to support Go 1.1, we define our own TextMarshaler and -// TextUnmarshaler types. For Go 1.2+, we just alias them with the -// standard library interfaces. - -import ( - "encoding" -) - -// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextMarshaler encoding.TextMarshaler - -// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextUnmarshaler encoding.TextUnmarshaler diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go deleted file mode 100644 index fb285e7..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go +++ /dev/null @@ -1,18 +0,0 @@ -// +build !go1.2 - -package toml - -// These interfaces were introduced in Go 1.2, so we add them manually when -// compiling for Go 1.1. - -// TextMarshaler is a synonym for encoding.TextMarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextMarshaler interface { - MarshalText() (text []byte, err error) -} - -// TextUnmarshaler is a synonym for encoding.TextUnmarshaler. It is defined here -// so that Go 1.1 can be supported. -type TextUnmarshaler interface { - UnmarshalText(text []byte) error -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go deleted file mode 100644 index 1929b9b..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go +++ /dev/null @@ -1,863 +0,0 @@ -package toml - -import ( - "fmt" - "strings" - "unicode/utf8" -) - -type itemType int - -const ( - itemError itemType = iota - itemNIL // used in the parser to indicate no type - itemEOF - itemText - itemString - itemRawString - itemMultilineString - itemRawMultilineString - itemBool - itemInteger - itemFloat - itemDatetime - itemArray // the start of an array - itemArrayEnd - itemTableStart - itemTableEnd - itemArrayTableStart - itemArrayTableEnd - itemKeyStart - itemCommentStart -) - -const ( - eof = 0 - tableStart = '[' - tableEnd = ']' - arrayTableStart = '[' - arrayTableEnd = ']' - tableSep = '.' - keySep = '=' - arrayStart = '[' - arrayEnd = ']' - arrayValTerm = ',' - commentStart = '#' - stringStart = '"' - stringEnd = '"' - rawStringStart = '\'' - rawStringEnd = '\'' -) - -type stateFn func(lx *lexer) stateFn - -type lexer struct { - input string - start int - pos int - width int - line int - state stateFn - items chan item - - // A stack of state functions used to maintain context. - // The idea is to reuse parts of the state machine in various places. - // For example, values can appear at the top level or within arbitrarily - // nested arrays. 
The last state on the stack is used after a value has - // been lexed. Similarly for comments. - stack []stateFn -} - -type item struct { - typ itemType - val string - line int -} - -func (lx *lexer) nextItem() item { - for { - select { - case item := <-lx.items: - return item - default: - lx.state = lx.state(lx) - } - } -} - -func lex(input string) *lexer { - lx := &lexer{ - input: input + "\n", - state: lexTop, - line: 1, - items: make(chan item, 10), - stack: make([]stateFn, 0, 10), - } - return lx -} - -func (lx *lexer) push(state stateFn) { - lx.stack = append(lx.stack, state) -} - -func (lx *lexer) pop() stateFn { - if len(lx.stack) == 0 { - return lx.errorf("BUG in lexer: no states to pop.") - } - last := lx.stack[len(lx.stack)-1] - lx.stack = lx.stack[0 : len(lx.stack)-1] - return last -} - -func (lx *lexer) current() string { - return lx.input[lx.start:lx.pos] -} - -func (lx *lexer) emit(typ itemType) { - lx.items <- item{typ, lx.current(), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) emitTrim(typ itemType) { - lx.items <- item{typ, strings.TrimSpace(lx.current()), lx.line} - lx.start = lx.pos -} - -func (lx *lexer) next() (r rune) { - if lx.pos >= len(lx.input) { - lx.width = 0 - return eof - } - - if lx.input[lx.pos] == '\n' { - lx.line++ - } - r, lx.width = utf8.DecodeRuneInString(lx.input[lx.pos:]) - lx.pos += lx.width - return r -} - -// ignore skips over the pending input before this point. -func (lx *lexer) ignore() { - lx.start = lx.pos -} - -// backup steps back one rune. Can be called only once per call of next. -func (lx *lexer) backup() { - lx.pos -= lx.width - if lx.pos < len(lx.input) && lx.input[lx.pos] == '\n' { - lx.line-- - } -} - -// accept consumes the next rune if it's equal to `valid`. -func (lx *lexer) accept(valid rune) bool { - if lx.next() == valid { - return true - } - lx.backup() - return false -} - -// peek returns but does not consume the next rune in the input. -func (lx *lexer) peek() rune { - r := lx.next() - lx.backup() - return r -} - -// errorf stops all lexing by emitting an error and returning `nil`. -// Note that any value that is a character is escaped if it's a special -// character (new lines, tabs, etc.). -func (lx *lexer) errorf(format string, values ...interface{}) stateFn { - lx.items <- item{ - itemError, - fmt.Sprintf(format, values...), - lx.line, - } - return nil -} - -// lexTop consumes elements at the top level of TOML data. -func lexTop(lx *lexer) stateFn { - r := lx.next() - if isWhitespace(r) || isNL(r) { - return lexSkip(lx, lexTop) - } - - switch r { - case commentStart: - lx.push(lexTop) - return lexCommentStart - case tableStart: - return lexTableStart - case eof: - if lx.pos > lx.start { - return lx.errorf("Unexpected EOF.") - } - lx.emit(itemEOF) - return nil - } - - // At this point, the only valid item can be a key, so we back up - // and let the key lexer do the rest. - lx.backup() - lx.push(lexTopEnd) - return lexKeyStart -} - -// lexTopEnd is entered whenever a top-level item has been consumed. (A value -// or a table.) It must see only whitespace, and will turn back to lexTop -// upon a new line. If it sees EOF, it will quit the lexer successfully. -func lexTopEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case r == commentStart: - // a comment will read to a new line for us. 
- lx.push(lexTop) - return lexCommentStart - case isWhitespace(r): - return lexTopEnd - case isNL(r): - lx.ignore() - return lexTop - case r == eof: - lx.ignore() - return lexTop - } - return lx.errorf("Expected a top-level item to end with a new line, "+ - "comment or EOF, but got %q instead.", r) -} - -// lexTable lexes the beginning of a table. Namely, it makes sure that -// it starts with a character other than '.' and ']'. -// It assumes that '[' has already been consumed. -// It also handles the case that this is an item in an array of tables. -// e.g., '[[name]]'. -func lexTableStart(lx *lexer) stateFn { - if lx.peek() == arrayTableStart { - lx.next() - lx.emit(itemArrayTableStart) - lx.push(lexArrayTableEnd) - } else { - lx.emit(itemTableStart) - lx.push(lexTableEnd) - } - return lexTableNameStart -} - -func lexTableEnd(lx *lexer) stateFn { - lx.emit(itemTableEnd) - return lexTopEnd -} - -func lexArrayTableEnd(lx *lexer) stateFn { - if r := lx.next(); r != arrayTableEnd { - return lx.errorf("Expected end of table array name delimiter %q, "+ - "but got %q instead.", arrayTableEnd, r) - } - lx.emit(itemArrayTableEnd) - return lexTopEnd -} - -func lexTableNameStart(lx *lexer) stateFn { - switch lx.next() { - case tableEnd, eof: - return lx.errorf("Unexpected end of table. (Tables cannot " + - "be empty.)") - case tableSep: - return lx.errorf("Unexpected table separator. (Tables cannot " + - "be empty.)") - } - return lexTableName -} - -// lexTableName lexes the name of a table. It assumes that at least one -// valid character for the table has already been read. -func lexTableName(lx *lexer) stateFn { - switch lx.peek() { - case eof: - return lx.errorf("Unexpected end of table name %q.", lx.current()) - case tableStart: - return lx.errorf("Table names cannot contain %q or %q.", - tableStart, tableEnd) - case tableEnd: - lx.emit(itemText) - lx.next() - return lx.pop() - case tableSep: - lx.emit(itemText) - lx.next() - lx.ignore() - return lexTableNameStart - } - lx.next() - return lexTableName -} - -// lexKeyStart consumes a key name up until the first non-whitespace character. -// lexKeyStart will ignore whitespace. -func lexKeyStart(lx *lexer) stateFn { - r := lx.peek() - switch { - case r == keySep: - return lx.errorf("Unexpected key separator %q.", keySep) - case isWhitespace(r) || isNL(r): - lx.next() - return lexSkip(lx, lexKeyStart) - } - - lx.ignore() - lx.emit(itemKeyStart) - lx.next() - return lexKey -} - -// lexKey consumes the text of a key. Assumes that the first character (which -// is not whitespace) has already been consumed. -func lexKey(lx *lexer) stateFn { - r := lx.peek() - - // Keys cannot contain a '#' character. - if r == commentStart { - return lx.errorf("Key cannot contain a '#' character.") - } - - // XXX: Possible divergence from spec? - // "Keys start with the first non-whitespace character and end with the - // last non-whitespace character before the equals sign." - // Note here that whitespace is either a tab or a space. - // But we'll call it quits if we see a new line too. - if isNL(r) { - lx.emitTrim(itemText) - return lexKeyEnd - } - - // Let's also call it quits if we see an equals sign. - if r == keySep { - lx.emitTrim(itemText) - return lexKeyEnd - } - - lx.next() - return lexKey -} - -// lexKeyEnd consumes the end of a key (up to the key separator). -// Assumes that any whitespace after a key has been consumed. 
-func lexKeyEnd(lx *lexer) stateFn { - r := lx.next() - if r == keySep { - return lexSkip(lx, lexValue) - } - return lx.errorf("Expected key separator %q, but got %q instead.", - keySep, r) -} - -// lexValue starts the consumption of a value anywhere a value is expected. -// lexValue will ignore whitespace. -// After a value is lexed, the last state on the next is popped and returned. -func lexValue(lx *lexer) stateFn { - // We allow whitespace to precede a value, but NOT new lines. - // In array syntax, the array states are responsible for ignoring new lines. - r := lx.next() - if isWhitespace(r) { - return lexSkip(lx, lexValue) - } - - switch { - case r == arrayStart: - lx.ignore() - lx.emit(itemArray) - return lexArrayValue - case r == stringStart: - if lx.accept(stringStart) { - if lx.accept(stringStart) { - lx.ignore() // Ignore """ - return lexMultilineString - } - - lx.backup() - } - - lx.ignore() // ignore the '"' - return lexString - case r == rawStringStart: - if lx.accept(rawStringStart) { - if lx.accept(rawStringStart) { - lx.ignore() // Ignore """ - return lexMultilineRawString - } - - lx.backup() - } - - lx.ignore() // ignore the "'" - return lexRawString - case r == 't': - return lexTrue - case r == 'f': - return lexFalse - case r == '-': - return lexNumberStart - case isDigit(r): - lx.backup() // avoid an extra state and use the same as above - return lexNumberOrDateStart - case r == '.': // special error case, be kind to users - return lx.errorf("Floats must start with a digit, not '.'.") - } - return lx.errorf("Expected value but found %q instead.", r) -} - -// lexArrayValue consumes one value in an array. It assumes that '[' or ',' -// have already been consumed. All whitespace and new lines are ignored. -func lexArrayValue(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValue) - case r == commentStart: - lx.push(lexArrayValue) - return lexCommentStart - case r == arrayValTerm: - return lx.errorf("Unexpected array value terminator %q.", - arrayValTerm) - case r == arrayEnd: - return lexArrayEnd - } - - lx.backup() - lx.push(lexArrayValueEnd) - return lexValue -} - -// lexArrayValueEnd consumes the cruft between values of an array. Namely, -// it ignores whitespace and expects either a ',' or a ']'. -func lexArrayValueEnd(lx *lexer) stateFn { - r := lx.next() - switch { - case isWhitespace(r) || isNL(r): - return lexSkip(lx, lexArrayValueEnd) - case r == commentStart: - lx.push(lexArrayValueEnd) - return lexCommentStart - case r == arrayValTerm: - lx.ignore() - return lexArrayValue // move on to the next value - case r == arrayEnd: - return lexArrayEnd - } - return lx.errorf("Expected an array value terminator %q or an array "+ - "terminator %q, but got %q instead.", arrayValTerm, arrayEnd, r) -} - -// lexArrayEnd finishes the lexing of an array. It assumes that a ']' has -// just been consumed. -func lexArrayEnd(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemArrayEnd) - return lx.pop() -} - -// lexString consumes the inner contents of a string. It assumes that the -// beginning '"' has already been consumed and ignored. -func lexString(lx *lexer) stateFn { - r := lx.next() - switch { - case isNL(r): - return lx.errorf("Strings cannot contain new lines.") - case r == '\\': - return lexStringEscape - case r == stringEnd: - lx.backup() - lx.emit(itemString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexString -} - -// lexStringEscape consumes an escaped character. 
It assumes that the preceding -// '\\' has already been consumed. -func lexStringEscape(lx *lexer) stateFn { - return lexStringEscapeHandler(lx, lexString, lexStringUnicode) -} - -// lexMultilineStringEscape consumes an escaped character. It assumes that the -// preceding '\\' has already been consumed. -func lexMultilineStringEscape(lx *lexer) stateFn { - // Handle the special case first: - if isNL(lx.next()) { - lx.next() - return lexMultilineString - } else { - lx.backup() - return lexStringEscapeHandler(lx, lexMultilineString, lexMultilineStringUnicode) - } -} - -func lexStringEscapeHandler(lx *lexer, stringFn stateFn, unicodeFn stateFn) stateFn { - r := lx.next() - switch r { - case 'b': - fallthrough - case 't': - fallthrough - case 'n': - fallthrough - case 'f': - fallthrough - case 'r': - fallthrough - case '"': - fallthrough - case '/': - fallthrough - case '\\': - return stringFn - case 'u': - return unicodeFn - } - return lx.errorf("Invalid escape character %q. Only the following "+ - "escape characters are allowed: "+ - "\\b, \\t, \\n, \\f, \\r, \\\", \\/, \\\\, and \\uXXXX.", r) -} - -// lexStringUnicode consumes four hexadecimal digits following '\u'. It assumes -// that the '\u' has already been consumed. -func lexStringUnicode(lx *lexer) stateFn { - return lexStringUnicodeHandler(lx, lexString) -} - -// lexMultilineStringUnicode consumes four hexadecimal digits following '\u'. -// It assumes that the '\u' has already been consumed. -func lexMultilineStringUnicode(lx *lexer) stateFn { - return lexStringUnicodeHandler(lx, lexMultilineString) -} - -func lexStringUnicodeHandler(lx *lexer, nextFunc stateFn) stateFn { - var r rune - - for i := 0; i < 4; i++ { - r = lx.next() - if !isHexadecimal(r) { - return lx.errorf("Expected four hexadecimal digits after '\\x', "+ - "but got '%s' instead.", lx.current()) - } - } - return nextFunc -} - -// lexMultilineString consumes the inner contents of a string. It assumes that -// the beginning '"""' has already been consumed and ignored. -func lexMultilineString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == '\\': - return lexMultilineStringEscape - case r == stringEnd: - if lx.accept(stringEnd) { - if lx.accept(stringEnd) { - lx.backup() - lx.backup() - lx.backup() - lx.emit(itemMultilineString) - lx.next() - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - - lx.backup() - } - } - return lexMultilineString -} - -// lexRawString consumes a raw string. Nothing can be escaped in such a string. -// It assumes that the beginning "'" has already been consumed and ignored. -func lexRawString(lx *lexer) stateFn { - r := lx.next() - switch { - case isNL(r): - return lx.errorf("Strings cannot contain new lines.") - case r == rawStringEnd: - lx.backup() - lx.emit(itemRawString) - lx.next() - lx.ignore() - return lx.pop() - } - return lexRawString -} - -// lexMultilineRawString consumes a raw string. Nothing can be escaped in such -// a string. It assumes that the beginning "'" has already been consumed and -// ignored. -func lexMultilineRawString(lx *lexer) stateFn { - r := lx.next() - switch { - case r == rawStringEnd: - if lx.accept(rawStringEnd) { - if lx.accept(rawStringEnd) { - lx.backup() - lx.backup() - lx.backup() - lx.emit(itemRawMultilineString) - lx.next() - lx.next() - lx.next() - lx.ignore() - return lx.pop() - } - - lx.backup() - } - } - return lexMultilineRawString -} - -// lexNumberOrDateStart consumes either a (positive) integer, float or datetime. -// It assumes that NO negative sign has been consumed. 
-func lexNumberOrDateStart(lx *lexer) stateFn { - r := lx.next() - if !isDigit(r) { - if r == '.' { - return lx.errorf("Floats must start with a digit, not '.'.") - } else { - return lx.errorf("Expected a digit but got %q.", r) - } - } - return lexNumberOrDate -} - -// lexNumberOrDate consumes either a (positive) integer, float or datetime. -func lexNumberOrDate(lx *lexer) stateFn { - r := lx.next() - switch { - case r == '-': - if lx.pos-lx.start != 5 { - return lx.errorf("All ISO8601 dates must be in full Zulu form.") - } - return lexDateAfterYear - case isDigit(r): - return lexNumberOrDate - case r == '.': - return lexFloatStart - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexDateAfterYear consumes a full Zulu Datetime in ISO8601 format. -// It assumes that "YYYY-" has already been consumed. -func lexDateAfterYear(lx *lexer) stateFn { - formats := []rune{ - // digits are '0'. - // everything else is direct equality. - '0', '0', '-', '0', '0', - 'T', - '0', '0', ':', '0', '0', ':', '0', '0', - 'Z', - } - for _, f := range formats { - r := lx.next() - if f == '0' { - if !isDigit(r) { - return lx.errorf("Expected digit in ISO8601 datetime, "+ - "but found %q instead.", r) - } - } else if f != r { - return lx.errorf("Expected %q in ISO8601 datetime, "+ - "but found %q instead.", f, r) - } - } - lx.emit(itemDatetime) - return lx.pop() -} - -// lexNumberStart consumes either an integer or a float. It assumes that a -// negative sign has already been read, but that *no* digits have been consumed. -// lexNumberStart will move to the appropriate integer or float states. -func lexNumberStart(lx *lexer) stateFn { - // we MUST see a digit. Even floats have to start with a digit. - r := lx.next() - if !isDigit(r) { - if r == '.' { - return lx.errorf("Floats must start with a digit, not '.'.") - } else { - return lx.errorf("Expected a digit but got %q.", r) - } - } - return lexNumber -} - -// lexNumber consumes an integer or a float after seeing the first digit. -func lexNumber(lx *lexer) stateFn { - r := lx.next() - switch { - case isDigit(r): - return lexNumber - case r == '.': - return lexFloatStart - } - - lx.backup() - lx.emit(itemInteger) - return lx.pop() -} - -// lexFloatStart starts the consumption of digits of a float after a '.'. -// Namely, at least one digit is required. -func lexFloatStart(lx *lexer) stateFn { - r := lx.next() - if !isDigit(r) { - return lx.errorf("Floats must have a digit after the '.', but got "+ - "%q instead.", r) - } - return lexFloat -} - -// lexFloat consumes the digits of a float after a '.'. -// Assumes that one digit has been consumed after a '.' already. -func lexFloat(lx *lexer) stateFn { - r := lx.next() - if isDigit(r) { - return lexFloat - } - - lx.backup() - lx.emit(itemFloat) - return lx.pop() -} - -// lexConst consumes the s[1:] in s. It assumes that s[0] has already been -// consumed. -func lexConst(lx *lexer, s string) stateFn { - for i := range s[1:] { - if r := lx.next(); r != rune(s[i+1]) { - return lx.errorf("Expected %q, but found %q instead.", s[:i+1], - s[:i]+string(r)) - } - } - return nil -} - -// lexTrue consumes the "rue" in "true". It assumes that 't' has already -// been consumed. -func lexTrue(lx *lexer) stateFn { - if fn := lexConst(lx, "true"); fn != nil { - return fn - } - lx.emit(itemBool) - return lx.pop() -} - -// lexFalse consumes the "alse" in "false". It assumes that 'f' has already -// been consumed. 
-func lexFalse(lx *lexer) stateFn { - if fn := lexConst(lx, "false"); fn != nil { - return fn - } - lx.emit(itemBool) - return lx.pop() -} - -// lexCommentStart begins the lexing of a comment. It will emit -// itemCommentStart and consume no characters, passing control to lexComment. -func lexCommentStart(lx *lexer) stateFn { - lx.ignore() - lx.emit(itemCommentStart) - return lexComment -} - -// lexComment lexes an entire comment. It assumes that '#' has been consumed. -// It will consume *up to* the first new line character, and pass control -// back to the last state on the stack. -func lexComment(lx *lexer) stateFn { - r := lx.peek() - if isNL(r) || r == eof { - lx.emit(itemText) - return lx.pop() - } - lx.next() - return lexComment -} - -// lexSkip ignores all slurped input and moves on to the next state. -func lexSkip(lx *lexer, nextState stateFn) stateFn { - return func(lx *lexer) stateFn { - lx.ignore() - return nextState - } -} - -// isWhitespace returns true if `r` is a whitespace character according -// to the spec. -func isWhitespace(r rune) bool { - return r == '\t' || r == ' ' -} - -func isNL(r rune) bool { - return r == '\n' || r == '\r' -} - -func isDigit(r rune) bool { - return r >= '0' && r <= '9' -} - -func isHexadecimal(r rune) bool { - return (r >= '0' && r <= '9') || - (r >= 'a' && r <= 'f') || - (r >= 'A' && r <= 'F') -} - -func (itype itemType) String() string { - switch itype { - case itemError: - return "Error" - case itemNIL: - return "NIL" - case itemEOF: - return "EOF" - case itemText: - return "Text" - case itemString: - return "String" - case itemRawString: - return "String" - case itemMultilineString: - return "String" - case itemRawMultilineString: - return "String" - case itemBool: - return "Bool" - case itemInteger: - return "Integer" - case itemFloat: - return "Float" - case itemDatetime: - return "DateTime" - case itemTableStart: - return "TableStart" - case itemTableEnd: - return "TableEnd" - case itemKeyStart: - return "KeyStart" - case itemArray: - return "Array" - case itemArrayEnd: - return "ArrayEnd" - case itemCommentStart: - return "CommentStart" - } - panic(fmt.Sprintf("BUG: Unknown type '%d'.", int(itype))) -} - -func (item item) String() string { - return fmt.Sprintf("(%s, %s)", item.typ.String(), item.val) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go deleted file mode 100644 index 2fbc211..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go +++ /dev/null @@ -1,444 +0,0 @@ -package toml - -import ( - "fmt" - "log" - "strconv" - "strings" - "time" - "unicode" - "unicode/utf8" -) - -type parser struct { - mapping map[string]interface{} - types map[string]tomlType - lx *lexer - - // A list of keys in the order that they appear in the TOML data. - ordered []Key - - // the full key for the current hash in scope - context Key - - // the base key name for everything except hashes - currentKey string - - // rough approximation of line number - approxLine int - - // A map of 'key.group.names' to whether they were created implicitly. 
- implicits map[string]bool -} - -type parseError string - -func (pe parseError) Error() string { - return string(pe) -} - -func parse(data string) (p *parser, err error) { - defer func() { - if r := recover(); r != nil { - var ok bool - if err, ok = r.(parseError); ok { - return - } - panic(r) - } - }() - - p = &parser{ - mapping: make(map[string]interface{}), - types: make(map[string]tomlType), - lx: lex(data), - ordered: make([]Key, 0), - implicits: make(map[string]bool), - } - for { - item := p.next() - if item.typ == itemEOF { - break - } - p.topLevel(item) - } - - return p, nil -} - -func (p *parser) panicf(format string, v ...interface{}) { - msg := fmt.Sprintf("Near line %d, key '%s': %s", - p.approxLine, p.current(), fmt.Sprintf(format, v...)) - panic(parseError(msg)) -} - -func (p *parser) next() item { - it := p.lx.nextItem() - if it.typ == itemError { - p.panicf("Near line %d: %s", it.line, it.val) - } - return it -} - -func (p *parser) bug(format string, v ...interface{}) { - log.Fatalf("BUG: %s\n\n", fmt.Sprintf(format, v...)) -} - -func (p *parser) expect(typ itemType) item { - it := p.next() - p.assertEqual(typ, it.typ) - return it -} - -func (p *parser) assertEqual(expected, got itemType) { - if expected != got { - p.bug("Expected '%s' but got '%s'.", expected, got) - } -} - -func (p *parser) topLevel(item item) { - switch item.typ { - case itemCommentStart: - p.approxLine = item.line - p.expect(itemText) - case itemTableStart: - kg := p.expect(itemText) - p.approxLine = kg.line - - key := make(Key, 0) - for ; kg.typ == itemText; kg = p.next() { - key = append(key, kg.val) - } - p.assertEqual(itemTableEnd, kg.typ) - - p.establishContext(key, false) - p.setType("", tomlHash) - p.ordered = append(p.ordered, key) - case itemArrayTableStart: - kg := p.expect(itemText) - p.approxLine = kg.line - - key := make(Key, 0) - for ; kg.typ == itemText; kg = p.next() { - key = append(key, kg.val) - } - p.assertEqual(itemArrayTableEnd, kg.typ) - - p.establishContext(key, true) - p.setType("", tomlArrayHash) - p.ordered = append(p.ordered, key) - case itemKeyStart: - kname := p.expect(itemText) - p.currentKey = kname.val - p.approxLine = kname.line - - val, typ := p.value(p.next()) - p.setValue(p.currentKey, val) - p.setType(p.currentKey, typ) - p.ordered = append(p.ordered, p.context.add(p.currentKey)) - - p.currentKey = "" - default: - p.bug("Unexpected type at top level: %s", item.typ) - } -} - -// value translates an expected value from the lexer into a Go value wrapped -// as an empty interface. -func (p *parser) value(it item) (interface{}, tomlType) { - switch it.typ { - case itemString: - return p.replaceUnicode(replaceEscapes(it.val)), p.typeOfPrimitive(it) - case itemMultilineString: - return p.replaceUnicode(replaceEscapes(stripFirstNewline(stripEscapedWhitespace(it.val)))), p.typeOfPrimitive(it) - case itemRawString: - return it.val, p.typeOfPrimitive(it) - case itemRawMultilineString: - return stripFirstNewline(it.val), p.typeOfPrimitive(it) - case itemBool: - switch it.val { - case "true": - return true, p.typeOfPrimitive(it) - case "false": - return false, p.typeOfPrimitive(it) - } - p.bug("Expected boolean value, but got '%s'.", it.val) - case itemInteger: - num, err := strconv.ParseInt(it.val, 10, 64) - if err != nil { - // See comment below for floats describing why we make a - // distinction between a bug and a user error. 
- if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panicf("Integer '%s' is out of the range of 64-bit "+ - "signed integers.", it.val) - } else { - p.bug("Expected integer value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemFloat: - num, err := strconv.ParseFloat(it.val, 64) - if err != nil { - // Distinguish float values. Normally, it'd be a bug if the lexer - // provides an invalid float, but it's possible that the float is - // out of range of valid values (which the lexer cannot determine). - // So mark the former as a bug but the latter as a legitimate user - // error. - // - // This is also true for integers. - if e, ok := err.(*strconv.NumError); ok && - e.Err == strconv.ErrRange { - - p.panicf("Float '%s' is out of the range of 64-bit "+ - "IEEE-754 floating-point numbers.", it.val) - } else { - p.bug("Expected float value, but got '%s'.", it.val) - } - } - return num, p.typeOfPrimitive(it) - case itemDatetime: - t, err := time.Parse("2006-01-02T15:04:05Z", it.val) - if err != nil { - p.bug("Expected Zulu formatted DateTime, but got '%s'.", it.val) - } - return t, p.typeOfPrimitive(it) - case itemArray: - array := make([]interface{}, 0) - types := make([]tomlType, 0) - - for it = p.next(); it.typ != itemArrayEnd; it = p.next() { - if it.typ == itemCommentStart { - p.expect(itemText) - continue - } - - val, typ := p.value(it) - array = append(array, val) - types = append(types, typ) - } - return array, p.typeOfArray(types) - } - p.bug("Unexpected value type: %s", it.typ) - panic("unreachable") -} - -// establishContext sets the current context of the parser, -// where the context is either a hash or an array of hashes. Which one is -// set depends on the value of the `array` parameter. -// -// Establishing the context also makes sure that the key isn't a duplicate, and -// will create implicit hashes automatically. -func (p *parser) establishContext(key Key, array bool) { - var ok bool - - // Always start at the top level and drill down for our context. - hashContext := p.mapping - keyContext := make(Key, 0) - - // We only need implicit hashes for key[0:-1] - for _, k := range key[0 : len(key)-1] { - _, ok = hashContext[k] - keyContext = append(keyContext, k) - - // No key? Make an implicit hash and move on. - if !ok { - p.addImplicit(keyContext) - hashContext[k] = make(map[string]interface{}) - } - - // If the hash context is actually an array of tables, then set - // the hash context to the last element in that array. - // - // Otherwise, it better be a table, since this MUST be a key group (by - // virtue of it not being the last element in a key). - switch t := hashContext[k].(type) { - case []map[string]interface{}: - hashContext = t[len(t)-1] - case map[string]interface{}: - hashContext = t - default: - p.panicf("Key '%s' was already created as a hash.", keyContext) - } - } - - p.context = keyContext - if array { - // If this is the first element for this array, then allocate a new - // list of tables for it. - k := key[len(key)-1] - if _, ok := hashContext[k]; !ok { - hashContext[k] = make([]map[string]interface{}, 0, 5) - } - - // Add a new table. But make sure the key hasn't already been used - // for something else. 
- if hash, ok := hashContext[k].([]map[string]interface{}); ok { - hashContext[k] = append(hash, make(map[string]interface{})) - } else { - p.panicf("Key '%s' was already created and cannot be used as "+ - "an array.", keyContext) - } - } else { - p.setValue(key[len(key)-1], make(map[string]interface{})) - } - p.context = append(p.context, key[len(key)-1]) -} - -// setValue sets the given key to the given value in the current context. -// It will make sure that the key hasn't already been defined, account for -// implicit key groups. -func (p *parser) setValue(key string, value interface{}) { - var tmpHash interface{} - var ok bool - - hash := p.mapping - keyContext := make(Key, 0) - for _, k := range p.context { - keyContext = append(keyContext, k) - if tmpHash, ok = hash[k]; !ok { - p.bug("Context for key '%s' has not been established.", keyContext) - } - switch t := tmpHash.(type) { - case []map[string]interface{}: - // The context is a table of hashes. Pick the most recent table - // defined as the current hash. - hash = t[len(t)-1] - case map[string]interface{}: - hash = t - default: - p.bug("Expected hash to have type 'map[string]interface{}', but "+ - "it has '%T' instead.", tmpHash) - } - } - keyContext = append(keyContext, key) - - if _, ok := hash[key]; ok { - // Typically, if the given key has already been set, then we have - // to raise an error since duplicate keys are disallowed. However, - // it's possible that a key was previously defined implicitly. In this - // case, it is allowed to be redefined concretely. (See the - // `tests/valid/implicit-and-explicit-after.toml` test in `toml-test`.) - // - // But we have to make sure to stop marking it as an implicit. (So that - // another redefinition provokes an error.) - // - // Note that since it has already been defined (as a hash), we don't - // want to overwrite it. So our business is done. - if p.isImplicit(keyContext) { - p.removeImplicit(keyContext) - return - } - - // Otherwise, we have a concrete key trying to override a previous - // key, which is *always* wrong. - p.panicf("Key '%s' has already been defined.", keyContext) - } - hash[key] = value -} - -// setType sets the type of a particular value at a given key. -// It should be called immediately AFTER setValue. -// -// Note that if `key` is empty, then the type given will be applied to the -// current context (which is either a table or an array of tables). -func (p *parser) setType(key string, typ tomlType) { - keyContext := make(Key, 0, len(p.context)+1) - for _, k := range p.context { - keyContext = append(keyContext, k) - } - if len(key) > 0 { // allow type setting for hashes - keyContext = append(keyContext, key) - } - p.types[keyContext.String()] = typ -} - -// addImplicit sets the given Key as having been created implicitly. -func (p *parser) addImplicit(key Key) { - p.implicits[key.String()] = true -} - -// removeImplicit stops tagging the given key as having been implicitly created. -func (p *parser) removeImplicit(key Key) { - p.implicits[key.String()] = false -} - -// isImplicit returns true if the key group pointed to by the key was created -// implicitly. -func (p *parser) isImplicit(key Key) bool { - return p.implicits[key.String()] -} - -// current returns the full key name of the current context. 
-func (p *parser) current() string { - if len(p.currentKey) == 0 { - return p.context.String() - } - if len(p.context) == 0 { - return p.currentKey - } - return fmt.Sprintf("%s.%s", p.context, p.currentKey) -} - -func replaceEscapes(s string) string { - return strings.NewReplacer( - "\\b", "\u0008", - "\\t", "\u0009", - "\\n", "\u000A", - "\\f", "\u000C", - "\\r", "\u000D", - "\\\"", "\u0022", - "\\/", "\u002F", - "\\\\", "\u005C", - ).Replace(s) -} - -func stripFirstNewline(s string) string { - if len(s) == 0 || s[0] != '\n' { - return s - } - - return s[1:len(s)] -} - -func stripEscapedWhitespace(s string) string { - esc := strings.Split(s, "\\\n") - - if len(esc) > 1 { - for i := 1; i < len(esc); i++ { - esc[i] = strings.TrimLeftFunc(esc[i], unicode.IsSpace) - } - } - - return strings.Join(esc, "") -} - -func (p *parser) replaceUnicode(s string) string { - indexEsc := func() int { - return strings.Index(s, "\\u") - } - for i := indexEsc(); i != -1; i = indexEsc() { - asciiBytes := s[i+2 : i+6] - s = strings.Replace(s, s[i:i+6], p.asciiEscapeToUnicode(asciiBytes), -1) - } - return s -} - -func (p *parser) asciiEscapeToUnicode(s string) string { - hex, err := strconv.ParseUint(strings.ToLower(s), 16, 32) - if err != nil { - p.bug("Could not parse '%s' as a hexadecimal number, but the "+ - "lexer claims it's OK: %s", s, err) - } - - // BUG(burntsushi) - // I honestly don't understand how this works. I can't seem - // to find a way to make this fail. I figured this would fail on invalid - // UTF-8 characters like U+DCFF, but it doesn't. - r := string(rune(hex)) - if !utf8.ValidString(r) { - p.panicf("Escaped character '\\u%s' is not valid UTF-8.", s) - } - return string(r) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim deleted file mode 100644 index 562164b..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim +++ /dev/null @@ -1 +0,0 @@ -au BufWritePost *.go silent!make tags > /dev/null 2>&1 diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go deleted file mode 100644 index caecd4a..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go +++ /dev/null @@ -1,91 +0,0 @@ -package toml - -// tomlType represents any Go type that corresponds to a TOML type. -// While the first draft of the TOML spec has a simplistic type system that -// probably doesn't need this level of sophistication, we seem to be militating -// toward adding real composite types. -type tomlType interface { - typeString() string -} - -// typeEqual accepts any two types and returns true if they are equal. 
-func typeEqual(t1, t2 tomlType) bool { - if t1 == nil || t2 == nil { - return false - } - return t1.typeString() == t2.typeString() -} - -func typeIsHash(t tomlType) bool { - return typeEqual(t, tomlHash) || typeEqual(t, tomlArrayHash) -} - -type tomlBaseType string - -func (btype tomlBaseType) typeString() string { - return string(btype) -} - -func (btype tomlBaseType) String() string { - return btype.typeString() -} - -var ( - tomlInteger tomlBaseType = "Integer" - tomlFloat tomlBaseType = "Float" - tomlDatetime tomlBaseType = "Datetime" - tomlString tomlBaseType = "String" - tomlBool tomlBaseType = "Bool" - tomlArray tomlBaseType = "Array" - tomlHash tomlBaseType = "Hash" - tomlArrayHash tomlBaseType = "ArrayHash" -) - -// typeOfPrimitive returns a tomlType of any primitive value in TOML. -// Primitive values are: Integer, Float, Datetime, String and Bool. -// -// Passing a lexer item other than the following will cause a BUG message -// to occur: itemString, itemBool, itemInteger, itemFloat, itemDatetime. -func (p *parser) typeOfPrimitive(lexItem item) tomlType { - switch lexItem.typ { - case itemInteger: - return tomlInteger - case itemFloat: - return tomlFloat - case itemDatetime: - return tomlDatetime - case itemString: - return tomlString - case itemMultilineString: - return tomlString - case itemRawString: - return tomlString - case itemRawMultilineString: - return tomlString - case itemBool: - return tomlBool - } - p.bug("Cannot infer primitive type of lex item '%s'.", lexItem) - panic("unreachable") -} - -// typeOfArray returns a tomlType for an array given a list of types of its -// values. -// -// In the current spec, if an array is homogeneous, then its type is always -// "Array". If the array is not homogeneous, an error is generated. -func (p *parser) typeOfArray(types []tomlType) tomlType { - // Empty arrays are cool. - if len(types) == 0 { - return tomlArray - } - - theType := types[0] - for _, t := range types[1:] { - if !typeEqual(theType, t) { - p.panicf("Array contains values of type '%s' and '%s', but arrays "+ - "must be homogeneous.", theType, t) - } - } - return tomlArray -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go deleted file mode 100644 index 7592f87..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go +++ /dev/null @@ -1,241 +0,0 @@ -package toml - -// Struct field handling is adapted from code in encoding/json: -// -// Copyright 2010 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the Go distribution. - -import ( - "reflect" - "sort" - "sync" -) - -// A field represents a single field found in a struct. -type field struct { - name string // the name of the field (`toml` tag included) - tag bool // whether field has a `toml` tag - index []int // represents the depth of an anonymous field - typ reflect.Type // the type of the field -} - -// byName sorts field by name, breaking ties with depth, -// then breaking ties with "name came from toml tag", then -// breaking ties with index sequence. 
-type byName []field - -func (x byName) Len() int { return len(x) } - -func (x byName) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byName) Less(i, j int) bool { - if x[i].name != x[j].name { - return x[i].name < x[j].name - } - if len(x[i].index) != len(x[j].index) { - return len(x[i].index) < len(x[j].index) - } - if x[i].tag != x[j].tag { - return x[i].tag - } - return byIndex(x).Less(i, j) -} - -// byIndex sorts field by index sequence. -type byIndex []field - -func (x byIndex) Len() int { return len(x) } - -func (x byIndex) Swap(i, j int) { x[i], x[j] = x[j], x[i] } - -func (x byIndex) Less(i, j int) bool { - for k, xik := range x[i].index { - if k >= len(x[j].index) { - return false - } - if xik != x[j].index[k] { - return xik < x[j].index[k] - } - } - return len(x[i].index) < len(x[j].index) -} - -// typeFields returns a list of fields that TOML should recognize for the given -// type. The algorithm is breadth-first search over the set of structs to -// include - the top struct and then any reachable anonymous structs. -func typeFields(t reflect.Type) []field { - // Anonymous fields to explore at the current level and the next. - current := []field{} - next := []field{{typ: t}} - - // Count of queued names for current level and the next. - count := map[reflect.Type]int{} - nextCount := map[reflect.Type]int{} - - // Types already visited at an earlier level. - visited := map[reflect.Type]bool{} - - // Fields found. - var fields []field - - for len(next) > 0 { - current, next = next, current[:0] - count, nextCount = nextCount, map[reflect.Type]int{} - - for _, f := range current { - if visited[f.typ] { - continue - } - visited[f.typ] = true - - // Scan f.typ for fields to include. - for i := 0; i < f.typ.NumField(); i++ { - sf := f.typ.Field(i) - if sf.PkgPath != "" { // unexported - continue - } - name := sf.Tag.Get("toml") - if name == "-" { - continue - } - index := make([]int, len(f.index)+1) - copy(index, f.index) - index[len(f.index)] = i - - ft := sf.Type - if ft.Name() == "" && ft.Kind() == reflect.Ptr { - // Follow pointer. - ft = ft.Elem() - } - - // Record found field and index sequence. - if name != "" || !sf.Anonymous || ft.Kind() != reflect.Struct { - tagged := name != "" - if name == "" { - name = sf.Name - } - fields = append(fields, field{name, tagged, index, ft}) - if count[f.typ] > 1 { - // If there were multiple instances, add a second, - // so that the annihilation code will see a duplicate. - // It only cares about the distinction between 1 or 2, - // so don't bother generating any more copies. - fields = append(fields, fields[len(fields)-1]) - } - continue - } - - // Record new anonymous struct to explore in next round. - nextCount[ft]++ - if nextCount[ft] == 1 { - f := field{name: ft.Name(), index: index, typ: ft} - next = append(next, f) - } - } - } - } - - sort.Sort(byName(fields)) - - // Delete all fields that are hidden by the Go rules for embedded fields, - // except that fields with TOML tags are promoted. - - // The fields are sorted in primary order of name, secondary order - // of field index length. Loop over names; for each name, delete - // hidden fields by choosing the one dominant field that survives. - out := fields[:0] - for advance, i := 0, 0; i < len(fields); i += advance { - // One iteration per name. - // Find the sequence of fields with the name of this first field. 
- fi := fields[i] - name := fi.name - for advance = 1; i+advance < len(fields); advance++ { - fj := fields[i+advance] - if fj.name != name { - break - } - } - if advance == 1 { // Only one field with this name - out = append(out, fi) - continue - } - dominant, ok := dominantField(fields[i : i+advance]) - if ok { - out = append(out, dominant) - } - } - - fields = out - sort.Sort(byIndex(fields)) - - return fields -} - -// dominantField looks through the fields, all of which are known to -// have the same name, to find the single field that dominates the -// others using Go's embedding rules, modified by the presence of -// TOML tags. If there are multiple top-level fields, the boolean -// will be false: This condition is an error in Go and we skip all -// the fields. -func dominantField(fields []field) (field, bool) { - // The fields are sorted in increasing index-length order. The winner - // must therefore be one with the shortest index length. Drop all - // longer entries, which is easy: just truncate the slice. - length := len(fields[0].index) - tagged := -1 // Index of first tagged field. - for i, f := range fields { - if len(f.index) > length { - fields = fields[:i] - break - } - if f.tag { - if tagged >= 0 { - // Multiple tagged fields at the same level: conflict. - // Return no field. - return field{}, false - } - tagged = i - } - } - if tagged >= 0 { - return fields[tagged], true - } - // All remaining fields have the same length. If there's more than one, - // we have a conflict (two fields named "X" at the same level) and we - // return no field. - if len(fields) > 1 { - return field{}, false - } - return fields[0], true -} - -var fieldCache struct { - sync.RWMutex - m map[reflect.Type][]field -} - -// cachedTypeFields is like typeFields but uses a cache to avoid repeated work. -func cachedTypeFields(t reflect.Type) []field { - fieldCache.RLock() - f := fieldCache.m[t] - fieldCache.RUnlock() - if f != nil { - return f - } - - // Compute fields without lock. - // Might duplicate effort but won't hold other computations back. 
- f = typeFields(t) - if f == nil { - f = []field{} - } - - fieldCache.Lock() - if fieldCache.m == nil { - fieldCache.m = map[reflect.Type][]field{} - } - fieldCache.m[t] = f - fieldCache.Unlock() - return f -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/nix.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/nix.go deleted file mode 100644 index 99c4e7a..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/nix.go +++ /dev/null @@ -1,23 +0,0 @@ -// +build linux darwin - -package gopass - -import ( - "syscall" - - "github.com/remind101/deploy/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal" -) - -func getch() byte { - if oldState, err := terminal.MakeRaw(0); err != nil { - panic(err) - } else { - defer terminal.Restore(0, oldState) - } - - var buf [1]byte - if n, err := syscall.Read(0, buf[:]); n == 0 || err != nil { - panic(err) - } - return buf[0] -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go deleted file mode 100644 index 34e7a43..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go +++ /dev/null @@ -1,133 +0,0 @@ -// Package hypermedia provides helpers for parsing hypermedia links in resources -// and expanding the links to make further requests. -package hypermedia - -import ( - "fmt" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jtacoma/uritemplates" - "net/url" - "reflect" -) - -// Hyperlink is a string url. If it is a uri template, it can be converted to -// a full URL with Expand(). -type Hyperlink string - -// Expand converts a uri template into a url.URL using the given M map. -func (l Hyperlink) Expand(m M) (*url.URL, error) { - template, err := uritemplates.Parse(string(l)) - if err != nil { - return nil, err - } - - // clone M to map[string]interface{} - // if we don't do this type assertion will - // fail on jtacoma/uritemplates - // see https://github.com/jtacoma/uritemplates/blob/master/uritemplates.go#L189 - mm := make(map[string]interface{}, len(m)) - for k, v := range m { - mm[k] = v - } - - expanded, err := template.Expand(mm) - if err != nil { - return nil, err - } - - return url.Parse(expanded) -} - -// M represents a map of values to expand a Hyperlink. -type M map[string]interface{} - -// Relations is a map of keys that point to Hyperlink objects. -type Relations map[string]Hyperlink - -// Rel fetches and expands the Hyperlink by its given key in the Relations map. -func (h Relations) Rel(name string, m M) (*url.URL, error) { - if rel, ok := h[name]; ok { - return rel.Expand(m) - } - return nil, fmt.Errorf("No %s relation found", name) -} - -// A HypermediaResource has link relations for next actions of a resource. -type HypermediaResource interface { - Rels() Relations -} - -// The HypermediaDecoder gets the link relations from any HypermediaResource. -func HypermediaDecoder(res HypermediaResource) Relations { - return res.Rels() -} - -// HALResource is a resource with hypermedia specified as JSON HAL. -// -// http://stateless.co/hal_specification.html -type HALResource struct { - Links Links `json:"_links"` - rels Relations -} - -// Rels gets the link relations from the HALResource's Links field. 
-func (r *HALResource) Rels() Relations { - if r.rels == nil { - r.rels = make(map[string]Hyperlink) - for name, link := range r.Links { - r.rels[name] = link.Href - } - } - return r.rels -} - -// Links is a collection of Link objects in a HALResource. Note that the HAL -// spec allows single link objects or an array of link objects. Sawyer -// currently only supports single link objects. -type Links map[string]Link - -// Link represents a single link in a HALResource. -type Link struct { - Href Hyperlink `json:"href"` -} - -// Expand converts a uri template into a url.URL using the given M map. -func (l *Link) Expand(m M) (*url.URL, error) { - return l.Href.Expand(m) -} - -// The HyperFieldDecoder gets link relations from a resource by reflecting on -// its Hyperlink properties. The relation name is taken either from the name -// of the field, or a "rel" struct tag. -// -// type Foo struct { -// Url Hyperlink `rel:"self" json:"url"` -// CommentsUrl Hyperlink `rel:"comments" json:"comments_url"` -// } -// -func HyperFieldDecoder(res interface{}) Relations { - rels := make(Relations) - t := reflect.TypeOf(res).Elem() - v := reflect.ValueOf(res).Elem() - fieldlen := t.NumField() - for i := 0; i < fieldlen; i++ { - fillRelation(rels, t, v, i) - } - return rels -} - -func fillRelation(rels map[string]Hyperlink, t reflect.Type, v reflect.Value, index int) { - f := t.Field(index) - - if hyperlinkType != f.Type { - return - } - - hl := v.Field(index).Interface().(Hyperlink) - name := f.Name - if rel := f.Tag.Get("rel"); len(rel) > 0 { - name = rel - } - rels[name] = hl -} - -var hyperlinkType = reflect.TypeOf(Hyperlink("foo")) diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go deleted file mode 100644 index da293b5..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go +++ /dev/null @@ -1,133 +0,0 @@ -package hypermedia - -import ( - "bytes" - "encoding/json" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "testing" -) - -func TestReflectRelations(t *testing.T) { - input := ` -{ "Login": "bob" -, "Url": "/self" -, "FooUrl": "/foo" -, "FooBarUrl": "/bar" -, "whatever": "/whatevs" -, "HomepageUrl": "http://example.com" -}` - - user := &ReflectedUser{} - decode(t, input, user) - - rels := HyperFieldDecoder(user) - assert.Equal(t, 4, len(rels)) - assert.Equal(t, "/self", string(rels["Url"])) - assert.Equal(t, "/foo", string(rels["FooUrl"])) - assert.Equal(t, "/bar", string(rels["FooBarUrl"])) - assert.Equal(t, "/whatevs", string(rels["whatevs"])) - - rel, err := rels.Rel("FooUrl", nil) - assert.Equal(t, nil, err) - assert.Equal(t, "/foo", rel.Path) -} - -func TestHALRelations(t *testing.T) { - input := ` -{ "Login": "bob" -, "Url": "/foo/bar{/arg}" -, "_links": - { "self": { "href": "/self" } - , "foo": { "href": "/foo" } - , "bar": { "href": "/bar" } - } -}` - - user := &HypermediaUser{} - decode(t, input, user) - - rels := HypermediaDecoder(user) - assert.Equal(t, 3, len(rels)) - assert.Equal(t, "/self", string(rels["self"])) - assert.Equal(t, "/foo", string(rels["foo"])) - assert.Equal(t, "/bar", string(rels["bar"])) - - rel, err := rels.Rel("foo", nil) - assert.Equal(t, nil, err) - assert.Equal(t, "/foo", rel.Path) -} - -func TestExpandAbsoluteUrls(t *testing.T) { 
- link := Hyperlink("/foo/bar{/arg}") - u, err := link.Expand(M{"arg": "baz", "foo": "bar"}) - assert.Equal(t, nil, err) - assert.Equal(t, "/foo/bar/baz", u.String()) -} - -func TestExpandRelativePaths(t *testing.T) { - link := Hyperlink("foo/bar{/arg}") - u, err := link.Expand(M{"arg": "baz", "foo": "bar"}) - assert.Equal(t, nil, err) - assert.Equal(t, "foo/bar/baz", u.String()) -} - -func TestExpandNil(t *testing.T) { - link := Hyperlink("/foo/bar{/arg}") - u, err := link.Expand(nil) - assert.Equal(t, nil, err) - assert.Equal(t, "/foo/bar", u.String()) -} - -func TestDecode(t *testing.T) { - input := ` -{ "Login": "bob" -, "Url": "/foo/bar{/arg}" -, "_links": - { "self": { "href": "/foo/bar{/arg}" } - } -}` - - user := &HypermediaUser{} - decode(t, input, user) - - assert.Equal(t, "bob", user.Login) - assert.Equal(t, 1, len(user.Links)) - - hl := user.Url - url, err := hl.Expand(M{"arg": "baz"}) - if err != nil { - t.Errorf("Errors parsing %s: %s", hl, err) - } - - assert.Equal(t, "/foo/bar/baz", url.String()) - - hl = user.Links["self"].Href - url, err = hl.Expand(M{"arg": "baz"}) - if err != nil { - t.Errorf("Errors parsing %s: %s", hl, err) - } - assert.Equal(t, "/foo/bar/baz", url.String()) -} - -func decode(t *testing.T, input string, resource interface{}) { - dec := json.NewDecoder(bytes.NewBufferString(input)) - err := dec.Decode(resource) - if err != nil { - t.Fatalf("Errors decoding json: %s", err) - } -} - -type HypermediaUser struct { - Login string - Url Hyperlink - *HALResource -} - -type ReflectedUser struct { - Login string - Url Hyperlink - FooUrl Hyperlink - FooBarUrl Hyperlink - Whatever Hyperlink `json:"whatever" rel:"whatevs"` - HomepageUrl string -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go deleted file mode 100644 index be7c01e..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go +++ /dev/null @@ -1,56 +0,0 @@ -package mediaheader - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" - "net/http" - "net/url" - "strings" -) - -// TODO: need a full link header parser for http://tools.ietf.org/html/rfc5988 -type Decoder struct { -} - -func (d *Decoder) Decode(header http.Header) (mediaHeader *MediaHeader) { - mediaHeader = &MediaHeader{Relations: hypermedia.Relations{}} - - link := header.Get("Link") - if len(link) == 0 { - return - } - - for _, l := range strings.Split(link, ",") { - l = strings.TrimSpace(l) - segments := strings.Split(l, ";") - - if len(segments) < 2 { - continue - } - - if !strings.HasPrefix(segments[0], "<") || !strings.HasSuffix(segments[0], ">") { - continue - } - - url, err := url.Parse(segments[0][1 : len(segments[0])-1]) - if err != nil { - continue - } - - link := hypermedia.Hyperlink(url.String()) - - for _, segment := range segments[1:] { - switch strings.TrimSpace(segment) { - case `rel="next"`: - mediaHeader.Relations["next"] = link - case `rel="prev"`: - mediaHeader.Relations["prev"] = link - case `rel="first"`: - mediaHeader.Relations["first"] = link - case `rel="last"`: - mediaHeader.Relations["last"] = link - } - } - } - - return -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go 
b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go deleted file mode 100644 index 7264a0f..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go +++ /dev/null @@ -1,18 +0,0 @@ -package mediaheader - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "net/http" - "testing" -) - -func TestDecoder_Decode(t *testing.T) { - link := `<https://api.github.com/user/repos?page=3&per_page=100>; rel="next", <https://api.github.com/user/repos?page=50&per_page=100>; rel="last"` - header := http.Header{} - header.Add("Link", link) - decoder := Decoder{} - mediaHeader := decoder.Decode(header) - - assert.Equal(t, "https://api.github.com/user/repos?page=3&per_page=100", string(mediaHeader.Relations["next"])) - assert.Equal(t, "https://api.github.com/user/repos?page=50&per_page=100", string(mediaHeader.Relations["last"])) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go deleted file mode 100644 index 700faef..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go +++ /dev/null @@ -1,9 +0,0 @@ -package mediaheader - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -type MediaHeader struct { - Relations hypermedia.Relations -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go deleted file mode 100644 index 57b6367..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go +++ /dev/null @@ -1,71 +0,0 @@ -package mediatype - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "io" - "strings" - "testing" -) - -func TestAddEncoder(t *testing.T) { - mt, err := Parse("application/test+test") - if err != nil { - t.Fatalf("Error parsing media type: %s", err.Error()) - } - - person := &Person{"bob"} - buf, err := mt.Encode(person) - if err != nil { - t.Fatalf("Error encoding: %s", err.Error()) - } - - assert.Equal(t, "bob", buf.String()) -} - -func TetRequiresEncoder(t *testing.T) { - mt, err := Parse("application/test+whatevs") - if err != nil { - t.Fatalf("Error parsing media type: %s", err.Error()) - } - - person := &Person{"bob"} - _, err = mt.Encode(person) - if err == nil { - t.Fatal("No encoding error") - } - - if !strings.HasPrefix(err.Error(), "No encoder found for format whatevs") { - t.Fatalf("Bad error: %s", err) - } -} - -func TetRequiresEncodedResource(t *testing.T) { - mt, err := Parse("application/test+test") - if err != nil { - t.Fatalf("Error parsing media type: %s", err.Error()) - } - - _, err = mt.Encode(nil) - if err == nil { - t.Fatal("No encoding error") - } - - assert.Equal(t, "Nothing to encode", err.Error()) -} - -type PersonEncoder struct { - body io.Writer -} - -func (d *PersonEncoder) Encode(v interface{}) error { - if p, ok := v.(*Person); ok { - d.body.Write([]byte(p.Name)) - } - return nil -} - -func init() { - AddEncoder("test", func(w io.Writer) Encoder { - return &PersonEncoder{w} - }) -} diff --git
a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go deleted file mode 100644 index 6cf228f..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go +++ /dev/null @@ -1,103 +0,0 @@ -package sawyer - -import ( - "io/ioutil" - "net/http" - "net/url" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" -) - -type Request struct { - Client *http.Client - MediaType *mediatype.MediaType - Query url.Values - *http.Request -} - -func (c *Client) NewRequest(rawurl string) (*Request, error) { - u, err := c.ResolveReferenceString(rawurl) - if err != nil { - return nil, err - } - - httpreq, err := http.NewRequest(GetMethod, u, nil) - for key, _ := range c.Header { - httpreq.Header.Set(key, c.Header.Get(key)) - } - - return &Request{c.HttpClient, nil, httpreq.URL.Query(), httpreq}, err -} - -func (r *Request) Do(method string) *Response { - r.URL.RawQuery = r.Query.Encode() - r.Method = method - httpres, err := r.Client.Do(r.Request) - if err != nil { - return ResponseError(err) - } - - mtype, err := mediaType(httpres) - if err != nil { - httpres.Body.Close() - return ResponseError(err) - } - - headerDecoder := mediaheader.Decoder{} - mheader := headerDecoder.Decode(httpres.Header) - - return &Response{nil, mtype, mheader, UseApiError(httpres.StatusCode), false, httpres} -} - -func (r *Request) Head() *Response { - return r.Do(HeadMethod) -} - -func (r *Request) Get() *Response { - return r.Do(GetMethod) -} - -func (r *Request) Post() *Response { - return r.Do(PostMethod) -} - -func (r *Request) Put() *Response { - return r.Do(PutMethod) -} - -func (r *Request) Patch() *Response { - return r.Do(PatchMethod) -} - -func (r *Request) Delete() *Response { - return r.Do(DeleteMethod) -} - -func (r *Request) Options() *Response { - return r.Do(OptionsMethod) -} - -func (r *Request) SetBody(mtype *mediatype.MediaType, input interface{}) error { - r.MediaType = mtype - buf, err := mtype.Encode(input) - if err != nil { - return err - } - - r.Header.Set(ctypeHeader, mtype.String()) - r.ContentLength = int64(buf.Len()) - r.Body = ioutil.NopCloser(buf) - return nil -} - -const ( - ctypeHeader = "Content-Type" - HeadMethod = "HEAD" - GetMethod = "GET" - PostMethod = "POST" - PutMethod = "PUT" - PatchMethod = "PATCH" - DeleteMethod = "DELETE" - OptionsMethod = "OPTIONS" -) diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go deleted file mode 100644 index b0c17a5..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go +++ /dev/null @@ -1,236 +0,0 @@ -package sawyer - -import ( - "encoding/json" - "net/http" - "net/http/httptest" - "strings" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" -) - -func TestSuccessfulGet(t *testing.T) { - setup := Setup(t) - defer setup.Teardown() - - setup.Mux.HandleFunc("/user", func(w http.ResponseWriter, r *http.Request) { - 
assert.Equal(t, "GET", r.Method) - head := w.Header() - head.Set("Content-Type", "application/json") - link := `<https://api.github.com/user/repos?page=3&per_page=100>; rel="next", <https://api.github.com/user/repos?page=50&per_page=100>; rel="last"` - head.Set("Link", link) - w.WriteHeader(http.StatusOK) - w.Write([]byte(`{"id": 1, "login": "sawyer"}`)) - }) - - client := setup.Client - user := &TestUser{} - - req, err := client.NewRequest("user") - assert.Equal(t, nil, err) - - res := req.Get() - assert.Equal(t, false, res.IsError()) - assert.Equal(t, false, res.IsApiError()) - - assert.Equal(t, nil, res.Decode(user)) - assert.Equal(t, 200, res.StatusCode) - assert.Equal(t, 1, user.Id) - assert.Equal(t, "sawyer", user.Login) - - mheader := res.MediaHeader - assert.Equal(t, "https://api.github.com/user/repos?page=3&per_page=100", string(mheader.Relations["next"])) - assert.Equal(t, "https://api.github.com/user/repos?page=50&per_page=100", string(mheader.Relations["last"])) -} - -func TestSuccessfulGetWithoutOutput(t *testing.T) { - setup := Setup(t) - defer setup.Teardown() - - setup.Mux.HandleFunc("/user", func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "GET", r.Method) - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusOK) - w.Write([]byte(`{"id": 1, "login": "sawyer"}`)) - }) - - client := setup.Client - user := &TestUser{} - - req, err := client.NewRequest("user") - assert.Equal(t, nil, err) - - res := req.Get() - assert.Equal(t, false, res.IsError()) - assert.Equal(t, false, res.IsApiError()) - - assert.Tf(t, !res.IsError(), "Response shouldn't have error") - assert.Equal(t, 200, res.StatusCode) - assert.Equal(t, false, res.BodyClosed) - assert.Equal(t, 0, user.Id) - assert.Equal(t, "", user.Login) - - dec := json.NewDecoder(res.Body) - dec.Decode(user) - assert.Equal(t, 1, user.Id) - assert.Equal(t, "sawyer", user.Login) -} - -func TestSuccessfulGetWithoutDecoder(t *testing.T) { - setup := Setup(t) - defer setup.Teardown() - - setup.Mux.HandleFunc("/user", func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "GET", r.Method) - head := w.Header() - head.Set("Content-Type", "application/booya+booya") - w.WriteHeader(http.StatusOK) - w.Write([]byte(`{"id": 1, "login": "sawyer"}`)) - }) - - client := setup.Client - user := &TestUser{} - - req, err := client.NewRequest("user") - assert.Equal(t, nil, err) - - res := req.Get() - assert.NotEqual(t, nil, res.Decode(user), "response should have decoder error") - assert.Tf(t, strings.HasPrefix(res.Error(), "No decoder found for format booya"), "Bad error: %s", res.Error()) -} - -func TestSuccessfulPost(t *testing.T) { - setup := Setup(t) - defer setup.Teardown() - - mtype, err := mediatype.Parse("application/json") - - setup.Mux.HandleFunc("/users", func(w http.ResponseWriter, r *http.Request) { - assert.Equal(t, "POST", r.Method) - assert.Equal(t, mtype.String(), r.Header.Get("Content-Type")) - - user := &TestUser{} - mtype.Decode(user, r.Body) - assert.Equal(t, "sawyer", user.Login) - - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusCreated) - w.Write([]byte(`{"login": "sawyer2"}`)) - }) - - client := setup.Client - user := &TestUser{} - - req, err := client.NewRequest("users") - assert.Equal(t, nil, err) - - user.Login = "sawyer" - req.SetBody(mtype, user) - res := req.Post() - assert.Equal(t, false, res.IsError()) - assert.Equal(t, false, res.IsApiError()) - assert.Equal(t, nil, res.Decode(user)) - - assert.Equal(t, nil, err) - assert.Equal(t, 201, res.StatusCode) - assert.Equal(t, "sawyer2", user.Login) - assert.Equal(t, true,
res.BodyClosed) -} - -func TestErrorResponse(t *testing.T) { - setup := Setup(t) - defer setup.Teardown() - - setup.Mux.HandleFunc("/404", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusNotFound) - w.Write([]byte(`{"message": "not found"}`)) - }) - - client := setup.Client - user := &TestUser{} - apierr := &TestError{} - - req, err := client.NewRequest("404") - if err != nil { - t.Fatalf("request errored: %s", err) - } - - res := req.Get() - assert.Equal(t, true, res.IsApiError()) - assert.Equal(t, false, res.IsError()) - assert.Equal(t, nil, res.Decode(apierr)) - - assert.Equal(t, 404, res.StatusCode) - assert.Equal(t, 0, user.Id) - assert.Equal(t, "", user.Login) - assert.Equal(t, "not found", apierr.Message) - assert.Equal(t, true, res.BodyClosed) -} - -func TestResolveRequestQuery(t *testing.T) { - setup := Setup(t) - defer setup.Teardown() - - setup.Mux.HandleFunc("/q", func(w http.ResponseWriter, r *http.Request) { - q := r.URL.Query() - assert.Equal(t, "1", q.Get("a")) - assert.Equal(t, "4", q.Get("b")) - assert.Equal(t, "3", q.Get("c")) - assert.Equal(t, "2", q.Get("d")) - assert.Equal(t, "1", q.Get("e")) - w.WriteHeader(123) - w.Write([]byte("ok")) - }) - - assert.Equal(t, "1", setup.Client.Query.Get("a")) - assert.Equal(t, "1", setup.Client.Query.Get("b")) - - setup.Client.Query.Set("b", "2") - setup.Client.Query.Set("c", "3") - - req, err := setup.Client.NewRequest("/q?d=4") - assert.Equal(t, nil, err) - - req.Query.Set("b", "4") - req.Query.Set("c", "3") - req.Query.Set("d", "2") - req.Query.Set("e", "1") - - res := req.Get() - assert.Equal(t, nil, err) - assert.Equal(t, 123, res.StatusCode) -} - -type TestUser struct { - Id int `json:"id"` - Login string `json:"login"` -} - -type TestError struct { - Message string `json:"message"` -} - -type SetupServer struct { - Client *Client - Server *httptest.Server - Mux *http.ServeMux -} - -func Setup(t *testing.T) *SetupServer { - mux := http.NewServeMux() - srv := httptest.NewServer(mux) - client, err := NewFromString(srv.URL+"?a=1&b=1", nil) - assert.Equalf(t, nil, err, "Unable to parse %s", srv.URL) - - return &SetupServer{client, srv, mux} -} - -func (s *SetupServer) Teardown() { - s.Server.Close() -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go deleted file mode 100644 index ec7ac4c..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go +++ /dev/null @@ -1,87 +0,0 @@ -package sawyer - -import ( - "errors" - "net/http" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" -) - -type Response struct { - ResponseError error - MediaType *mediatype.MediaType - MediaHeader *mediaheader.MediaHeader - isApiError bool - BodyClosed bool - *http.Response -} - -func (r *Response) AnyError() bool { - return r.IsError() || r.IsApiError() -} - -func (r *Response) IsError() bool { - return r.ResponseError != nil -} - -func (r *Response) IsApiError() bool { - return r.isApiError -} - -func (r *Response) Error() string { - if r.ResponseError != nil { - return r.ResponseError.Error() - } - return "" -} - -func (r *Response) Decode(resource interface{}) error { - if 
r.MediaType == nil { - return errors.New("No media type for this response") - } - - if resource == nil || r.ResponseError != nil || r.BodyClosed { - return r.ResponseError - } - - defer r.Body.Close() - r.BodyClosed = true - - dec, err := r.MediaType.Decoder(r.Body) - if err != nil { - r.ResponseError = err - } else { - r.ResponseError = dec.Decode(resource) - } - return r.ResponseError -} - -func (r *Response) decode(output interface{}) { - if !r.isApiError { - r.Decode(output) - } -} - -func ResponseError(err error) *Response { - return &Response{ResponseError: err, BodyClosed: true} -} - -func UseApiError(status int) bool { - switch { - case status > 199 && status < 300: - return false - case status == 304: - return false - case status == 0: - return false - } - return true -} - -func mediaType(res *http.Response) (*mediatype.MediaType, error) { - if ctype := res.Header.Get(ctypeHeader); len(ctype) > 0 { - return mediatype.Parse(ctype) - } - return nil, nil -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go deleted file mode 100644 index 2217bb4..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go +++ /dev/null @@ -1,89 +0,0 @@ -package sawyer - -import ( - "encoding/json" - "io" - "net/http" - "net/url" - "strings" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" -) - -// The default httpClient used if one isn't specified. -var httpClient = &http.Client{} - -// A Client wraps an *http.Client with a base url Endpoint and common header and -// query values. -type Client struct { - HttpClient *http.Client - Endpoint *url.URL - Header http.Header - Query url.Values -} - -// New returns a new Client with a given a URL and an optional client. -func New(endpoint *url.URL, client *http.Client) *Client { - if client == nil { - client = httpClient - } - - if len(endpoint.Path) > 0 && !strings.HasSuffix(endpoint.Path, "/") { - endpoint.Path = endpoint.Path + "/" - } - - return &Client{client, endpoint, make(http.Header), endpoint.Query()} -} - -// NewFromString returns a new Client given a string URL and an optional client. -func NewFromString(endpoint string, client *http.Client) (*Client, error) { - e, err := url.Parse(endpoint) - if err != nil { - return nil, err - } - - return New(e, client), nil -} - -// ResolveReference resolves a URI reference to an absolute URI from an absolute -// base URI. It also merges the query values. -func (c *Client) ResolveReference(u *url.URL) *url.URL { - absurl := c.Endpoint.ResolveReference(u) - if len(c.Query) > 0 { - absurl.RawQuery = mergeQueries(c.Query, absurl.Query()) - } - return absurl -} - -// ResolveReference resolves a string URI reference to an absolute URI from an -// absolute base URI. It also merges the query values. 
-func (c *Client) ResolveReferenceString(rawurl string) (string, error) { - u, err := url.Parse(rawurl) - if err != nil { - return "", err - } - return c.ResolveReference(u).String(), nil -} - -func mergeQueries(queries ...url.Values) string { - merged := make(url.Values) - for _, q := range queries { - if len(q) == 0 { - break - } - - for key, _ := range q { - merged.Set(key, q.Get(key)) - } - } - return merged.Encode() -} - -func init() { - mediatype.AddDecoder("json", func(r io.Reader) mediatype.Decoder { - return json.NewDecoder(r) - }) - mediatype.AddEncoder("json", func(w io.Writer) mediatype.Encoder { - return json.NewEncoder(w) - }) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt deleted file mode 100644 index fd4005e..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -gofmt -w -l *.go ./mediatype diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test deleted file mode 100644 index b49a966..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test +++ /dev/null @@ -1,3 +0,0 @@ -script/fmt -go test -race -v "$@" ./mediatype -go test -race -v "$@" . diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore deleted file mode 100644 index 0026861..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules deleted file mode 100644 index 8eb6ba0..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "tests"] - path = tests - url = https://github.com/uri-templates/uritemplate-test.git diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md deleted file mode 100644 index 4c0a96e..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md +++ /dev/null @@ -1,10 +0,0 @@ -{{ .EmitHeader }} - -[![Build Status](https://travis-ci.org/jtacoma/uritemplates.png)](https://travis-ci.org/jtacoma/uritemplates) [![Coverage Status](https://coveralls.io/repos/jtacoma/uritemplates/badge.png)](https://coveralls.io/r/jtacoma/uritemplates) - -{{ .EmitSynopsis 
}} - -## License - -Use of this source code is governed by a BSD-style license that can be found in -the LICENSE file. diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml deleted file mode 100644 index 4f2ee4d..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml +++ /dev/null @@ -1 +0,0 @@ -language: go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go deleted file mode 100644 index cee2a83..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go +++ /dev/null @@ -1,56 +0,0 @@ -package octokit - -import ( - "encoding/base64" - "fmt" - "net/url" - "os" - "path/filepath" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc" -) - -// See http://developer.github.com/v3/auth/ -type AuthMethod interface { - fmt.Stringer -} - -type BasicAuth struct { - Login string - Password string - OneTimePassword string // for two-factor authentication -} - -func (b BasicAuth) String() string { - return fmt.Sprintf("Basic %s", hashAuth(b.Login, b.Password)) -} - -type NetrcAuth struct { - NetrcPath string -} - -func (n NetrcAuth) String() string { - netrcPath := n.NetrcPath - if netrcPath == "" { - netrcPath = filepath.Join(os.Getenv("HOME"), ".netrc") - } - apiURL, _ := url.Parse(gitHubAPIURL) - credentials, err := netrc.FindMachine(netrcPath, apiURL.Host) - if err != nil { - panic(fmt.Errorf("netrc error (%s): %v", apiURL.Host, err)) - } - return fmt.Sprintf("Basic %s", hashAuth(credentials.Login, credentials.Password)) -} - -func hashAuth(u, p string) string { - var a = fmt.Sprintf("%s:%s", u, p) - return base64.StdEncoding.EncodeToString([]byte(a)) -} - -type TokenAuth struct { - AccessToken string -} - -func (t TokenAuth) String() string { - return fmt.Sprintf("token %s", t.AccessToken) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go deleted file mode 100644 index 21dc8b2..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go +++ /dev/null @@ -1,22 +0,0 @@ -package octokit - -import ( - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestBasicAuth(t *testing.T) { - basicAuth := BasicAuth{Login: "jingweno", Password: "password"} - assert.Equal(t, "Basic amluZ3dlbm86cGFzc3dvcmQ=", basicAuth.String()) -} - -func TestNetrcAuth(t *testing.T) { - netrcAuth := NetrcAuth{NetrcPath: "../fixtures/example.netrc"} - assert.Equal(t, "Basic Y2F0c2J5OnY1UDZmZ2huN19hX2Zha2VfY29kZV9QR3VlbHZiRmF4QlBrVWcxaWI=", netrcAuth.String()) -} - -func TestTokenAuth(t *testing.T) { - tokenAuth := TokenAuth{AccessToken: "token"} - assert.Equal(t, "token token", tokenAuth.String()) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go 
b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go deleted file mode 100644 index 6fe7ef5..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go +++ /dev/null @@ -1,68 +0,0 @@ -package octokit - -import ( - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - AuthorizationsURL = Hyperlink("authorizations{/id}") -) - -// Create a AuthorizationsService with the base url.URL -func (c *Client) Authorizations(url *url.URL) (auths *AuthorizationsService) { - auths = &AuthorizationsService{client: c, URL: url} - return -} - -type AuthorizationsService struct { - client *Client - URL *url.URL -} - -func (a *AuthorizationsService) One() (auth *Authorization, result *Result) { - result = a.client.get(a.URL, &auth) - return -} - -func (a *AuthorizationsService) All() (auths []Authorization, result *Result) { - result = a.client.get(a.URL, &auths) - return -} - -func (a *AuthorizationsService) Create(params interface{}) (auth *Authorization, result *Result) { - result = a.client.post(a.URL, params, &auth) - return -} - -type Authorization struct { - *hypermedia.HALResource - - ID int `json:"id,omitempty"` - URL string `json:"url,omitempty"` - App App `json:"app,omitempty"` - Token string `json:"token,omitempty"` - Note string `json:"note,omitempty"` - NoteURL string `json:"note_url,omitempty"` - Scopes []string `json:"scopes,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` -} - -type App struct { - *hypermedia.HALResource - - ClientID string `json:"client_id,omitempty"` - URL string `json:"url,omitempty"` - Name string `json:"name,omitempty"` -} - -type AuthorizationParams struct { - Scopes []string `json:"scopes,omitempty"` - Note string `json:"note,omitempty"` - NoteURL string `json:"note_url,omitempty"` - ClientID string `json:"client_id,omitempty"` - ClientSecret string `json:"client_secret,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go deleted file mode 100644 index 78d3e14..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go +++ /dev/null @@ -1,109 +0,0 @@ -package octokit - -import ( - "encoding/json" - "net/http" - "reflect" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestAuthorizationsService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/authorizations/1", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("authorization.json")) - }) - - url, err := AuthorizationsURL.Expand(M{"id": 1}) - assert.Equal(t, nil, err) - - auth, result := client.Authorizations(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, 1, auth.ID) - assert.Equal(t, "https://api.github.com/authorizations/1", auth.URL) - assert.Equal(t, "456", auth.Token) - assert.Equal(t, "", auth.Note) - assert.Equal(t, "", auth.NoteURL) - assert.Equal(t, "2012-11-16 01:05:51 +0000 UTC", auth.CreatedAt.String()) - assert.Equal(t, "2013-08-21 03:29:51 +0000 UTC", 
auth.UpdatedAt.String()) - - app := App{ClientID: "123", URL: "http://localhost:8080", Name: "Test"} - assert.Equal(t, app, auth.App) - - assert.Equal(t, 2, len(auth.Scopes)) - scopes := []string{"repo", "user"} - assert.T(t, reflect.DeepEqual(auth.Scopes, scopes)) -} - -func TestAuthorizationsService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/authorizations", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("authorizations.json")) - }) - - url, err := AuthorizationsURL.Expand(nil) - assert.Equal(t, nil, err) - - auths, result := client.Authorizations(url).All() - assert.T(t, !result.HasError()) - - firstAuth := auths[0] - assert.Equal(t, 1, firstAuth.ID) - assert.Equal(t, "https://api.github.com/authorizations/1", firstAuth.URL) - assert.Equal(t, "456", firstAuth.Token) - assert.Equal(t, "", firstAuth.Note) - assert.Equal(t, "", firstAuth.NoteURL) - assert.Equal(t, "2012-11-16 01:05:51 +0000 UTC", firstAuth.CreatedAt.String()) - assert.Equal(t, "2013-08-21 03:29:51 +0000 UTC", firstAuth.UpdatedAt.String()) - - app := App{ClientID: "123", URL: "http://localhost:8080", Name: "Test"} - assert.Equal(t, app, firstAuth.App) - - assert.Equal(t, 2, len(firstAuth.Scopes)) - scopes := []string{"repo", "user"} - assert.T(t, reflect.DeepEqual(firstAuth.Scopes, scopes)) -} - -func TestAuthorizationsService_Create(t *testing.T) { - setup() - defer tearDown() - - params := AuthorizationParams{Scopes: []string{"public_repo"}} - - mux.HandleFunc("/authorizations", func(w http.ResponseWriter, r *http.Request) { - var authParams AuthorizationParams - json.NewDecoder(r.Body).Decode(&authParams) - assert.T(t, reflect.DeepEqual(authParams, params)) - - testMethod(t, r, "POST") - respondWithJSON(w, loadFixture("create_authorization.json")) - }) - - url, err := AuthorizationsURL.Expand(nil) - assert.Equal(t, nil, err) - - auth, _ := client.Authorizations(url).Create(params) - - assert.Equal(t, 3844190, auth.ID) - assert.Equal(t, "https://api.github.com/authorizations/3844190", auth.URL) - assert.Equal(t, "123", auth.Token) - assert.Equal(t, "", auth.Note) - assert.Equal(t, "", auth.NoteURL) - assert.Equal(t, "2013-09-28 18:44:39 +0000 UTC", auth.CreatedAt.String()) - assert.Equal(t, "2013-09-28 18:44:39 +0000 UTC", auth.UpdatedAt.String()) - - app := App{ClientID: "00000000000000000000", URL: "http://developer.github.com/v3/oauth/#oauth-authorizations-api", Name: "GitHub API"} - assert.Equal(t, app, auth.App) - - assert.Equal(t, 1, len(auth.Scopes)) - scopes := []string{"public_repo"} - assert.T(t, reflect.DeepEqual(auth.Scopes, scopes)) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go deleted file mode 100644 index 749c740..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go +++ /dev/null @@ -1,146 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestSuccessfulGet(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Accept", defaultMediaType) - testHeader(t, r, "User-Agent", userAgent) - testHeader(t, r, "Authorization", "token 
token") - respondWithJSON(w, `{"login": "octokit"}`) - }) - - req, err := client.NewRequest("foo") - assert.Equal(t, nil, err) - - var output map[string]interface{} - _, err = req.Get(&output) - assert.Equal(t, nil, err) - assert.Equal(t, "octokit", output["login"]) -} - -func TestSuccessfulGet_BasicAuth(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Accept", defaultMediaType) - testHeader(t, r, "User-Agent", userAgent) - testHeader(t, r, "Authorization", "Basic amluZ3dlbm86cGFzc3dvcmQ=") - testHeader(t, r, "X-GitHub-OTP", "OTP") - respondWithJSON(w, `{"login": "octokit"}`) - }) - - client = NewClientWith( - server.URL, - userAgent, - BasicAuth{ - Login: "jingweno", - Password: "password", - OneTimePassword: "OTP", - }, - nil) - req, err := client.NewRequest("foo") - assert.Equal(t, nil, err) - - var output map[string]interface{} - _, err = req.Get(&output) - assert.Equal(t, nil, err) - assert.Equal(t, "octokit", output["login"]) -} - -func TestGetWithoutDecoder(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - head := w.Header() - head.Set("Content-Type", "application/booya+booya") - respondWith(w, `{"login": "octokit"}`) - }) - - req, err := client.NewRequest("foo") - assert.Equal(t, nil, err) - - var output map[string]interface{} - _, err = req.Get(output) - assert.NotEqual(t, nil, err) -} - -func TestGetResponseError(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) { - header := w.Header() - header.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusNotFound) - respondWith(w, `{"message": "not found"}`) - }) - - req, err := client.NewRequest("foo") - assert.Equal(t, nil, err) - - var output map[string]interface{} - _, err = req.Get(output) - assert.NotEqual(t, nil, err) - respErr, ok := err.(*ResponseError) - assert.Tf(t, ok, "should be able to convert to *ResponseError") - assert.Equal(t, "not found", respErr.Message) - assert.Equal(t, ErrorNotFound, respErr.Type) -} - -func TestSuccessfulPost(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testHeader(t, r, "Accept", defaultMediaType) - testHeader(t, r, "Content-Type", defaultMediaType) - testHeader(t, r, "User-Agent", userAgent) - testHeader(t, r, "Authorization", "token token") - testBody(t, r, "{\"input\":\"bar\"}\n") - respondWithJSON(w, `{"login": "octokit"}`) - }) - - req, err := client.NewRequest("foo") - assert.Equal(t, nil, err) - - input := map[string]interface{}{"input": "bar"} - var output map[string]interface{} - _, err = req.Post(input, &output) - assert.Equal(t, nil, err) - assert.Equal(t, "octokit", output["login"]) -} - -func TestAddHeader(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/foo", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Foo", "Bar") - assert.Equal(t, "example.com", r.Host) - respondWithJSON(w, `{"login": "octokit"}`) - }) - - client.Header.Set("Host", "example.com") - client.Header.Set("Foo", "Bar") - req, err := client.NewRequest("foo") - assert.Equal(t, nil, err) - - _, err = req.Get(nil) - assert.Equal(t, nil, err) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go 
b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go deleted file mode 100644 index b8e6bf2..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go +++ /dev/null @@ -1,93 +0,0 @@ -package octokit - -import ( - "io" - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var CommitsURL = Hyperlink("repos/{owner}/{repo}/commits{/sha}") - -func (c *Client) Commits(url *url.URL) (commits *CommitsService) { - commits = &CommitsService{client: c, URL: url} - return -} - -type CommitsService struct { - client *Client - URL *url.URL -} - -// Get all commits on CommitsService#URL -func (c *CommitsService) All() (commits []Commit, result *Result) { - result = c.client.get(c.URL, &commits) - return -} - -// Get a commit based on CommitsService#URL -func (c *CommitsService) One() (commit *Commit, result *Result) { - result = c.client.get(c.URL, &commit) - return -} - -// Get a commit patch based on CommitsService#URL -func (c *CommitsService) Patch() (patch io.ReadCloser, result *Result) { - patch, result = c.client.getBody(c.URL, patchMediaType) - return -} - -type CommitFile struct { - Additions int `json:"additions,omitempty"` - BlobURL string `json:"blob_url,omitempty"` - Changes int `json:"changes,omitempty"` - ContentsURL string `json:"contents_url,omitempty"` - Deletions int `json:"deletions,omitempty"` - Filename string `json:"filename,omitempty"` - Patch string `json:"patch,omitempty"` - RawURL string `json:"raw_url,omitempty"` - Sha string `json:"sha,omitempty"` - Status string `json:"status,omitempty"` -} - -type CommitStats struct { - Additions int `json:"additions,omitempty"` - Deletions int `json:"deletions,omitempty"` - Total int `json:"total,omitempty"` -} - -type CommitCommit struct { - Author struct { - Date *time.Time `json:"date,omitempty"` - Email string `json:"email,omitempty"` - Name string `json:"name,omitempty"` - } `json:"author,omitempty"` - CommentCount int `json:"comment_count,omitempty"` - Committer struct { - Date *time.Time `json:"date,omitempty"` - Email string `json:"email,omitempty"` - Name string `json:"name,omitempty"` - } `json:"committer,omitempty"` - Message string `json:"message,omitempty"` - Tree struct { - Sha string `json:"sha,omitempty"` - URL string `json:"url,omitempty"` - } `json:"tree,omitempty"` - URL string `json:"url,omitempty"` -} - -type Commit struct { - *hypermedia.HALResource - - Author *User `json:"author,omitempty"` - CommentsURL string `json:"comments_url,omitempty"` - Commit *CommitCommit `json:"commit,omitempty"` - Committer *User `json:"committer,omitempty"` - Files []CommitFile `json:"files,omitempty"` - HtmlURL string `json:"html_url,omitempty"` - Parents []Commit `json:"parents,omitempty"` - Sha string `json:"sha,omitempty"` - Stats CommitStats `json:"stats,omitempty"` - URL string `json:"url,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go deleted file mode 100644 index 5137fff..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go +++ /dev/null @@ -1,173 +0,0 @@ -package octokit - -import ( - "fmt" - "net/http" - "regexp" - "strings" - - 
"github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" -) - -type ResponseErrorType int - -const ( - ErrorClientError ResponseErrorType = iota // 400-499 - ErrorBadRequest ResponseErrorType = iota // 400 - ErrorUnauthorized ResponseErrorType = iota // 401 - ErrorOneTimePasswordRequired ResponseErrorType = iota // 401 - ErrorForbidden ResponseErrorType = iota // 403 - ErrorTooManyRequests ResponseErrorType = iota // 403 - ErrorTooManyLoginAttempts ResponseErrorType = iota // 403 - ErrorNotFound ResponseErrorType = iota // 404 - ErrorNotAcceptable ResponseErrorType = iota // 406 - ErrorUnsupportedMediaType ResponseErrorType = iota // 414 - ErrorUnprocessableEntity ResponseErrorType = iota // 422 - ErrorServerError ResponseErrorType = iota // 500-599 - ErrorInternalServerError ResponseErrorType = iota // 500 - ErrorNotImplemented ResponseErrorType = iota // 501 - ErrorBadGateway ResponseErrorType = iota // 502 - ErrorServiceUnavailable ResponseErrorType = iota // 503 - ErrorMissingContentType ResponseErrorType = iota - ErrorUnknownError ResponseErrorType = iota -) - -type ErrorObject struct { - Resource string `json:"resource,omitempty"` - Code string `json:"code,omitempty"` - Field string `json:"field,omitempty"` - Message string `json:"message,omitempty"` -} - -func (e *ErrorObject) Error() string { - err := fmt.Sprintf("%v error", e.Code) - if e.Field != "" { - err = fmt.Sprintf("%v caused by %v field", err, e.Field) - } - err = fmt.Sprintf("%v on %v resource", err, e.Resource) - if e.Message != "" { - err = fmt.Sprintf("%v: %v", err, e.Message) - } - - return err -} - -type ResponseError struct { - Response *http.Response `json:"-"` - Type ResponseErrorType `json:"-"` - Message string `json:"message,omitempty"` - Err string `json:"error,omitempty"` - Errors []ErrorObject `json:"errors,omitempty"` - DocumentationURL string `json:"documentation_url,omitempty"` -} - -func (e *ResponseError) Error() string { - return fmt.Sprintf("%v %v: %d - %s", - e.Response.Request.Method, e.Response.Request.URL, - e.Response.StatusCode, e.errorMessage()) -} - -func (e *ResponseError) errorMessage() string { - messages := []string{} - - if e.Message != "" { - messages = append(messages, e.Message) - } - - if e.Err != "" { - m := fmt.Sprintf("Error: %s", e.Err) - messages = append(messages, m) - } - - if len(e.Errors) > 0 { - m := []string{} - m = append(m, "\nError summary:") - for _, e := range e.Errors { - m = append(m, fmt.Sprintf("\t%s", e.Error())) - } - messages = append(messages, strings.Join(m, "\n")) - } - - if e.DocumentationURL != "" { - messages = append(messages, fmt.Sprintf("// See: %s", e.DocumentationURL)) - } - - return strings.Join(messages, "\n") -} - -func NewResponseError(resp *sawyer.Response) (err *ResponseError) { - err = &ResponseError{} - - e := resp.Decode(&err) - if e != nil { - err.Message = fmt.Sprintf("Problems parsing error message: %s", e) - } - - err.Response = resp.Response - err.Type = getResponseErrorType(err) - return -} - -func getResponseErrorType(err *ResponseError) ResponseErrorType { - code := err.Response.StatusCode - header := err.Response.Header - - switch { - case code == http.StatusBadRequest: - return ErrorBadRequest - - case code == http.StatusUnauthorized: - otp := header.Get("X-GitHub-OTP") - r := regexp.MustCompile(`(?i)required; (\w+)`) - if r.MatchString(otp) { - return ErrorOneTimePasswordRequired - } - - return ErrorUnauthorized - - case code == http.StatusForbidden: - msg := err.Message - rr := regexp.MustCompile("(?i)rate 
limit exceeded") - if rr.MatchString(msg) { - return ErrorTooManyRequests - } - lr := regexp.MustCompile("(?i)login attempts exceeded") - if lr.MatchString(msg) { - return ErrorTooManyLoginAttempts - } - - return ErrorForbidden - - case code == http.StatusNotFound: - return ErrorNotFound - - case code == http.StatusNotAcceptable: - return ErrorNotAcceptable - - case code == http.StatusUnsupportedMediaType: - return ErrorUnsupportedMediaType - - case code == 422: - return ErrorUnprocessableEntity - - case code >= 400 && code <= 499: - return ErrorClientError - - case code == http.StatusInternalServerError: - return ErrorInternalServerError - - case code == http.StatusNotImplemented: - return ErrorNotImplemented - - case code == http.StatusBadGateway: - return ErrorBadGateway - - case code == http.StatusServiceUnavailable: - return ErrorServiceUnavailable - - case code >= 500 && code <= 599: - return ErrorServerError - } - - return ErrorUnknownError -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go deleted file mode 100644 index b246b22..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go +++ /dev/null @@ -1,88 +0,0 @@ -package octokit - -import ( - "io" - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var GistsURL = Hyperlink("gists{/gist_id}") - -func (c *Client) Gists(url *url.URL) (gists *GistsService) { - gists = &GistsService{client: c, URL: url} - return -} - -// A service to return gist records -type GistsService struct { - client *Client - URL *url.URL -} - -// Get a gist based on GistsService#URL -func (g *GistsService) One() (gist *Gist, result *Result) { - result = g.client.get(g.URL, &gist) - return -} - -// Update a gist based on GistsService#URL -func (g *GistsService) Update(params interface{}) (gist *Gist, result *Result) { - result = g.client.put(g.URL, params, &gist) - return -} - -// Get a list of gists based on UserService#URL -func (g *GistsService) All() (gists []Gist, result *Result) { - result = g.client.get(g.URL, &gists) - return -} - -// Get raw contents of first file in a gist -func (g *GistsService) Raw() (body io.ReadCloser, result *Result) { - var gist *Gist - var rawURL *url.URL - - gist, result = g.One() - for _, file := range gist.Files { - rawURL, _ = url.Parse(file.RawURL) - break - } - - body, result = g.client.getBody(rawURL, textMediaType) - return -} - -type GistFile struct { - *hypermedia.HALResource - - FileName string `json:"filename,omitempty"` - Type string `json:"type,omitempty"` - Language string `json:"language,omitempty"` - RawURL string `json:"raw_url,omitempty"` - Size int `json:"size,omitempty"` - Truncated bool `json:"truncated,omitempty"` - Content string `json:"content,omitempty"` -} - -type Gist struct { - *hypermedia.HALResource - - ID string `json:"id,omitempty"` - Comments float64 `json:"comments,omitempty"` - CommentsURL string `json:"comments_url,omitempty"` - CommitsURL string `json:"commits_url,omitempty"` - CreatedAt string `json:"created_at,omitempty"` - Description string `json:"description,omitempty"` - Files map[string]*GistFile `json:"files,omitempty"` - ForksURL Hyperlink `json:"forks_url,omitempty"` - GitPullURL Hyperlink `json:"git_pull_url,omitempty"` - GitPushURL Hyperlink 
`json:"git_push_url,omitempty"` - HtmlURL Hyperlink `json:"html_url,omitempty"` - Owner *User `json:"owner,omitempty"` - Public bool `json:"public,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` - URL string `json:"url,omitempty"` - User *User `json:"user,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go deleted file mode 100644 index 99945e2..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestGitTreesService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/pengwynn/flint/git/trees/master", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("tree.json")) - }) - - url, err := GitTreesURL.Expand(M{ - "owner": "pengwynn", - "repo": "flint", - "sha": "master", - }) - assert.Equal(t, nil, err) - tree, result := client.GitTrees(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, "9c1337e761bbd517f3cc1b5acb9373b17f4810e8", tree.Sha) - assert.Equal(t, "https://api.github.com/repos/pengwynn/flint/git/trees/9c1337e761bbd517f3cc1b5acb9373b17f4810e8", tree.URL) - - entries := tree.Tree - assert.Equal(t, 9, len(entries)) -} - -func TestGitTreesService_One_Recursive(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/pengwynn/flint/git/trees/master", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("tree_recursive.json")) - }) - - url, err := GitTreesURL.Expand(M{ - "owner": "pengwynn", - "repo": "flint", - "sha": "master", - "recursive": "1", - }) - assert.Equal(t, nil, err) - tree, result := client.GitTrees(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, "9c1337e761bbd517f3cc1b5acb9373b17f4810e8", tree.Sha) - assert.Equal(t, "https://api.github.com/repos/pengwynn/flint/git/trees/9c1337e761bbd517f3cc1b5acb9373b17f4810e8", tree.URL) - - entries := tree.Tree - assert.Equal(t, 15, len(entries)) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go deleted file mode 100644 index c6c26b4..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go +++ /dev/null @@ -1,124 +0,0 @@ -package octokit - -import ( - "fmt" - "io/ioutil" - "net/http" - "net/http/httptest" - "net/url" - "os" - "path" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -var ( - // mux is the HTTP request multiplexer used with the test server. - mux *http.ServeMux - - // client is the GitHub client being tested. - client *Client - - // server is a test HTTP server used to provide mock API responses. - server *httptest.Server -) - -// A http.Transport subtype that re-routes all requests in testing to the local -// server as indicated by `overrideURL`. 
-type TestTransport struct { - http.RoundTripper - overrideURL *url.URL -} - -func (t TestTransport) RoundTrip(req *http.Request) (*http.Response, error) { - req = cloneRequest(req) - req.Header.Set("X-Original-Scheme", req.URL.Scheme) - req.URL.Scheme = t.overrideURL.Scheme - req.URL.Host = t.overrideURL.Host - return t.RoundTripper.RoundTrip(req) -} - -func cloneRequest(r *http.Request) *http.Request { - r2 := new(http.Request) - *r2 = *r - r2.URL, _ = url.Parse(r.URL.String()) - r2.Header = make(http.Header) - for k, s := range r.Header { - r2.Header[k] = s - } - return r2 -} - -// setup sets up a test HTTP server along with a octokit.Client that is -// configured to talk to that test server. Tests should register handlers on -// mux which provide mock responses for the API method being tested. -func setup() { - // test server - mux = http.NewServeMux() - server = httptest.NewServer(mux) - serverURL, _ := url.Parse(server.URL) - - httpClient := http.Client{ - Transport: TestTransport{ - RoundTripper: http.DefaultTransport, - overrideURL: serverURL, - }, - } - - // octokit client configured to use test server - client = NewClientWith( - gitHubAPIURL, - userAgent, - TokenAuth{AccessToken: "token"}, - &httpClient, - ) -} - -// teardown closes the test HTTP server. -func tearDown() { - server.Close() -} - -func testMethod(t *testing.T, r *http.Request, want string) { - assert.Equal(t, want, r.Method) -} - -func testHeader(t *testing.T, r *http.Request, header string, want string) { - assert.Equal(t, want, r.Header.Get(header)) -} - -func testBody(t *testing.T, r *http.Request, want string) { - body, _ := ioutil.ReadAll(r.Body) - assert.Equal(t, want, string(body)) -} - -func respondWithJSON(w http.ResponseWriter, s string) { - header := w.Header() - header.Set("Content-Type", "application/json") - respondWith(w, s) -} - -func respondWithStatus(w http.ResponseWriter, statusCode int) { - w.WriteHeader(statusCode) -} - -func respondWith(w http.ResponseWriter, s string) { - fmt.Fprint(w, s) -} - -func testURLOf(path string) *url.URL { - u, _ := url.ParseRequestURI(testURLStringOf(path)) - return u -} - -func testURLStringOf(path string) string { - return fmt.Sprintf("%s/%s", server.URL, path) -} - -func loadFixture(f string) string { - pwd, _ := os.Getwd() - p := path.Join(pwd, "..", "fixtures", f) - c, _ := ioutil.ReadFile(p) - return string(c) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go deleted file mode 100644 index 830a985..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go +++ /dev/null @@ -1,104 +0,0 @@ -package octokit - -import ( - "io" - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - PullRequestsURL = Hyperlink("repos/{owner}/{repo}/pulls{/number}") -) - -// Create a PullRequestsService with the base url.URL -func (c *Client) PullRequests(url *url.URL) (pullRequests *PullRequestsService) { - pullRequests = &PullRequestsService{client: c, URL: url} - return -} - -type PullRequestsService struct { - client *Client - URL *url.URL -} - -func (p *PullRequestsService) One() (pull *PullRequest, result *Result) { - result = p.client.get(p.URL, &pull) - return -} - -func (p *PullRequestsService) Create(params 
interface{}) (pull *PullRequest, result *Result) { - result = p.client.post(p.URL, params, &pull) - return -} - -func (p *PullRequestsService) All() (pulls []PullRequest, result *Result) { - result = p.client.get(p.URL, &pulls) - return -} - -func (p *PullRequestsService) Diff() (diff io.ReadCloser, result *Result) { - return p.client.getBody(p.URL, diffMediaType) -} - -func (p *PullRequestsService) Patch() (patch io.ReadCloser, result *Result) { - return p.client.getBody(p.URL, patchMediaType) -} - -type PullRequest struct { - *hypermedia.HALResource - - URL string `json:"url,omitempty"` - ID int `json:"id,omitempty"` - HTMLURL string `json:"html_url,omitempty"` - DiffURL string `json:"diff_url,omitempty"` - PatchURL string `json:"patch_url,omitempty"` - IssueURL string `json:"issue_url,omitempty"` - Title string `json:"title,omitempty"` - Number int `json:"number,omitempty"` - State string `json:"state,omitempty"` - User User `json:"user,omitempty"` - Body string `json:"body,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` - ClosedAt *time.Time `json:"closed_at,omitempty"` - MergedAt *time.Time `json:"merged_at,omitempty"` - MergeCommitSha string `json:"merge_commit_sha,omitempty"` - Assignee *User `json:"assignee,omitempty"` - CommitsURL string `json:"commits_url,omitempty"` - ReviewCommentsURL string `json:"review_comments_url,omitempty"` - ReviewCommentURL string `json:"review_comment_url,omitempty"` - CommentsURL string `json:"comments_url,omitempty"` - Head PullRequestCommit `json:"head,omitempty"` - Base PullRequestCommit `json:"base,omitempty"` - Merged bool `json:"merged,omitempty"` - MergedBy User `json:"merged_by,omitempty"` - Comments int `json:"comments,omitempty"` - ReviewComments int `json:"review_comments,omitempty"` - Commits int `json:"commits,omitempty"` - Additions int `json:"additions,omitempty"` - Deletions int `json:"deletions,omitempty"` - ChangedFiles int `json:"changed_files,omitempty"` -} - -type PullRequestCommit struct { - Label string `json:"label,omitempty"` - Ref string `json:"ref,omitempty"` - Sha string `json:"sha,omitempty"` - User User `json:"user,omitempty"` - Repo *Repository `json:"repo,omitempty"` -} - -type PullRequestParams struct { - Base string `json:"base,omitempty"` - Head string `json:"head,omitempty"` - Title string `json:"title,omitempty"` - Body string `json:"body,omitempty"` -} - -type PullRequestForIssueParams struct { - Base string `json:"base,omitempty"` - Head string `json:"head,omitempty"` - Issue string `json:"issue,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go deleted file mode 100644 index cd13645..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go +++ /dev/null @@ -1,162 +0,0 @@ -package octokit - -import ( - "fmt" - "io/ioutil" - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestPullRequestService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/go-octokit/pulls/1", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("pull_request.json")) - }) - - url, err := 
PullRequestsURL.Expand(M{"owner": "octokit", "repo": "go-octokit", "number": 1}) - assert.Equal(t, nil, err) - - pr, result := client.PullRequests(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, 1, pr.ChangedFiles) - assert.Equal(t, 1, pr.Deletions) - assert.Equal(t, 1, pr.Additions) - assert.Equal(t, "aafce5dfc44270f35270b24677abbb859b20addf", pr.MergeCommitSha) - assert.Equal(t, "2013-06-09 00:53:38 +0000 UTC", pr.MergedAt.String()) - assert.Equal(t, "2013-06-09 00:53:38 +0000 UTC", pr.ClosedAt.String()) - assert.Equal(t, "2013-06-19 00:35:24 +0000 UTC", pr.UpdatedAt.String()) - assert.Equal(t, "2013-06-09 00:52:12 +0000 UTC", pr.CreatedAt.String()) - assert.Equal(t, "typo", pr.Body) - assert.Equal(t, "Update README.md", pr.Title) - assert.Equal(t, "https://api.github.com/repos/jingweno/octokat/pulls/1", pr.URL) - assert.Equal(t, 6206442, pr.ID) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1", pr.HTMLURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1.diff", pr.DiffURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1.patch", pr.PatchURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1", pr.IssueURL) - assert.Equal(t, 1, pr.Number) - assert.Equal(t, "closed", pr.State) - assert.T(t, nil == pr.Assignee) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1/commits", pr.CommitsURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1/comments", pr.ReviewCommentsURL) - assert.Equal(t, "/repos/jingweno/octokat/pulls/comments/{number}", pr.ReviewCommentURL) - assert.Equal(t, "https://api.github.com/repos/jingweno/octokat/issues/1/comments", pr.CommentsURL) -} - -func TestPullRequestService_Post(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/go-octokit/pulls", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testBody(t, r, - "{\"base\":\"base\",\"head\":\"head\",\"title\":\"title\",\"body\":\"body\"}\n") - respondWithJSON(w, loadFixture("pull_request.json")) - }) - - url, err := PullRequestsURL.Expand(M{"owner": "octokit", "repo": "go-octokit"}) - assert.Equal(t, nil, err) - - params := PullRequestParams{ - Base: "base", - Head: "head", - Title: "title", - Body: "body", - } - pr, result := client.PullRequests(url).Create(params) - - assert.T(t, !result.HasError()) - assert.Equal(t, 1, pr.ChangedFiles) - assert.Equal(t, 1, pr.Deletions) - assert.Equal(t, 1, pr.Additions) - assert.Equal(t, "aafce5dfc44270f35270b24677abbb859b20addf", pr.MergeCommitSha) - assert.Equal(t, "2013-06-09 00:53:38 +0000 UTC", pr.MergedAt.String()) - assert.Equal(t, "2013-06-09 00:53:38 +0000 UTC", pr.ClosedAt.String()) - assert.Equal(t, "2013-06-19 00:35:24 +0000 UTC", pr.UpdatedAt.String()) - assert.Equal(t, "2013-06-09 00:52:12 +0000 UTC", pr.CreatedAt.String()) - assert.Equal(t, "typo", pr.Body) - assert.Equal(t, "Update README.md", pr.Title) - assert.Equal(t, "https://api.github.com/repos/jingweno/octokat/pulls/1", pr.URL) - assert.Equal(t, 6206442, pr.ID) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1", pr.HTMLURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1.diff", pr.DiffURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1.patch", pr.PatchURL) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1", pr.IssueURL) - assert.Equal(t, 1, pr.Number) - assert.Equal(t, "closed", pr.State) - assert.T(t, nil == pr.Assignee) - assert.Equal(t, "https://github.com/jingweno/octokat/pull/1/commits", pr.CommitsURL) - 
assert.Equal(t, "https://github.com/jingweno/octokat/pull/1/comments", pr.ReviewCommentsURL) - assert.Equal(t, "/repos/jingweno/octokat/pulls/comments/{number}", pr.ReviewCommentURL) - assert.Equal(t, "https://api.github.com/repos/jingweno/octokat/issues/1/comments", pr.CommentsURL) -} - -func TestPullRequestService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/rails/rails/pulls", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - header := w.Header() - link := fmt.Sprintf(`<%s>; rel="next", <%s>; rel="last"`, testURLOf("repositories/8514/pulls?page=2"), testURLOf("repositories/8514/pulls?page=14")) - header.Set("Link", link) - respondWithJSON(w, loadFixture("pull_requests.json")) - }) - - url, err := PullRequestsURL.Expand(M{"owner": "rails", "repo": "rails"}) - assert.Equal(t, nil, err) - - prs, result := client.PullRequests(url).All() - assert.T(t, !result.HasError()) - assert.Equal(t, 30, len(prs)) - assert.Equal(t, testURLStringOf("repositories/8514/pulls?page=2"), string(*result.NextPage)) - assert.Equal(t, testURLStringOf("repositories/8514/pulls?page=14"), string(*result.LastPage)) -} - -func TestPullRequestService_Diff(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/go-octokit/pulls/1", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Accept", diffMediaType) - respondWith(w, "diff --git") - }) - - url, err := PullRequestsURL.Expand(M{"owner": "octokit", "repo": "go-octokit", "number": 1}) - assert.Equal(t, nil, err) - - diff, result := client.PullRequests(url).Diff() - - assert.T(t, !result.HasError()) - content, err := ioutil.ReadAll(diff) - assert.Equal(t, nil, err) - assert.Equal(t, "diff --git", string(content)) -} - -func TestPullRequestService_Patch(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/go-octokit/pulls/1", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Accept", patchMediaType) - respondWith(w, "patches galore") - }) - - url, err := PullRequestsURL.Expand(M{"owner": "octokit", "repo": "go-octokit", "number": 1}) - assert.Equal(t, nil, err) - - patch, result := client.PullRequests(url).Patch() - - assert.T(t, !result.HasError()) - content, err := ioutil.ReadAll(patch) - assert.Equal(t, nil, err) - assert.Equal(t, "patches galore", string(content)) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go deleted file mode 100644 index 26e1cef..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go +++ /dev/null @@ -1,79 +0,0 @@ -package octokit - -import ( - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - ReleasesURL = Hyperlink("repos/{owner}/{repo}/releases{/id}") -) - -type Release struct { - *hypermedia.HALResource - - ID int `json:"id,omitempty"` - URL string `json:"url,omitempty"` - HTMLURL string `json:"html_url,omitempty"` - AssetsURL string `json:"assets_url,omitempty"` - UploadURL Hyperlink `json:"upload_url,omitempty"` - TagName string `json:"tag_name,omitempty"` - TargetCommitish string `json:"target_commitish,omitempty"` - Name string `json:"name,omitempty"` - Body string `json:"body,omitempty"` - Draft 
bool `json:"draft,omitempty"` - Prerelease bool `json:"prerelease,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - PublishedAt *time.Time `json:"published_at,omitempty"` - Assets []Asset `json:"assets,omitempty"` -} - -type Asset struct { - ID int `json:"id,omitempty"` - Name string `json:"name,omitempty"` - Label string `json:"label,omitempty"` - ContentType string `json:"content_type,omitempty"` - State string `json:"state,omitempty"` - Size int `json:"size,omitempty"` - DownloadCount int `json:"download_count,omitempty"` - URL string `json:"url,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` -} - -// Create a ReleasesService with the base url.URL -func (c *Client) Releases(url *url.URL) (releases *ReleasesService) { - releases = &ReleasesService{client: c, URL: url} - return -} - -type ReleasesService struct { - client *Client - URL *url.URL -} - -func (r *ReleasesService) All() (releases []Release, result *Result) { - result = r.client.get(r.URL, &releases) - return -} - -func (r *ReleasesService) Create(params interface{}) (release *Release, result *Result) { - result = r.client.post(r.URL, params, &release) - return -} - -func (r *ReleasesService) Update(params interface{}) (release *Release, result *Result) { - result = r.client.patch(r.URL, params, &release) - return -} - -type ReleaseParams struct { - TagName string `json:"tag_name,omitempty"` - TargetCommitish string `json:"target_commitish,omitempty"` - Name string `json:"name,omitempty"` - Body string `json:"body,omitempty"` - Draft bool `json:"draft,omitempty"` - Prerelease bool `json:"prerelease,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go deleted file mode 100644 index e418b4f..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go +++ /dev/null @@ -1,67 +0,0 @@ -package octokit - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" -) - -func newRequest(client *Client, urlStr string) (req *Request, err error) { - sawyerReq, err := client.Client.NewRequest(urlStr) - if err != nil { - return - } - - req = &Request{client: client, Request: sawyerReq} - - return -} - -type Request struct { - *sawyer.Request - client *Client -} - -func (r *Request) Head(output interface{}) (*Response, error) { - return r.createResponse(r.Request.Head(), output) -} - -func (r *Request) Get(output interface{}) (*Response, error) { - return r.createResponse(r.Request.Get(), output) -} - -func (r *Request) Post(input interface{}, output interface{}) (*Response, error) { - r.setBody(input) - return r.createResponse(r.Request.Post(), output) -} - -func (r *Request) Put(input interface{}, output interface{}) (*Response, error) { - r.setBody(input) - return r.createResponse(r.Request.Put(), output) -} - -func (r *Request) Delete(output interface{}) (*Response, error) { - return r.createResponse(r.Request.Delete(), output) -} - -func (r *Request) Patch(input interface{}, output interface{}) (*Response, error) { - r.setBody(input) - return r.createResponse(r.Request.Patch(), output) -} - -func (r *Request) Options(output interface{}) (*Response, error) 
{ - return r.createResponse(r.Request.Options(), output) -} - -func (r *Request) setBody(input interface{}) { - mtype, _ := mediatype.Parse(defaultMediaType) - r.Request.SetBody(mtype, input) -} - -func (r *Request) createResponse(sawyerResp *sawyer.Response, output interface{}) (resp *Response, err error) { - resp, err = NewResponse(sawyerResp) - if err == nil { - err = sawyerResp.Decode(output) - } - - return -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go deleted file mode 100644 index 2a7b9cd..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go +++ /dev/null @@ -1,61 +0,0 @@ -package octokit - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" -) - -type pageable struct { - NextPage *Hyperlink - LastPage *Hyperlink - FirstPage *Hyperlink - PrevPage *Hyperlink -} - -type Result struct { - Response *Response - Err error - pageable -} - -func (r *Result) HasError() bool { - return r.Err != nil -} - -func (r *Result) Error() string { - if r.Err != nil { - return r.Err.Error() - } - - return "" -} - -func newResult(resp *Response, err error) *Result { - pageable := pageable{} - if resp != nil { - fillPageable(&pageable, resp.MediaHeader) - } - - return &Result{Response: resp, pageable: pageable, Err: err} -} - -func fillPageable(pageable *pageable, header *mediaheader.MediaHeader) { - if link, ok := header.Relations["next"]; ok { - l := Hyperlink(link) - pageable.NextPage = &l - } - - if link, ok := header.Relations["prev"]; ok { - l := Hyperlink(link) - pageable.PrevPage = &l - } - - if link, ok := header.Relations["first"]; ok { - l := Hyperlink(link) - pageable.FirstPage = &l - } - - if link, ok := header.Relations["last"]; ok { - l := Hyperlink(link) - pageable.LastPage = &l - } -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go deleted file mode 100644 index ebb7b1d..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go +++ /dev/null @@ -1,90 +0,0 @@ -package octokit - -import ( - "net/url" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - RootURL = Hyperlink("") -) - -func (c *Client) Rel(name string, m map[string]interface{}) (*url.URL, error) { - if c.rootRels == nil || len(c.rootRels) == 0 { - u, _ := url.Parse("/") - root, res := c.Root(u).One() - if res.HasError() { - return nil, res - } - c.rootRels = root.Rels() - } - - return c.rootRels.Rel(name, m) -} - -// Create a RooService with the base url.URL -func (c *Client) Root(url *url.URL) (root *RootService) { - root = &RootService{client: c, URL: url} - return -} - -type RootService struct { - client *Client - URL *url.URL -} - -func (r *RootService) One() (root *Root, result *Result) { - root = &Root{HALResource: &hypermedia.HALResource{}} - result = r.client.get(r.URL, &root) - if root != nil { - // Cached hyperlinks - root.PullsURL = hypermedia.Hyperlink(PullRequestsURL) - } - - return -} - -type Root struct { - *hypermedia.HALResource - - UserSearchURL hypermedia.Hyperlink 
`rel:"user_search" json:"user_search_url,omitempty"` - UserRepositoriesURL hypermedia.Hyperlink `rel:"user_repositories" json:"user_repositories_url,omitempty"` - UserOrganizationsURL hypermedia.Hyperlink `rel:"user_organizations" json:"user_organizations_url,omitempty"` - UserURL hypermedia.Hyperlink `rel:"user" json:"user_url,omitempty"` - TeamURL hypermedia.Hyperlink `rel:"team" json:"team_url,omitempty"` - StarredGistsURL hypermedia.Hyperlink `rel:"starred_gists" json:"starred_gists_url,omitempty"` - StarredURL hypermedia.Hyperlink `rel:"starred" json:"starred_url,omitempty"` - CurrentUserRepositoriesURL hypermedia.Hyperlink `rel:"current_user_repositories" json:"current_user_repositories_url,omitempty"` - RepositorySearchURL hypermedia.Hyperlink `rel:"repository_search" json:"repository_search_url,omitempty"` - RepositoryURL hypermedia.Hyperlink `rel:"repository" json:"repository_url,omitempty"` - RateLimitURL hypermedia.Hyperlink `rel:"rate_limit" json:"rate_limit_url,omitempty"` - GistsURL hypermedia.Hyperlink `rel:"gists" json:"gists_url,omitempty"` - FollowingURL hypermedia.Hyperlink `rel:"following" json:"following_url,omitempty"` - FeedsURL hypermedia.Hyperlink `rel:"feeds" json:"feeds_url,omitempty"` - EventsURL hypermedia.Hyperlink `rel:"events" json:"events_url,omitempty"` - EmojisURL hypermedia.Hyperlink `rel:"emojis" json:"emojis_url,omitempty"` - EmailsURL hypermedia.Hyperlink `rel:"emails" json:"emails_url,omitempty"` - AuthorizationsURL hypermedia.Hyperlink `rel:"authorizations" json:"authorizations_url,omitempty"` - CurrentUserURL hypermedia.Hyperlink `rel:"current_user" json:"current_user_url,omitempty"` - HubURL hypermedia.Hyperlink `rel:"hub" json:"hub_url,omitempty"` - IssueSearchURL hypermedia.Hyperlink `rel:"issue_search" json:"issue_search_url,omitempty"` - IssuesURL hypermedia.Hyperlink `rel:"issues" json:"issues_url,omitempty"` - KeysURL hypermedia.Hyperlink `rel:"keys" json:"keys_url,omitempty"` - NotificationsURL hypermedia.Hyperlink `rel:"notifications" json:"notifications_url,omitempty"` - OrganizationRepositoriesURL hypermedia.Hyperlink `rel:"organization_repositories" json:"organization_repositories_url,omitempty"` - OrganizationURL hypermedia.Hyperlink `rel:"organization" json:"organization_url,omitempty"` - PublicGistsURL hypermedia.Hyperlink `rel:"public_gists" json:"public_gists_url,omitempty"` - PullsURL hypermedia.Hyperlink `rel:"pulls" json:"-"` - rels hypermedia.Relations `json:"-"` -} - -func (r *Root) Rels() hypermedia.Relations { - if r.rels == nil || len(r.rels) == 0 { - r.rels = hypermedia.HyperFieldDecoder(r) - for key, hyperlink := range r.HALResource.Rels() { - r.rels[key] = hyperlink - } - } - return r.rels -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go deleted file mode 100644 index f2dd0ee..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go +++ /dev/null @@ -1,39 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestRootService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("root.json")) - }) - - url, err := 
RootURL.Expand(nil) - assert.Equal(t, nil, err) - - root, result := client.Root(url).One() - assert.T(t, !result.HasError()) - assert.Equal(t, "https://api.github.com/users/{user}", string(root.UserURL)) -} - -func TestClientRel(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("root.json")) - }) - - u, err := client.Rel("user", M{"user": "root"}) - assert.Equal(t, nil, err) - assert.Equal(t, "https://api.github.com/users/root", u.String()) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go deleted file mode 100644 index 06c692e..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go +++ /dev/null @@ -1,30 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestStatuses(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/jingweno/gh/statuses/740211b9c6cd8e526a7124fe2b33115602fbc637", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("statuses.json")) - }) - - sha := "740211b9c6cd8e526a7124fe2b33115602fbc637" - url, err := StatusesURL.Expand(M{"owner": "jingweno", "repo": "gh", "ref": sha}) - assert.Equal(t, nil, err) - - statuses, err := client.Statuses(url).All() - - assert.Equal(t, 2, len(statuses)) - firstStatus := statuses[0] - assert.Equal(t, "pending", firstStatus.State) - assert.Equal(t, "The Travis CI build is in progress", firstStatus.Description) - assert.Equal(t, "https://travis-ci.org/jingweno/gh/builds/11911500", firstStatus.TargetURL) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go deleted file mode 100644 index 8a13a02..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go +++ /dev/null @@ -1,113 +0,0 @@ -package octokit - -import ( - "fmt" - "net/http" - "regexp" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestUsersService_GetCurrentUser(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/user", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("user.json")) - }) - - url, _ := CurrentUserURL.Expand(nil) - user, result := client.Users(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, 169064, user.ID) - assert.Equal(t, "jingweno", user.Login) - assert.Equal(t, "jingweno@gmail.com", user.Email) - assert.Equal(t, "User", user.Type) - assert.Equal(t, 17, user.Following) - assert.Equal(t, 28, user.Followers) - assert.Equal(t, 90, user.PublicRepos) - assert.Equal(t, false, user.SiteAdmin) - assert.Equal(t, "https://api.github.com/users/jingweno/repos", string(user.ReposURL)) -} - -func TestUsersService_UpdateCurrentUser(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/user", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "PUT") - 
testBody(t, r, "{\"email\":\"jingweno@gmail.com\"}\n") - respondWithJSON(w, loadFixture("user.json")) - }) - - url, _ := CurrentUserURL.Expand(nil) - userToUpdate := User{Email: "jingweno@gmail.com"} - user, result := client.Users(url).Update(userToUpdate) - - assert.T(t, !result.HasError()) - assert.Equal(t, 169064, user.ID) - assert.Equal(t, "jingweno", user.Login) - assert.Equal(t, "jingweno@gmail.com", user.Email) - assert.Equal(t, "User", user.Type) - assert.Equal(t, "https://api.github.com/users/jingweno/repos", string(user.ReposURL)) -} - -func TestUsersService_GetUser(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/users/jingweno", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("user.json")) - }) - - url, err := UserURL.Expand(M{"user": "jingweno"}) - assert.Equal(t, nil, err) - user, result := client.Users(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, 169064, user.ID) - assert.Equal(t, "jingweno", user.Login) - assert.Equal(t, "jingweno@gmail.com", user.Email) - assert.Equal(t, "User", user.Type) - assert.Equal(t, "https://api.github.com/users/jingweno/repos", string(user.ReposURL)) -} - -func TestUsersService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/users", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - - rr := regexp.MustCompile(`users\?since=\d+`) - assert.Tf(t, rr.MatchString(r.URL.String()), "Regexp should match users?since=\\d+") - - header := w.Header() - link := fmt.Sprintf(`<%s>; rel="next", <%s>; rel="first"`, testURLOf("users?since=135"), testURLOf("users{?since}")) - header.Set("Link", link) - respondWithJSON(w, loadFixture("users.json")) - }) - - url, err := UserURL.Expand(M{"since": 1}) - assert.Equal(t, nil, err) - - q := url.Query() - q.Set("since", "1") - url.RawQuery = q.Encode() - allUsers, result := client.Users(url).All() - - assert.T(t, !result.HasError()) - assert.Equal(t, 1, len(allUsers)) - assert.Equal(t, testURLStringOf("users?since=135"), string(*result.NextPage)) - - nextPageURL, err := result.NextPage.Expand(nil) - assert.Equal(t, nil, err) - - allUsers, result = client.Users(nextPageURL).All() - assert.T(t, !result.HasError()) - assert.Equal(t, 1, len(allUsers)) -} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go b/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go deleted file mode 100644 index 4b020b1..0000000 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go +++ /dev/null @@ -1,716 +0,0 @@ -package yaml - -import ( - "io" -) - -// The version directive data. -type yaml_version_directive_t struct { - major int8 // The major version number. - minor int8 // The minor version number. -} - -// The tag directive data. -type yaml_tag_directive_t struct { - handle []byte // The tag handle. - prefix []byte // The tag prefix. -} - -type yaml_encoding_t int - -// The stream encoding. -const ( - // Let the parser choose the encoding. - yaml_ANY_ENCODING yaml_encoding_t = iota - - yaml_UTF8_ENCODING // The default UTF-8 encoding. - yaml_UTF16LE_ENCODING // The UTF-16-LE encoding with BOM. - yaml_UTF16BE_ENCODING // The UTF-16-BE encoding with BOM. -) - -type yaml_break_t int - -// Line break types. -const ( - // Let the parser choose the break type. - yaml_ANY_BREAK yaml_break_t = iota - - yaml_CR_BREAK // Use CR for line breaks (Mac style). 
- yaml_LN_BREAK // Use LN for line breaks (Unix style). - yaml_CRLN_BREAK // Use CR LN for line breaks (DOS style). -) - -type yaml_error_type_t int - -// Many bad things could happen with the parser and emitter. -const ( - // No error is produced. - yaml_NO_ERROR yaml_error_type_t = iota - - yaml_MEMORY_ERROR // Cannot allocate or reallocate a block of memory. - yaml_READER_ERROR // Cannot read or decode the input stream. - yaml_SCANNER_ERROR // Cannot scan the input stream. - yaml_PARSER_ERROR // Cannot parse the input stream. - yaml_COMPOSER_ERROR // Cannot compose a YAML document. - yaml_WRITER_ERROR // Cannot write to the output stream. - yaml_EMITTER_ERROR // Cannot emit a YAML stream. -) - -// The pointer position. -type yaml_mark_t struct { - index int // The position index. - line int // The position line. - column int // The position column. -} - -// Node Styles - -type yaml_style_t int8 - -type yaml_scalar_style_t yaml_style_t - -// Scalar styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SCALAR_STYLE yaml_scalar_style_t = iota - - yaml_PLAIN_SCALAR_STYLE // The plain scalar style. - yaml_SINGLE_QUOTED_SCALAR_STYLE // The single-quoted scalar style. - yaml_DOUBLE_QUOTED_SCALAR_STYLE // The double-quoted scalar style. - yaml_LITERAL_SCALAR_STYLE // The literal scalar style. - yaml_FOLDED_SCALAR_STYLE // The folded scalar style. -) - -type yaml_sequence_style_t yaml_style_t - -// Sequence styles. -const ( - // Let the emitter choose the style. - yaml_ANY_SEQUENCE_STYLE yaml_sequence_style_t = iota - - yaml_BLOCK_SEQUENCE_STYLE // The block sequence style. - yaml_FLOW_SEQUENCE_STYLE // The flow sequence style. -) - -type yaml_mapping_style_t yaml_style_t - -// Mapping styles. -const ( - // Let the emitter choose the style. - yaml_ANY_MAPPING_STYLE yaml_mapping_style_t = iota - - yaml_BLOCK_MAPPING_STYLE // The block mapping style. - yaml_FLOW_MAPPING_STYLE // The flow mapping style. -) - -// Tokens - -type yaml_token_type_t int - -// Token types. -const ( - // An empty token. - yaml_NO_TOKEN yaml_token_type_t = iota - - yaml_STREAM_START_TOKEN // A STREAM-START token. - yaml_STREAM_END_TOKEN // A STREAM-END token. - - yaml_VERSION_DIRECTIVE_TOKEN // A VERSION-DIRECTIVE token. - yaml_TAG_DIRECTIVE_TOKEN // A TAG-DIRECTIVE token. - yaml_DOCUMENT_START_TOKEN // A DOCUMENT-START token. - yaml_DOCUMENT_END_TOKEN // A DOCUMENT-END token. - - yaml_BLOCK_SEQUENCE_START_TOKEN // A BLOCK-SEQUENCE-START token. - yaml_BLOCK_MAPPING_START_TOKEN // A BLOCK-SEQUENCE-END token. - yaml_BLOCK_END_TOKEN // A BLOCK-END token. - - yaml_FLOW_SEQUENCE_START_TOKEN // A FLOW-SEQUENCE-START token. - yaml_FLOW_SEQUENCE_END_TOKEN // A FLOW-SEQUENCE-END token. - yaml_FLOW_MAPPING_START_TOKEN // A FLOW-MAPPING-START token. - yaml_FLOW_MAPPING_END_TOKEN // A FLOW-MAPPING-END token. - - yaml_BLOCK_ENTRY_TOKEN // A BLOCK-ENTRY token. - yaml_FLOW_ENTRY_TOKEN // A FLOW-ENTRY token. - yaml_KEY_TOKEN // A KEY token. - yaml_VALUE_TOKEN // A VALUE token. - - yaml_ALIAS_TOKEN // An ALIAS token. - yaml_ANCHOR_TOKEN // An ANCHOR token. - yaml_TAG_TOKEN // A TAG token. - yaml_SCALAR_TOKEN // A SCALAR token. 
-) - -func (tt yaml_token_type_t) String() string { - switch tt { - case yaml_NO_TOKEN: - return "yaml_NO_TOKEN" - case yaml_STREAM_START_TOKEN: - return "yaml_STREAM_START_TOKEN" - case yaml_STREAM_END_TOKEN: - return "yaml_STREAM_END_TOKEN" - case yaml_VERSION_DIRECTIVE_TOKEN: - return "yaml_VERSION_DIRECTIVE_TOKEN" - case yaml_TAG_DIRECTIVE_TOKEN: - return "yaml_TAG_DIRECTIVE_TOKEN" - case yaml_DOCUMENT_START_TOKEN: - return "yaml_DOCUMENT_START_TOKEN" - case yaml_DOCUMENT_END_TOKEN: - return "yaml_DOCUMENT_END_TOKEN" - case yaml_BLOCK_SEQUENCE_START_TOKEN: - return "yaml_BLOCK_SEQUENCE_START_TOKEN" - case yaml_BLOCK_MAPPING_START_TOKEN: - return "yaml_BLOCK_MAPPING_START_TOKEN" - case yaml_BLOCK_END_TOKEN: - return "yaml_BLOCK_END_TOKEN" - case yaml_FLOW_SEQUENCE_START_TOKEN: - return "yaml_FLOW_SEQUENCE_START_TOKEN" - case yaml_FLOW_SEQUENCE_END_TOKEN: - return "yaml_FLOW_SEQUENCE_END_TOKEN" - case yaml_FLOW_MAPPING_START_TOKEN: - return "yaml_FLOW_MAPPING_START_TOKEN" - case yaml_FLOW_MAPPING_END_TOKEN: - return "yaml_FLOW_MAPPING_END_TOKEN" - case yaml_BLOCK_ENTRY_TOKEN: - return "yaml_BLOCK_ENTRY_TOKEN" - case yaml_FLOW_ENTRY_TOKEN: - return "yaml_FLOW_ENTRY_TOKEN" - case yaml_KEY_TOKEN: - return "yaml_KEY_TOKEN" - case yaml_VALUE_TOKEN: - return "yaml_VALUE_TOKEN" - case yaml_ALIAS_TOKEN: - return "yaml_ALIAS_TOKEN" - case yaml_ANCHOR_TOKEN: - return "yaml_ANCHOR_TOKEN" - case yaml_TAG_TOKEN: - return "yaml_TAG_TOKEN" - case yaml_SCALAR_TOKEN: - return "yaml_SCALAR_TOKEN" - } - return "" -} - -// The token structure. -type yaml_token_t struct { - // The token type. - typ yaml_token_type_t - - // The start/end of the token. - start_mark, end_mark yaml_mark_t - - // The stream encoding (for yaml_STREAM_START_TOKEN). - encoding yaml_encoding_t - - // The alias/anchor/scalar value or tag/tag directive handle - // (for yaml_ALIAS_TOKEN, yaml_ANCHOR_TOKEN, yaml_SCALAR_TOKEN, yaml_TAG_TOKEN, yaml_TAG_DIRECTIVE_TOKEN). - value []byte - - // The tag suffix (for yaml_TAG_TOKEN). - suffix []byte - - // The tag directive prefix (for yaml_TAG_DIRECTIVE_TOKEN). - prefix []byte - - // The scalar style (for yaml_SCALAR_TOKEN). - style yaml_scalar_style_t - - // The version directive major/minor (for yaml_VERSION_DIRECTIVE_TOKEN). - major, minor int8 -} - -// Events - -type yaml_event_type_t int8 - -// Event types. -const ( - // An empty event. - yaml_NO_EVENT yaml_event_type_t = iota - - yaml_STREAM_START_EVENT // A STREAM-START event. - yaml_STREAM_END_EVENT // A STREAM-END event. - yaml_DOCUMENT_START_EVENT // A DOCUMENT-START event. - yaml_DOCUMENT_END_EVENT // A DOCUMENT-END event. - yaml_ALIAS_EVENT // An ALIAS event. - yaml_SCALAR_EVENT // A SCALAR event. - yaml_SEQUENCE_START_EVENT // A SEQUENCE-START event. - yaml_SEQUENCE_END_EVENT // A SEQUENCE-END event. - yaml_MAPPING_START_EVENT // A MAPPING-START event. - yaml_MAPPING_END_EVENT // A MAPPING-END event. -) - -// The event structure. -type yaml_event_t struct { - - // The event type. - typ yaml_event_type_t - - // The start and end of the event. - start_mark, end_mark yaml_mark_t - - // The document encoding (for yaml_STREAM_START_EVENT). - encoding yaml_encoding_t - - // The version directive (for yaml_DOCUMENT_START_EVENT). - version_directive *yaml_version_directive_t - - // The list of tag directives (for yaml_DOCUMENT_START_EVENT). - tag_directives []yaml_tag_directive_t - - // The anchor (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_ALIAS_EVENT). 
- anchor []byte - - // The tag (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - tag []byte - - // The scalar value (for yaml_SCALAR_EVENT). - value []byte - - // Is the document start/end indicator implicit, or the tag optional? - // (for yaml_DOCUMENT_START_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT, yaml_SCALAR_EVENT). - implicit bool - - // Is the tag optional for any non-plain style? (for yaml_SCALAR_EVENT). - quoted_implicit bool - - // The style (for yaml_SCALAR_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT). - style yaml_style_t -} - -func (e *yaml_event_t) scalar_style() yaml_scalar_style_t { return yaml_scalar_style_t(e.style) } -func (e *yaml_event_t) sequence_style() yaml_sequence_style_t { return yaml_sequence_style_t(e.style) } -func (e *yaml_event_t) mapping_style() yaml_mapping_style_t { return yaml_mapping_style_t(e.style) } - -// Nodes - -const ( - yaml_NULL_TAG = "tag:yaml.org,2002:null" // The tag !!null with the only possible value: null. - yaml_BOOL_TAG = "tag:yaml.org,2002:bool" // The tag !!bool with the values: true and false. - yaml_STR_TAG = "tag:yaml.org,2002:str" // The tag !!str for string values. - yaml_INT_TAG = "tag:yaml.org,2002:int" // The tag !!int for integer values. - yaml_FLOAT_TAG = "tag:yaml.org,2002:float" // The tag !!float for float values. - yaml_TIMESTAMP_TAG = "tag:yaml.org,2002:timestamp" // The tag !!timestamp for date and time values. - - yaml_SEQ_TAG = "tag:yaml.org,2002:seq" // The tag !!seq is used to denote sequences. - yaml_MAP_TAG = "tag:yaml.org,2002:map" // The tag !!map is used to denote mapping. - - // Not in original libyaml. - yaml_BINARY_TAG = "tag:yaml.org,2002:binary" - yaml_MERGE_TAG = "tag:yaml.org,2002:merge" - - yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. - yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. - yaml_DEFAULT_MAPPING_TAG = yaml_MAP_TAG // The default mapping tag is !!map. -) - -type yaml_node_type_t int - -// Node types. -const ( - // An empty node. - yaml_NO_NODE yaml_node_type_t = iota - - yaml_SCALAR_NODE // A scalar node. - yaml_SEQUENCE_NODE // A sequence node. - yaml_MAPPING_NODE // A mapping node. -) - -// An element of a sequence node. -type yaml_node_item_t int - -// An element of a mapping node. -type yaml_node_pair_t struct { - key int // The key of the element. - value int // The value of the element. -} - -// The node structure. -type yaml_node_t struct { - typ yaml_node_type_t // The node type. - tag []byte // The node tag. - - // The node data. - - // The scalar parameters (for yaml_SCALAR_NODE). - scalar struct { - value []byte // The scalar value. - length int // The length of the scalar value. - style yaml_scalar_style_t // The scalar style. - } - - // The sequence parameters (for YAML_SEQUENCE_NODE). - sequence struct { - items_data []yaml_node_item_t // The stack of sequence items. - style yaml_sequence_style_t // The sequence style. - } - - // The mapping parameters (for yaml_MAPPING_NODE). - mapping struct { - pairs_data []yaml_node_pair_t // The stack of mapping pairs (key, value). - pairs_start *yaml_node_pair_t // The beginning of the stack. - pairs_end *yaml_node_pair_t // The end of the stack. - pairs_top *yaml_node_pair_t // The top of the stack. - style yaml_mapping_style_t // The mapping style. - } - - start_mark yaml_mark_t // The beginning of the node. - end_mark yaml_mark_t // The end of the node. - -} - -// The document structure. 
-type yaml_document_t struct { - - // The document nodes. - nodes []yaml_node_t - - // The version directive. - version_directive *yaml_version_directive_t - - // The list of tag directives. - tag_directives_data []yaml_tag_directive_t - tag_directives_start int // The beginning of the tag directives list. - tag_directives_end int // The end of the tag directives list. - - start_implicit int // Is the document start indicator implicit? - end_implicit int // Is the document end indicator implicit? - - // The start/end of the document. - start_mark, end_mark yaml_mark_t -} - -// The prototype of a read handler. -// -// The read handler is called when the parser needs to read more bytes from the -// source. The handler should write not more than size bytes to the buffer. -// The number of written bytes should be set to the size_read variable. -// -// [in,out] data A pointer to an application data specified by -// yaml_parser_set_input(). -// [out] buffer The buffer to write the data from the source. -// [in] size The size of the buffer. -// [out] size_read The actual number of bytes read from the source. -// -// On success, the handler should return 1. If the handler failed, -// the returned value should be 0. On EOF, the handler should set the -// size_read to 0 and return 1. -type yaml_read_handler_t func(parser *yaml_parser_t, buffer []byte) (n int, err error) - -// This structure holds information about a potential simple key. -type yaml_simple_key_t struct { - possible bool // Is a simple key possible? - required bool // Is a simple key required? - token_number int // The number of the token. - mark yaml_mark_t // The position mark. -} - -// The states of the parser. -type yaml_parser_state_t int - -const ( - yaml_PARSE_STREAM_START_STATE yaml_parser_state_t = iota - - yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE // Expect the beginning of an implicit document. - yaml_PARSE_DOCUMENT_START_STATE // Expect DOCUMENT-START. - yaml_PARSE_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_PARSE_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_PARSE_BLOCK_NODE_STATE // Expect a block node. - yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE // Expect a block node or indentless sequence. - yaml_PARSE_FLOW_NODE_STATE // Expect a flow node. - yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a block sequence. - yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE // Expect an entry of a block sequence. - yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE // Expect an entry of an indentless sequence. - yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_PARSE_BLOCK_MAPPING_KEY_STATE // Expect a block mapping key. - yaml_PARSE_BLOCK_MAPPING_VALUE_STATE // Expect a block mapping value. - yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE // Expect the first entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE // Expect an entry of a flow sequence. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE // Expect a key of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE // Expect a value of an ordered mapping. - yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE // Expect the and of an ordered mapping entry. - yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_PARSE_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE // Expect an empty value of a flow mapping. 
- yaml_PARSE_END_STATE // Expect nothing. -) - -func (ps yaml_parser_state_t) String() string { - switch ps { - case yaml_PARSE_STREAM_START_STATE: - return "yaml_PARSE_STREAM_START_STATE" - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return "yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_START_STATE: - return "yaml_PARSE_DOCUMENT_START_STATE" - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return "yaml_PARSE_DOCUMENT_CONTENT_STATE" - case yaml_PARSE_DOCUMENT_END_STATE: - return "yaml_PARSE_DOCUMENT_END_STATE" - case yaml_PARSE_BLOCK_NODE_STATE: - return "yaml_PARSE_BLOCK_NODE_STATE" - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return "yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE" - case yaml_PARSE_FLOW_NODE_STATE: - return "yaml_PARSE_FLOW_NODE_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return "yaml_PARSE_BLOCK_MAPPING_KEY_STATE" - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return "yaml_PARSE_BLOCK_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return "yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE" - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return "yaml_PARSE_FLOW_MAPPING_KEY_STATE" - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_VALUE_STATE" - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return "yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE" - case yaml_PARSE_END_STATE: - return "yaml_PARSE_END_STATE" - } - return "" -} - -// This structure holds aliases data. -type yaml_alias_data_t struct { - anchor []byte // The anchor. - index int // The node id. - mark yaml_mark_t // The anchor mark. -} - -// The parser structure. -// -// All members are internal. Manage the structure using the -// yaml_parser_ family of functions. -type yaml_parser_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - - problem string // Error description. - - // The byte about which the problem occured. - problem_offset int - problem_value int - problem_mark yaml_mark_t - - // The error context. - context string - context_mark yaml_mark_t - - // Reader stuff - - read_handler yaml_read_handler_t // Read handler. - - input_file io.Reader // File input data. - input []byte // String input data. - input_pos int - - eof bool // EOF flag - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - unread int // The number of unread characters in the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The input encoding. 
- - offset int // The offset of the current position (in bytes). - mark yaml_mark_t // The mark of the current position. - - // Scanner stuff - - stream_start_produced bool // Have we started to scan the input stream? - stream_end_produced bool // Have we reached the end of the input stream? - - flow_level int // The number of unclosed '[' and '{' indicators. - - tokens []yaml_token_t // The tokens queue. - tokens_head int // The head of the tokens queue. - tokens_parsed int // The number of tokens fetched from the queue. - token_available bool // Does the tokens queue contain a token ready for dequeueing. - - indent int // The current indentation level. - indents []int // The indentation levels stack. - - simple_key_allowed bool // May a simple key occur at the current position? - simple_keys []yaml_simple_key_t // The stack of simple keys. - - // Parser stuff - - state yaml_parser_state_t // The current parser state. - states []yaml_parser_state_t // The parser states stack. - marks []yaml_mark_t // The stack of marks. - tag_directives []yaml_tag_directive_t // The list of TAG directives. - - // Dumper stuff - - aliases []yaml_alias_data_t // The alias data. - - document *yaml_document_t // The currently parsed document. -} - -// Emitter Definitions - -// The prototype of a write handler. -// -// The write handler is called when the emitter needs to flush the accumulated -// characters to the output. The handler should write @a size bytes of the -// @a buffer to the output. -// -// @param[in,out] data A pointer to an application data specified by -// yaml_emitter_set_output(). -// @param[in] buffer The buffer with bytes to be written. -// @param[in] size The size of the buffer. -// -// @returns On success, the handler should return @c 1. If the handler failed, -// the returned value should be @c 0. -// -type yaml_write_handler_t func(emitter *yaml_emitter_t, buffer []byte) error - -type yaml_emitter_state_t int - -// The emitter states. -const ( - // Expect STREAM-START. - yaml_EMIT_STREAM_START_STATE yaml_emitter_state_t = iota - - yaml_EMIT_FIRST_DOCUMENT_START_STATE // Expect the first DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_START_STATE // Expect DOCUMENT-START or STREAM-END. - yaml_EMIT_DOCUMENT_CONTENT_STATE // Expect the content of a document. - yaml_EMIT_DOCUMENT_END_STATE // Expect DOCUMENT-END. - yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a flow sequence. - yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE // Expect an item of a flow sequence. - yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE // Expect the first key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_KEY_STATE // Expect a key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a flow mapping. - yaml_EMIT_FLOW_MAPPING_VALUE_STATE // Expect a value of a flow mapping. - yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE // Expect the first item of a block sequence. - yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE // Expect an item of a block sequence. - yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE // Expect the first key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_KEY_STATE // Expect the key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE // Expect a value for a simple key of a block mapping. - yaml_EMIT_BLOCK_MAPPING_VALUE_STATE // Expect a value of a block mapping. - yaml_EMIT_END_STATE // Expect nothing. -) - -// The emitter structure. -// -// All members are internal. Manage the structure using the @c yaml_emitter_ -// family of functions. 
-type yaml_emitter_t struct { - - // Error handling - - error yaml_error_type_t // Error type. - problem string // Error description. - - // Writer stuff - - write_handler yaml_write_handler_t // Write handler. - - output_buffer *[]byte // String output data. - output_file io.Writer // File output data. - - buffer []byte // The working buffer. - buffer_pos int // The current position of the buffer. - - raw_buffer []byte // The raw buffer. - raw_buffer_pos int // The current position of the buffer. - - encoding yaml_encoding_t // The stream encoding. - - // Emitter stuff - - canonical bool // If the output is in the canonical style? - best_indent int // The number of indentation spaces. - best_width int // The preferred width of the output lines. - unicode bool // Allow unescaped non-ASCII characters? - line_break yaml_break_t // The preferred line break. - - state yaml_emitter_state_t // The current emitter state. - states []yaml_emitter_state_t // The stack of states. - - events []yaml_event_t // The event queue. - events_head int // The head of the event queue. - - indents []int // The stack of indentation levels. - - tag_directives []yaml_tag_directive_t // The list of tag directives. - - indent int // The current indentation level. - - flow_level int // The current flow level. - - root_context bool // Is it the document root context? - sequence_context bool // Is it a sequence context? - mapping_context bool // Is it a mapping context? - simple_key_context bool // Is it a simple mapping key context? - - line int // The current line. - column int // The current column. - whitespace bool // If the last character was a whitespace? - indention bool // If the last character was an indentation character (' ', '-', '?', ':')? - open_ended bool // If an explicit document end is required? - - // Anchor analysis. - anchor_data struct { - anchor []byte // The anchor value. - alias bool // Is it an alias? - } - - // Tag analysis. - tag_data struct { - handle []byte // The tag handle. - suffix []byte // The tag suffix. - } - - // Scalar analysis. - scalar_data struct { - value []byte // The scalar value. - multiline bool // Does the scalar contain line breaks? - flow_plain_allowed bool // Can the scalar be expessed in the flow plain style? - block_plain_allowed bool // Can the scalar be expressed in the block plain style? - single_quoted_allowed bool // Can the scalar be expressed in the single quoted style? - block_allowed bool // Can the scalar be expressed in the literal or folded styles? - style yaml_scalar_style_t // The output style. - } - - // Dumper stuff - - opened bool // If the stream was already opened? - closed bool // If the stream was already closed? - - // The information associated with the document nodes. - anchors *struct { - references int // The number of references. - anchor int // The anchor id. - serialized bool // If the node has been emitted? - } - - last_anchor_id int // The last assigned anchor id. - - document *yaml_document_t // The currently emitted document. 
-} diff --git a/Godeps/_workspace/src/github.com/howeyc/gopass/LICENSE.txt b/Godeps/_workspace/src/github.com/howeyc/gopass/LICENSE.txt deleted file mode 100644 index 65e7260..0000000 --- a/Godeps/_workspace/src/github.com/howeyc/gopass/LICENSE.txt +++ /dev/null @@ -1,13 +0,0 @@ -Copyright (c) 2012 Chris Howey - -Permission to use, copy, modify, and distribute this software for any -purpose with or without fee is hereby granted, provided that the above -copyright notice and this permission notice appear in all copies. - -THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES -WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF -MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR -ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES -WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN -ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF -OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/Godeps/_workspace/src/github.com/howeyc/gopass/README.md b/Godeps/_workspace/src/github.com/howeyc/gopass/README.md deleted file mode 100644 index 81f8875..0000000 --- a/Godeps/_workspace/src/github.com/howeyc/gopass/README.md +++ /dev/null @@ -1,21 +0,0 @@ -# getpasswd in Go - -Retrieve password from user terminal input without echo - -Verified on BSD, Linux, and Windows. - -Example: -```go -package main - -import "fmt" -import "github.com/howeyc/gopass" - -func main() { - fmt.Printf("Password: ") - pass := gopass.GetPasswd() // Silent, for *'s use gopass.GetPasswdMasked() - // Do something with pass -} -``` - -Caution: Multi-byte characters not supported! diff --git a/Godeps/_workspace/src/github.com/howeyc/gopass/bsd.go b/Godeps/_workspace/src/github.com/howeyc/gopass/bsd.go deleted file mode 100644 index 6781884..0000000 --- a/Godeps/_workspace/src/github.com/howeyc/gopass/bsd.go +++ /dev/null @@ -1,29 +0,0 @@ -// +build freebsd openbsd netbsd - -package gopass - -/* -#include -#include -#include - -int getch() { - int ch; - struct termios t_old, t_new; - - tcgetattr(STDIN_FILENO, &t_old); - t_new = t_old; - t_new.c_lflag &= ~(ICANON | ECHO); - tcsetattr(STDIN_FILENO, TCSANOW, &t_new); - - ch = getchar(); - - tcsetattr(STDIN_FILENO, TCSANOW, &t_old); - return ch; -} -*/ -import "C" - -func getch() byte { - return byte(C.getch()) -} diff --git a/Godeps/_workspace/src/github.com/howeyc/gopass/pass.go b/Godeps/_workspace/src/github.com/howeyc/gopass/pass.go deleted file mode 100644 index d6f2df0..0000000 --- a/Godeps/_workspace/src/github.com/howeyc/gopass/pass.go +++ /dev/null @@ -1,44 +0,0 @@ -package gopass - -import ( - "os" -) - -// getPasswd returns the input read from terminal. -// If masked is true, typing will be matched by asterisks on the screen. -// Otherwise, typing will echo nothing. -func getPasswd(masked bool) []byte { - var pass, bs, mask []byte - if masked { - bs = []byte("\b \b") - mask = []byte("*") - } - - for { - if v := getch(); v == 127 || v == 8 { - if l := len(pass); l > 0 { - pass = pass[:l-1] - os.Stdout.Write(bs) - } - } else if v == 13 || v == 10 { - break - } else { - pass = append(pass, v) - os.Stdout.Write(mask) - } - } - println() - return pass -} - -// GetPasswd returns the password read from the terminal without echoing input. -// The returned byte array does not include end-of-line characters. -func GetPasswd() []byte { - return getPasswd(false) -} - -// GetPasswdMasked returns the password read from the terminal, echoing asterisks. 
-// The returned byte array does not include end-of-line characters. -func GetPasswdMasked() []byte { - return getPasswd(true) -} diff --git a/Godeps/_workspace/src/github.com/howeyc/gopass/win.go b/Godeps/_workspace/src/github.com/howeyc/gopass/win.go deleted file mode 100644 index ef0acf2..0000000 --- a/Godeps/_workspace/src/github.com/howeyc/gopass/win.go +++ /dev/null @@ -1,47 +0,0 @@ -// +build windows - -package gopass - -import "syscall" -import "unsafe" -import "unicode/utf16" - -func getch() byte { - modkernel32 := syscall.NewLazyDLL("kernel32.dll") - procReadConsole := modkernel32.NewProc("ReadConsoleW") - procGetConsoleMode := modkernel32.NewProc("GetConsoleMode") - procSetConsoleMode := modkernel32.NewProc("SetConsoleMode") - - var mode uint32 - pMode := &mode - procGetConsoleMode.Call(uintptr(syscall.Stdin), uintptr(unsafe.Pointer(pMode))) - - var echoMode, lineMode uint32 - echoMode = 4 - lineMode = 2 - var newMode uint32 - newMode = mode ^ (echoMode | lineMode) - - procSetConsoleMode.Call(uintptr(syscall.Stdin), uintptr(newMode)) - - line := make([]uint16, 1) - pLine := &line[0] - var n uint16 - procReadConsole.Call(uintptr(syscall.Stdin), uintptr(unsafe.Pointer(pLine)), uintptr(len(line)), uintptr(unsafe.Pointer(&n))) - - // For some reason n returned seems to big by 2 (Null terminated maybe?) - if n > 2 { - n -= 2 - } - - b := []byte(string(utf16.Decode(line[:n]))) - - procSetConsoleMode.Call(uintptr(syscall.Stdin), uintptr(mode)) - - // Not sure how this could happen, but it did for someone - if len(b) > 0 { - return b[0] - } else { - return 13 - } -} diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/check/check.go b/Godeps/_workspace/src/github.com/inconshreveable/go-update/check/check.go deleted file mode 100644 index 8717bb0..0000000 --- a/Godeps/_workspace/src/github.com/inconshreveable/go-update/check/check.go +++ /dev/null @@ -1,209 +0,0 @@ -package check - -import ( - "bytes" - _ "crypto/sha512" // for tls cipher support - "encoding/hex" - "encoding/json" - "fmt" - "io/ioutil" - "net/http" - "runtime" - - "github.com/remind101/deploy/Godeps/_workspace/src/bitbucket.org/kardianos/osext" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/inconshreveable/go-update" -) - -type Initiative string - -const ( - INITIATIVE_NEVER Initiative = "never" - INITIATIVE_AUTO = "auto" - INITIATIVE_MANUAL = "manual" -) - -var NoUpdateAvailable error = fmt.Errorf("No update available") - -type Params struct { - // protocol version - Version int `json:"version"` - // identifier of the application to update - AppId string `json:"app_id"` - // version of the application updating itself - AppVersion string `json:"app_version"` - // operating system of target platform - OS string `json:"-"` - // hardware architecture of target platform - Arch string `json:"-"` - // application-level user identifier - UserId string `json:"user_id"` - // checksum of the binary to replace (used for returning diff patches) - Checksum string `json:"checksum"` - // release channel (empty string means 'stable') - Channel string `json:"-"` - // tags for custom update channels - Tags map[string]string `json:"tags"` -} - -type Result struct { - up *update.Update - - // should the update be applied automatically/manually - Initiative Initiative `json:"initiative"` - // url where to download the updated application - Url string `json:"url"` - // a URL to a patch to apply - PatchUrl string `json:"patch_url"` - // the patch format (only bsdiff supported at the moment) - PatchType 
update.PatchType `json:"patch_type"` - // version of the new application - Version string `json:"version"` - // expected checksum of the new application - Checksum string `json:"checksum"` - // signature for verifying update authenticity - Signature string `json:"signature"` -} - -// CheckForUpdate makes an HTTP post to a URL with the JSON serialized -// representation of Params. It returns the deserialized result object -// returned by the remote endpoint or an error. If you do not set -// OS/Arch, CheckForUpdate will populate them for you. Similarly, if -// Version is 0, it will be set to 1. Lastly, if Checksum is the empty -// string, it will be automatically be computed for the running program's -// executable file. -func (p *Params) CheckForUpdate(url string, up *update.Update) (*Result, error) { - if p.Tags == nil { - p.Tags = make(map[string]string) - } - - if p.Channel == "" { - p.Channel = "stable" - } - - if p.OS == "" { - p.OS = runtime.GOOS - } - - if p.Arch == "" { - p.Arch = runtime.GOARCH - } - - if p.Version == 0 { - p.Version = 1 - } - - // ignore errors auto-populating the checksum - // if it fails, you just won't be able to patch - if up.TargetPath == "" { - p.Checksum = defaultChecksum() - } else { - checksum, err := update.ChecksumForFile(up.TargetPath) - if err != nil { - return nil, err - } - p.Checksum = hex.EncodeToString(checksum) - } - - p.Tags["os"] = p.OS - p.Tags["arch"] = p.Arch - p.Tags["channel"] = p.Channel - - body, err := json.Marshal(p) - if err != nil { - return nil, err - } - - resp, err := http.Post(url, "application/json", bytes.NewReader(body)) - if err != nil { - return nil, err - } - - // no content means no available update - if resp.StatusCode == 204 { - return nil, NoUpdateAvailable - } - - defer resp.Body.Close() - respBytes, err := ioutil.ReadAll(resp.Body) - if err != nil { - return nil, err - } - - result := &Result{up: up} - if err := json.Unmarshal(respBytes, result); err != nil { - return nil, err - } - - return result, nil -} - -func (p *Params) CheckAndApplyUpdate(url string, up *update.Update) (result *Result, err error, errRecover error) { - // check for an update - result, err = p.CheckForUpdate(url, up) - if err != nil { - return - } - - // run the available update - err, errRecover = result.Update() - return -} - -func (r *Result) Update() (err error, errRecover error) { - if r.Checksum != "" { - r.up.Checksum, err = hex.DecodeString(r.Checksum) - if err != nil { - return - } - } - - if r.Signature != "" { - r.up.Signature, err = hex.DecodeString(r.Signature) - if err != nil { - return - } - } - - if r.PatchType != "" { - r.up.PatchType = r.PatchType - } - - if r.Url == "" && r.PatchUrl == "" { - err = fmt.Errorf("Result does not contain an update url or patch update url") - return - } - - if r.PatchUrl != "" { - err, errRecover = r.up.FromUrl(r.PatchUrl) - if err == nil { - // success! 
- return - } else { - // failed to update from patch URL, try with the whole thing - if r.Url == "" || errRecover != nil { - // we can't try updating from a URL with the full contents - // in these cases, so fail - return - } else { - r.up.PatchType = update.PATCHTYPE_NONE - } - } - } - - // try updating from a URL with the full contents - return r.up.FromUrl(r.Url) -} - -func defaultChecksum() string { - path, err := osext.Executable() - if err != nil { - return "" - } - - checksum, err := update.ChecksumForFile(path) - if err != nil { - return "" - } - - return hex.EncodeToString(checksum) -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/LICENSE b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/LICENSE deleted file mode 100644 index fff9d6c..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/LICENSE +++ /dev/null @@ -1,20 +0,0 @@ -Copyright (c) 2013 rick olson - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the -"Software"), to deal in the Software without restriction, including -without limitation the rights to use, copy, modify, merge, publish, -distribute, sublicense, and/or sell copies of the Software, and to -permit persons to whom the Software is furnished to do so, subject to -the following conditions: - -The above copyright notice and this permission notice shall be -included in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE -SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/README.md b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/README.md deleted file mode 100644 index 805c772..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/README.md +++ /dev/null @@ -1,48 +0,0 @@ -# Sawyer - -Status: Very experimental - -Sawyer is an HTTP user agent for REST APIs. It is a spiritual compliment to -the [Ruby sawyer gem](https://github.com/lostisland/sawyer). - -![](http://techno-weenie.net/sawyer/images/sawyer.jpeg) - -Use this to build clients for HTTP/JSON APIs that behave like the GitHub API. 
- - -## Usage - -```go -type User struct { - Login string `json:"login"` -} - -class ApiError struct { - Message strign `json:"message"` -} - -client := sawyer.NewFromString("https://api.github.com") - -// the GitHub API prefers a vendor media type -client.Headers.Set("Accept", "application/vnd.github+json") - -apierr := &ApiError{} // decoded from response body on non-20x responses -user := &User{} -req := client.NewRequest("user/21", apierr) -res := req.Get(user) - -// get the user's repositories -apierr := &ApiError{} -repos := new([]Repository) -req := client.NewRequest(res.Hyperlink("repos", sawyer.M{"page": "2"}), apierr) -res := req.Get(repos) - -// post a new user -mtype := mediatype.Parse("application/vnd.github+json") -apierr := &ApiError{} -userInput := &User{Login: "bob"} -userOutput := &User{} -req := client.NewRequest("users", apierr) -err := req.SetBody(mtype, userInput) -res := req.Post(userOutput) -``` diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/gopack.config b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/gopack.config deleted file mode 100644 index a87174a..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/gopack.config +++ /dev/null @@ -1,9 +0,0 @@ -repo = "github.com/lostisland/go-sawyer" - -[deps.uritemplates] -import = "github.com/jtacoma/uritemplates" -commit = "2b6fc855d3a722bc0e5525ae50bb3c10703c5450" - -[deps.assert] -import = "github.com/bmizerany/assert" -commit = "e17e99893cb6509f428e1728281c2ad60a6b31e3" diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go deleted file mode 100644 index 700faef..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go +++ /dev/null @@ -1,9 +0,0 @@ -package mediaheader - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -type MediaHeader struct { - Relations hypermedia.Relations -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode.go deleted file mode 100644 index b40c374..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode.go +++ /dev/null @@ -1,51 +0,0 @@ -package mediatype - -import ( - "fmt" - "io" -) - -var decoders = make(map[string]DecoderFunc) - -// DecoderFunc is a function that creates a Decoder from an io.Reader. -type DecoderFunc func(r io.Reader) Decoder - -// A Decoder will decode the given value to the Decoder's io.Reader. -type Decoder interface { - Decode(v interface{}) error -} - -/* -AddDecoder installs a decoder for a given format. - - AddDecoder("json", func(r io.Reader) Encoder { return json.NewDecoder(r) }) - mt, err := Parse("application/json") - decoder, err := mt.Decoder(someReader) -*/ -func AddDecoder(format string, decfunc DecoderFunc) { - decoders[format] = decfunc -} - -// Decoder finds a decoder based on this MediaType's Format field. An error is -// returned if a decoder cannot be found. -func (m *MediaType) Decoder(body io.Reader) (Decoder, error) { - if decfunc, ok := decoders[m.Format]; ok { - return decfunc(body), nil - } - return nil, fmt.Errorf("No decoder found for format %s (%s)", m.Format, m.String()) -} - -// Encode uses this MediaType's Decoder to decode the io.Reader into the given -// value. 
-func (m *MediaType) Decode(v interface{}, body io.Reader) error { - if v == nil { - return nil - } - - dec, err := m.Decoder(body) - if err != nil { - return err - } - - return dec.Decode(v) -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go deleted file mode 100644 index 010a0d2..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go +++ /dev/null @@ -1,77 +0,0 @@ -package mediatype - -import ( - "bytes" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "io" - "io/ioutil" - "strings" - "testing" -) - -func TestAddDecoder(t *testing.T) { - buf := bytes.NewBufferString("bob") - mt, err := Parse("application/test+test") - if err != nil { - t.Fatalf("Error parsing media type: %s", err.Error()) - } - - person := &Person{} - err = mt.Decode(person, buf) - if err != nil { - t.Fatalf("Error decoding: %s", err.Error()) - } - assert.Equal(t, "bob", person.Name) -} - -func TestRequiresDecoder(t *testing.T) { - buf := bytes.NewBufferString("bob") - mt, err := Parse("application/test+whatevs") - if err != nil { - t.Fatalf("Error parsing media type: %s", err.Error()) - } - - person := &Person{} - err = mt.Decode(person, buf) - if err == nil { - t.Fatal("No decoding error") - } - - if !strings.HasPrefix(err.Error(), "No decoder found for format whatevs") { - t.Fatalf("Bad error: %s", err) - } -} - -func TestSkipsDecoderForNil(t *testing.T) { - buf := bytes.NewBufferString("bob") - mt, err := Parse("application/test+whatevs") - if err != nil { - t.Fatalf("Error parsing media type: %s", err.Error()) - } - - err = mt.Decode(nil, buf) - if err != nil { - t.Fatalf("Decoding error: %s", err.Error()) - } -} - -type PersonDecoder struct { - body io.Reader -} - -func (d *PersonDecoder) Decode(v interface{}) error { - if p, ok := v.(*Person); ok { - by, err := ioutil.ReadAll(d.body) - if err != nil { - return err - } - p.Name = string(by) - } - return nil -} - -func init() { - AddDecoder("test", func(r io.Reader) Decoder { - return &PersonDecoder{r} - }) -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode.go deleted file mode 100644 index f5e8207..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode.go +++ /dev/null @@ -1,53 +0,0 @@ -package mediatype - -import ( - "bytes" - "fmt" - "io" -) - -var encoders = make(map[string]EncoderFunc) - -// EncoderFunc is a function that creates an Encoder from an io.Writer. -type EncoderFunc func(w io.Writer) Encoder - -// An Encoder will encode the given value to the Encoder's io.Writer. -type Encoder interface { - Encode(v interface{}) error -} - -/* -AddEncoder installs an encoder for a given format. - - AddEncoder("json", func(w io.Writer) Encoder { return json.NewEncoder(w) }) - mt, err := Parse("application/json") - encoder, err := mt.Encoder(someWriter) -*/ -func AddEncoder(format string, encfunc EncoderFunc) { - encoders[format] = encfunc -} - -// Encoder finds an encoder based on this MediaType's Format field. An error is -// returned if an encoder cannot be found. 
-func (m *MediaType) Encoder(w io.Writer) (Encoder, error) { - if encfunc, ok := encoders[m.Format]; ok { - return encfunc(w), nil - } - return nil, fmt.Errorf("No encoder found for format %s (%s)", m.Format, m.String()) -} - -// Encode uses this MediaType's Encoder to encode the given value into a -// bytes.Buffer. -func (m *MediaType) Encode(v interface{}) (*bytes.Buffer, error) { - if v == nil { - return nil, fmt.Errorf("Nothing to encode") - } - - buf := new(bytes.Buffer) - enc, err := m.Encoder(buf) - if err != nil { - return buf, err - } - - return buf, enc.Encode(v) -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype.go deleted file mode 100644 index 242648d..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype.go +++ /dev/null @@ -1,134 +0,0 @@ -// Package mediatype contains helpers for parsing media type strings. Uses -// RFC4288 as a guide. -package mediatype - -import ( - "mime" - "strings" -) - -/* -A MediaType is a parsed representation of a media type string. - - application/vnd.github.raw+json; version=3; charset=utf-8 - -This gets broken up into the various fields: - -- Type: application/vnd.github.raw+json -- MainType: application -- SubType: vnd.github.raw -- Suffix: json -- Vendor: github -- Version: raw -- Format: json -- Params: - version: 3 - charset: utf-8 - -There are a few special behaviors that prioritize custom media types for APIs: - -If an API identifies with an "application/vnd" type, the Vendor and Version -fields are parsed from the remainder. The Version's semantic meaning depends on -the application. - -If it's not an "application/vnd" type, the Version field is taken from the -"version" parameter. - -The Format is taken from the Suffix by default. If not available, it is guessed -by looking for common strings anywhere in the media type. For instance, -"application/json" will identify as the "json" Format. - -The Format is used to get an Encoder and a Decoder. -*/ -type MediaType struct { - full string - Type string - MainType string - SubType string - Suffix string - Vendor string - Version string - Format string - Params map[string]string -} - -// Parse builds a *MediaType from a given media type string. -func Parse(v string) (*MediaType, error) { - mt, params, err := mime.ParseMediaType(v) - if err != nil { - return nil, err - } - - return parse(&MediaType{ - full: v, - Type: mt, - Params: params, - }) -} - -// String returns the full string representation of the MediaType. -func (m *MediaType) String() string { - return m.full -} - -// IsVendor determines if this MediaType is associated with commercially -// available products. 
-func (m *MediaType) IsVendor() bool { - return len(m.Vendor) > 0 -} - -func parse(m *MediaType) (*MediaType, error) { - pieces := strings.Split(m.Type, typeSplit) - m.MainType = pieces[0] - if len(pieces) > 1 { - subpieces := strings.Split(pieces[1], suffixSplit) - m.SubType = subpieces[0] - if len(subpieces) > 1 { - m.Suffix = subpieces[1] - } - } - - if strings.HasPrefix(m.SubType, vndPrefix) { - if vnd := m.SubType[vndLen:]; len(vnd) > 0 { - args := strings.SplitN(vnd, vndSplit, 2) - m.Vendor = args[0] - if len(args) > 1 { - m.Version = args[1] - } - } - } - - if len(m.Version) == 0 { - if v, ok := m.Params[versionKey]; ok { - m.Version = v - } - } - - if len(m.Suffix) > 0 { - m.Format = m.Suffix - } else { - guessFormat(m) - } - - return m, nil -} - -func guessFormat(m *MediaType) { - for _, fmt := range guessableTypes { - if strings.Contains(m.Type, fmt) { - m.Format = fmt - return - } - } -} - -const ( - typeSplit = "/" - suffixSplit = "+" - versionKey = "version" - vndPrefix = "vnd." - vndLen = 4 - vndSplit = "." -) - -var guessableTypes = []string{"json", "xml"} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go deleted file mode 100644 index a972c8f..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go +++ /dev/null @@ -1,100 +0,0 @@ -package mediatype - -import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "testing" -) - -func TestParsesJsonType(t *testing.T) { - m := Get(t, "application/json") - assert.Equal(t, "application/json", m.Type) - assert.Equal(t, "application", m.MainType) - assert.Equal(t, "json", m.SubType) - assert.Equal(t, "", m.Suffix) - assert.Equal(t, "", m.Vendor) - assert.Equal(t, "json", m.Format) - assert.Equal(t, false, m.IsVendor()) - assert.Equal(t, 0, len(m.Params)) -} - -func TestParsesEmptyType(t *testing.T) { - m := Get(t, "*; q=.2") - assert.Equal(t, "*", m.Type) - assert.Equal(t, "*", m.MainType) - assert.Equal(t, "", m.SubType) - assert.Equal(t, "", m.Suffix) - assert.Equal(t, "", m.Vendor) - assert.Equal(t, "", m.Format) - assert.Equal(t, false, m.IsVendor()) - assert.Equal(t, 1, len(m.Params)) - assert.Equal(t, ".2", m.Params["q"]) -} - -func TestSimpleTypeWithParams(t *testing.T) { - m := Get(t, "text/plain; charset=utf-8") - assert.Equal(t, "text/plain", m.Type) - assert.Equal(t, "text", m.MainType) - assert.Equal(t, "plain", m.SubType) - assert.Equal(t, "", m.Suffix) - assert.Equal(t, "", m.Vendor) - assert.Equal(t, "", m.Format) - assert.Equal(t, false, m.IsVendor()) - assert.Equal(t, 1, len(m.Params)) - assert.Equal(t, "utf-8", m.Params["charset"]) -} - -func TestVendorType(t *testing.T) { - m := Get(t, "application/vnd.json+xml; charset=utf-8") - assert.Equal(t, "application/vnd.json+xml", m.Type) - assert.Equal(t, "application", m.MainType) - assert.Equal(t, "vnd.json", m.SubType) - assert.Equal(t, "xml", m.Suffix) - assert.Equal(t, "json", m.Vendor) - assert.Equal(t, "xml", m.Format) - assert.Equal(t, true, m.IsVendor()) - assert.Equal(t, 1, len(m.Params)) - assert.Equal(t, "utf-8", m.Params["charset"]) -} - -func TestSubtypeVersion(t *testing.T) { - m := Get(t, "application/vnd.abc.v1+xml; version=v2; charset=utf-8") - assert.Equal(t, "application/vnd.abc.v1+xml", m.Type) - assert.Equal(t, "application", m.MainType) - assert.Equal(t, "vnd.abc.v1", m.SubType) - assert.Equal(t, "xml", m.Suffix) - assert.Equal(t, "abc", m.Vendor) - 
assert.Equal(t, "v1", m.Version) - assert.Equal(t, "xml", m.Format) - assert.Equal(t, true, m.IsVendor()) - assert.Equal(t, 2, len(m.Params)) - assert.Equal(t, "utf-8", m.Params["charset"]) - assert.Equal(t, "v2", m.Params["version"]) -} - -func TestParamVersion(t *testing.T) { - m := Get(t, "application/vnd.abc+xml; version=v2; charset=utf-8") - assert.Equal(t, "application/vnd.abc+xml", m.Type) - assert.Equal(t, "application", m.MainType) - assert.Equal(t, "vnd.abc", m.SubType) - assert.Equal(t, "xml", m.Suffix) - assert.Equal(t, "abc", m.Vendor) - assert.Equal(t, "v2", m.Version) - assert.Equal(t, "xml", m.Format) - assert.Equal(t, true, m.IsVendor()) - assert.Equal(t, 2, len(m.Params)) - assert.Equal(t, "utf-8", m.Params["charset"]) - assert.Equal(t, "v2", m.Params["version"]) -} -func Get(t *testing.T, v string) *MediaType { - m, err := Parse(v) - if err != nil { - t.Fatalf("Errors parsing media type %s:\n%s", v, err.Error()) - } - assert.Equal(t, v, m.String()) - return m -} - -// used for encoding/decoding tests -type Person struct { - Name string -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go deleted file mode 100644 index 27aa1ff..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go +++ /dev/null @@ -1,132 +0,0 @@ -package sawyer - -import ( - "net/url" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var endpoints = map[string]map[string]string{ - "http://api.github.com": map[string]string{ - "user": "http://api.github.com/user", - "/user": "http://api.github.com/user", - "http://api.com/user": "http://api.com/user", - }, - "http://api.github.com/api/v1": map[string]string{ - "user": "http://api.github.com/api/v1/user", - "/user": "http://api.github.com/user", - "http://api.com/user": "http://api.com/user", - }, -} - -func TestResolve(t *testing.T) { - for endpoint, tests := range endpoints { - client, err := NewFromString(endpoint, nil) - if err != nil { - t.Fatal(err.Error()) - } - - for relative, result := range tests { - u, err := url.Parse(relative) - if err != nil { - t.Error(err.Error()) - break - } - - abs := client.ResolveReference(u) - if absurl := abs.String(); result != absurl { - t.Errorf("Bad absolute URL %s for %s + %s == %s", absurl, endpoint, relative, result) - } - } - } -} - -func TestResolveWithNoHeader(t *testing.T) { - client, err := NewFromString("http://api.github.com", nil) - if err != nil { - t.Fatal(err.Error()) - } - - req, _ := client.NewRequest("") - assert.Equal(t, 0, len(req.Header)) - - req.Header.Set("Cache-Control", "private") - assert.Equal(t, 1, len(req.Header)) - assert.Equal(t, 0, len(client.Header)) -} - -func TestResolveWithHeader(t *testing.T) { - client, err := NewFromString("http://api.github.com", nil) - if err != nil { - t.Fatal(err.Error()) - } - client.Header.Set("Cache-Control", "private") - - req, _ := client.NewRequest("") - assert.Equal(t, 1, len(req.Header)) - assert.Equal(t, "private", req.Header.Get("Cache-Control")) -} - -func TestResolveClientQuery(t *testing.T) { - client, err := NewFromString("http://api.github.com", nil) - if err != nil { - t.Fatal(err.Error()) - } - - u, err := client.ResolveReferenceString("/foo?a=1") - if err != nil { - t.Fatal(err.Error()) - } - - assert.Equal(t, "http://api.github.com/foo?a=1", u) -} - -func 
TestResolveClientQueryWithClientQuery(t *testing.T) { - client, err := NewFromString("http://api.github.com?a=1&b=1", nil) - if err != nil { - t.Fatal(err.Error()) - } - - assert.Equal(t, "1", client.Query.Get("a")) - assert.Equal(t, "1", client.Query.Get("b")) - - client.Query.Set("b", "2") - client.Query.Set("c", "3") - u, err := client.ResolveReferenceString("/foo?d=4") - if err != nil { - t.Fatal(err.Error()) - } - - assert.Equal(t, "http://api.github.com/foo?a=1&b=2&c=3&d=4", u) -} - -func TestResolveClientRelativeReference(t *testing.T) { - client, err := NewFromString("http://github.enterprise.com/api/v3/", nil) - if err != nil { - t.Fatal(err.Error()) - } - u, err := client.ResolveReferenceString("users") - if err != nil { - t.Fatal(err.Error()) - } - - assert.Equal(t, "http://github.enterprise.com/api/v3/users", u) -} - -func TestResolveClientRelativeHyperlink(t *testing.T) { - client, err := NewFromString("http://github.enterprise.com/api/v3/", nil) - if err != nil { - t.Fatal(err.Error()) - } - link := hypermedia.Hyperlink("repos/{repo}") - expanded, err := link.Expand(hypermedia.M{"repo": "foo"}) - - u, err := client.ResolveReferenceString(expanded.String()) - if err != nil { - t.Fatal(err.Error()) - } - - assert.Equal(t, "http://github.enterprise.com/api/v3/repos/foo", u) -} diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt deleted file mode 100644 index fd4005e..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/fmt +++ /dev/null @@ -1,3 +0,0 @@ -#!/bin/bash - -gofmt -w -l *.go ./mediatype diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test b/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test deleted file mode 100644 index b49a966..0000000 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/script/test +++ /dev/null @@ -1,3 +0,0 @@ -script/fmt -go test -race -v "$@" ./mediatype -go test -race -v "$@" . 
diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore deleted file mode 100644 index 0026861..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitignore +++ /dev/null @@ -1,22 +0,0 @@ -# Compiled Object files, Static and Dynamic libs (Shared Objects) -*.o -*.a -*.so - -# Folders -_obj -_test - -# Architecture specific extensions/prefixes -*.[568vq] -[568vq].out - -*.cgo1.go -*.cgo2.c -_cgo_defun.c -_cgo_gotypes.go -_cgo_export.* - -_testmain.go - -*.exe diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules deleted file mode 100644 index 8eb6ba0..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "tests"] - path = tests - url = https://github.com/uri-templates/uritemplate-test.git diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md deleted file mode 100644 index 4c0a96e..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.godocdown.md +++ /dev/null @@ -1,10 +0,0 @@ -{{ .EmitHeader }} - -[![Build Status](https://travis-ci.org/jtacoma/uritemplates.png)](https://travis-ci.org/jtacoma/uritemplates) [![Coverage Status](https://coveralls.io/repos/jtacoma/uritemplates/badge.png)](https://coveralls.io/r/jtacoma/uritemplates) - -{{ .EmitSynopsis }} - -## License - -Use of this source code is governed by a BSD-style license that can be found in -the LICENSE file. diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml deleted file mode 100644 index 4f2ee4d..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/.travis.yml +++ /dev/null @@ -1 +0,0 @@ -language: go diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/LICENSE b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/LICENSE deleted file mode 100644 index de9c88c..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/LICENSE +++ /dev/null @@ -1,18 +0,0 @@ -Copyright (c) 2013 Joshua Tacoma - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of -the Software, and to permit persons to whom the Software is furnished to do so, -subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS -FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR -COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER -IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN -CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/README.md b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/README.md deleted file mode 100644 index ff82128..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/README.md +++ /dev/null @@ -1,23 +0,0 @@ -# uritemplates --- - import "github.com/jtacoma/uritemplates" - -[![Build Status](https://travis-ci.org/jtacoma/uritemplates.png)](https://travis-ci.org/jtacoma/uritemplates) [![Coverage Status](https://coveralls.io/repos/jtacoma/uritemplates/badge.png)](https://coveralls.io/r/jtacoma/uritemplates) - -Package uritemplates is a level 4 implementation of RFC 6570 (URI -Template, http://tools.ietf.org/html/rfc6570). - -To use uritemplates, parse a template string and expand it with a value -map: - - template, _ := uritemplates.Parse("https://api.github.com/repos{/user,repo}") - values := make(map[string]interface{}) - values["user"] = "jtacoma" - values["repo"] = "uritemplates" - expanded, _ := template.ExpandString(values) - fmt.Printf(expanded) - -## License - -Use of this source code is governed by a BSD-style license that can be found in -the LICENSE file. diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates.go b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates.go deleted file mode 100644 index 8a84813..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates.go +++ /dev/null @@ -1,359 +0,0 @@ -// Copyright 2013 Joshua Tacoma. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// Package uritemplates is a level 4 implementation of RFC 6570 (URI -// Template, http://tools.ietf.org/html/rfc6570). -// -// To use uritemplates, parse a template string and expand it with a value -// map: -// -// template, _ := uritemplates.Parse("https://api.github.com/repos{/user,repo}") -// values := make(map[string]interface{}) -// values["user"] = "jtacoma" -// values["repo"] = "uritemplates" -// expanded, _ := template.ExpandString(values) -// fmt.Printf(expanded) -// -package uritemplates - -import ( - "bytes" - "errors" - "fmt" - "reflect" - "regexp" - "strconv" - "strings" -) - -var ( - unreserved = regexp.MustCompile("[^A-Za-z0-9\\-._~]") - reserved = regexp.MustCompile("[^A-Za-z0-9\\-._~:/?#[\\]@!$&'()*+,;=]") - validname = regexp.MustCompile("^([A-Za-z0-9_\\.]|%[0-9A-Fa-f][0-9A-Fa-f])+$") - hex = []byte("0123456789ABCDEF") -) - -func pctEncode(src []byte) []byte { - dst := make([]byte, len(src)*3) - for i, b := range src { - buf := dst[i*3 : i*3+3] - buf[0] = 0x25 - buf[1] = hex[b/16] - buf[2] = hex[b%16] - } - return dst -} - -func escape(s string, allowReserved bool) (escaped string) { - if allowReserved { - escaped = string(reserved.ReplaceAllFunc([]byte(s), pctEncode)) - } else { - escaped = string(unreserved.ReplaceAllFunc([]byte(s), pctEncode)) - } - return escaped -} - -// A UriTemplate is a parsed representation of a URI template. -type UriTemplate struct { - raw string - parts []templatePart -} - -// Parse parses a URI template string into a UriTemplate object. 
-func Parse(rawtemplate string) (template *UriTemplate, err error) { - template = new(UriTemplate) - template.raw = rawtemplate - split := strings.Split(rawtemplate, "{") - template.parts = make([]templatePart, len(split)*2-1) - for i, s := range split { - if i == 0 { - if strings.Contains(s, "}") { - err = errors.New("unexpected }") - break - } - template.parts[i].raw = s - } else { - subsplit := strings.Split(s, "}") - if len(subsplit) != 2 { - err = errors.New("malformed template") - break - } - expression := subsplit[0] - template.parts[i*2-1], err = parseExpression(expression) - if err != nil { - break - } - template.parts[i*2].raw = subsplit[1] - } - } - if err != nil { - template = nil - } - return template, err -} - -type templatePart struct { - raw string - terms []templateTerm - first string - sep string - named bool - ifemp string - allowReserved bool -} - -type templateTerm struct { - name string - explode bool - truncate int -} - -func parseExpression(expression string) (result templatePart, err error) { - switch expression[0] { - case '+': - result.sep = "," - result.allowReserved = true - expression = expression[1:] - case '.': - result.first = "." - result.sep = "." - expression = expression[1:] - case '/': - result.first = "/" - result.sep = "/" - expression = expression[1:] - case ';': - result.first = ";" - result.sep = ";" - result.named = true - expression = expression[1:] - case '?': - result.first = "?" - result.sep = "&" - result.named = true - result.ifemp = "=" - expression = expression[1:] - case '&': - result.first = "&" - result.sep = "&" - result.named = true - result.ifemp = "=" - expression = expression[1:] - case '#': - result.first = "#" - result.sep = "," - result.allowReserved = true - expression = expression[1:] - default: - result.sep = "," - } - rawterms := strings.Split(expression, ",") - result.terms = make([]templateTerm, len(rawterms)) - for i, raw := range rawterms { - result.terms[i], err = parseTerm(raw) - if err != nil { - break - } - } - return result, err -} - -func parseTerm(term string) (result templateTerm, err error) { - if strings.HasSuffix(term, "*") { - result.explode = true - term = term[:len(term)-1] - } - split := strings.Split(term, ":") - if len(split) == 1 { - result.name = term - } else if len(split) == 2 { - result.name = split[0] - var parsed int64 - parsed, err = strconv.ParseInt(split[1], 10, 0) - result.truncate = int(parsed) - } else { - err = errors.New("multiple colons in same term") - } - if !validname.MatchString(result.name) { - err = errors.New("not a valid name: " + result.name) - } - if result.explode && result.truncate > 0 { - err = errors.New("both explode and prefix modifers on same term") - } - return result, err -} - -// Expand expands a URI template with a set of values to produce a string. 
-func (self *UriTemplate) Expand(value interface{}) (string, error) { - values, ismap := value.(map[string]interface{}) - if !ismap { - if m, ismap := struct2map(value); !ismap { - return "", errors.New("expected map[string]interface{}, struct, or pointer to struct.") - } else { - return self.Expand(m) - } - } - var buf bytes.Buffer - for _, p := range self.parts { - err := p.expand(&buf, values) - if err != nil { - return "", err - } - } - return buf.String(), nil -} - -func (self *templatePart) expand(buf *bytes.Buffer, values map[string]interface{}) error { - if len(self.raw) > 0 { - buf.WriteString(self.raw) - return nil - } - var zeroLen = buf.Len() - buf.WriteString(self.first) - var firstLen = buf.Len() - for _, term := range self.terms { - value, exists := values[term.name] - if !exists { - continue - } - if buf.Len() != firstLen { - buf.WriteString(self.sep) - } - switch v := value.(type) { - case string: - self.expandString(buf, term, v) - case []interface{}: - self.expandArray(buf, term, v) - case map[string]interface{}: - if term.truncate > 0 { - return errors.New("cannot truncate a map expansion") - } - self.expandMap(buf, term, v) - default: - if m, ismap := struct2map(value); ismap { - if term.truncate > 0 { - return errors.New("cannot truncate a map expansion") - } - self.expandMap(buf, term, m) - } else { - str := fmt.Sprintf("%v", value) - self.expandString(buf, term, str) - } - } - } - if buf.Len() == firstLen { - original := buf.Bytes()[:zeroLen] - buf.Reset() - buf.Write(original) - } - return nil -} - -func (self *templatePart) expandName(buf *bytes.Buffer, name string, empty bool) { - if self.named { - buf.WriteString(name) - if empty { - buf.WriteString(self.ifemp) - } else { - buf.WriteString("=") - } - } -} - -func (self *templatePart) expandString(buf *bytes.Buffer, t templateTerm, s string) { - if len(s) > t.truncate && t.truncate > 0 { - s = s[:t.truncate] - } - self.expandName(buf, t.name, len(s) == 0) - buf.WriteString(escape(s, self.allowReserved)) -} - -func (self *templatePart) expandArray(buf *bytes.Buffer, t templateTerm, a []interface{}) { - if len(a) == 0 { - return - } else if !t.explode { - self.expandName(buf, t.name, false) - } - for i, value := range a { - if t.explode && i > 0 { - buf.WriteString(self.sep) - } else if i > 0 { - buf.WriteString(",") - } - var s string - switch v := value.(type) { - case string: - s = v - default: - s = fmt.Sprintf("%v", v) - } - if len(s) > t.truncate && t.truncate > 0 { - s = s[:t.truncate] - } - if self.named && t.explode { - self.expandName(buf, t.name, len(s) == 0) - } - buf.WriteString(escape(s, self.allowReserved)) - } -} - -func (self *templatePart) expandMap(buf *bytes.Buffer, t templateTerm, m map[string]interface{}) { - if len(m) == 0 { - return - } - if !t.explode { - self.expandName(buf, t.name, len(m) == 0) - } - var firstLen = buf.Len() - for k, value := range m { - if firstLen != buf.Len() { - if t.explode { - buf.WriteString(self.sep) - } else { - buf.WriteString(",") - } - } - var s string - switch v := value.(type) { - case string: - s = v - default: - s = fmt.Sprintf("%v", v) - } - if t.explode { - buf.WriteString(escape(k, self.allowReserved)) - buf.WriteRune('=') - buf.WriteString(escape(s, self.allowReserved)) - } else { - buf.WriteString(escape(k, self.allowReserved)) - buf.WriteRune(',') - buf.WriteString(escape(s, self.allowReserved)) - } - } -} - -func struct2map(v interface{}) (map[string]interface{}, bool) { - value := reflect.ValueOf(v) - switch value.Type().Kind() { - case 
reflect.Ptr: - return struct2map(value.Elem().Interface()) - case reflect.Struct: - m := make(map[string]interface{}) - for i := 0; i < value.NumField(); i++ { - tag := value.Type().Field(i).Tag - var name string - if strings.Contains(string(tag), ":") { - name = tag.Get("uri") - } else { - name = strings.TrimSpace(string(tag)) - } - if len(name) == 0 { - name = value.Type().Field(i).Name - } - m[name] = value.Field(i).Interface() - } - return m, true - } - return nil, false -} diff --git a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates_test.go b/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates_test.go deleted file mode 100644 index f4cefd5..0000000 --- a/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates_test.go +++ /dev/null @@ -1,245 +0,0 @@ -// Copyright 2013 Joshua Tacoma. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package uritemplates - -import ( - "encoding/json" - "os" - "testing" -) - -type spec struct { - title string - values map[string]interface{} - tests []specTest -} -type specTest struct { - template string - expected []string -} - -func loadSpec(t *testing.T, path string) []spec { - - file, err := os.Open(path) - if err != nil { - t.Errorf("Failed to load test specification: %s", err) - } - - stat, _ := file.Stat() - buffer := make([]byte, stat.Size()) - _, err = file.Read(buffer) - if err != nil { - t.Errorf("Failed to load test specification: %s", err) - } - - var root_ interface{} - err = json.Unmarshal(buffer, &root_) - if err != nil { - t.Errorf("Failed to load test specification: %s", err) - } - - root := root_.(map[string]interface{}) - results := make([]spec, 1024) - i := -1 - for title, spec_ := range root { - i = i + 1 - results[i].title = title - specMap := spec_.(map[string]interface{}) - results[i].values = specMap["variables"].(map[string]interface{}) - tests := specMap["testcases"].([]interface{}) - results[i].tests = make([]specTest, len(tests)) - for k, test_ := range tests { - test := test_.([]interface{}) - results[i].tests[k].template = test[0].(string) - switch typ := test[1].(type) { - case string: - results[i].tests[k].expected = make([]string, 1) - results[i].tests[k].expected[0] = test[1].(string) - case []interface{}: - arr := test[1].([]interface{}) - results[i].tests[k].expected = make([]string, len(arr)) - for m, s := range arr { - results[i].tests[k].expected[m] = s.(string) - } - case bool: - results[i].tests[k].expected = make([]string, 0) - default: - t.Errorf("Unrecognized value type %v", typ) - } - } - } - return results -} - -func runSpec(t *testing.T, path string) { - var spec = loadSpec(t, path) - for _, group := range spec { - for _, test := range group.tests { - template, err := Parse(test.template) - if err != nil { - if len(test.expected) > 0 { - t.Errorf("%s: %s %v", group.title, err, test.template) - } - continue - } - result, err := template.Expand(group.values) - if err != nil { - if len(test.expected) > 0 { - t.Errorf("%s: %s %v", group.title, err, test.template) - } - continue - } else if len(test.expected) == 0 { - t.Errorf("%s: should have failed while parsing or expanding %v but got %v", group.title, test.template, result) - continue - } - pass := false - for _, expected := range test.expected { - if result == expected { - pass = true - } - } - if !pass { - t.Errorf("%s: expected %v, but got %v", group.title, test.expected[0], result) - } - } - } -} - -func TestExtended(t *testing.T) { 
- runSpec(t, "tests/extended-tests.json") -} - -func TestNegative(t *testing.T) { - runSpec(t, "tests/negative-tests.json") -} - -func TestSpecExamples(t *testing.T) { - runSpec(t, "tests/spec-examples.json") -} - -var parse_tests = []struct { - Template string - ParseOk bool -}{ - { - // Syntax error, too many colons: - "{opts:1:2}", - false, - }, -} - -func TestParse(t *testing.T) { - for itest, test := range parse_tests { - if _, err := Parse(test.Template); err != nil { - if test.ParseOk { - t.Errorf("%v", err) - } - } else if !test.ParseOk { - t.Errorf("%d: expected error, got none.", itest) - } - } -} - -type Location struct { - Path []interface{} `uri:"path"` - Version int `json:"version"` - Opts Options `opts` -} - -type Options struct { - Format string `uri:"fmt"` -} - -var expand_tests = []struct { - Source interface{} - Template string - Expected string - ExpandOk bool -}{ - { - // General struct expansion: - Location{ - Path: []interface{}{"main", "quux"}, - Version: 2, - Opts: Options{ - Format: "pdf", - }, - }, - "{/path*,Version}{?opts*}", - "/main/quux/2?fmt=pdf", - true, - }, { - // Pointer to struct: - &Location{Opts: Options{Format: "pdf"}}, - "{?opts*}", - "?fmt=pdf", - true, - }, { - // Map expansion cannot be truncated: - Location{Opts: Options{Format: "pdf"}}, - "{?opts:3}", - "", - false, - }, { - // Map whose values are not all strings: - map[string]interface{}{ - "one": map[string]interface{}{ - "two": 42, - }, - }, - "{?one*}", - "?two=42", - true, - }, { - // Value of inappropriate type: - 42, - "{?one*}", - "", - false, - }, { - // Truncated array whose values are not all strings: - map[string]interface{}{"one": []interface{}{1234}}, - "{?one:3}", - "?one=123", - true, - }, -} - -func TestUriTemplate_Expand(t *testing.T) { - for itest, test := range expand_tests { - if template, err := Parse(test.Template); err != nil { - t.Errorf("%d: %v", itest, err) - } else if expanded, err := template.Expand(test.Source); err != nil { - if test.ExpandOk { - t.Errorf("%d: unexpected error: %v", itest, err) - } - } else if !test.ExpandOk { - t.Errorf("%d: expected error, got none.", itest, err) - } else if expanded != test.Expected { - t.Errorf("%d: expected %v, got %v", itest, test.Expected, expanded) - } - } -} - -func BenchmarkParse(b *testing.B) { - for i := 0; i < b.N; i++ { - Parse("http://localhost:6060{/type,path}{.fmt}{?q*}") - } -} - -func BenchmarkExpand(b *testing.B) { - templ, _ := Parse("http://localhost:6060{/type,path}{.fmt}{?q*}") - data := map[string]interface{}{ - "type": "pkg", - "path": [...]string{"github.com", "jtacoma", "uritemplates"}, - "q": map[string]interface{}{ - "somequery": "x!@#$", - "other": "y&*()", - }, - } - for i := 0; i < b.N; i++ { - templ.Expand(data) - } -} diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE b/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE deleted file mode 100644 index a6d7731..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE +++ /dev/null @@ -1,19 +0,0 @@ -Copyright (C) 2014 Kevin Ballard - -Permission is hereby granted, free of charge, to any person obtaining -a copy of this software and associated documentation files (the "Software"), -to deal in the Software without restriction, including without limitation -the rights to use, copy, modify, merge, publish, distribute, sublicense, -and/or sell copies of the Software, and to permit persons to whom the -Software is furnished to do so, subject to the following conditions: - -The above copyright 
notice and this permission notice shall be included -in all copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, -EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES -OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. -IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, -TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE -OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/README b/Godeps/_workspace/src/github.com/kballard/go-shellquote/README deleted file mode 100644 index 4d34e87..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/README +++ /dev/null @@ -1,36 +0,0 @@ -PACKAGE - -package shellquote - import "github.com/kballard/go-shellquote" - - Shellquote provides utilities for joining/splitting strings using sh's - word-splitting rules. - -VARIABLES - -var ( - UnterminatedSingleQuoteError = errors.New("Unterminated single-quoted string") - UnterminatedDoubleQuoteError = errors.New("Unterminated double-quoted string") - UnterminatedEscapeError = errors.New("Unterminated backslash-escape") -) - - -FUNCTIONS - -func Join(args ...string) string - Join quotes each argument and joins them with a space. If passed to - /bin/sh, the resulting string will be split back into the original - arguments. - -func Split(input string) (words []string, err error) - Split splits a string according to /bin/sh's word-splitting rules. It - supports backslash-escapes, single-quotes, and double-quotes. Notably it - does not support the $'' style of quoting. It also doesn't attempt to - perform any other sort of expansion, including brace expansion, shell - expansion, or pathname expansion. - - If the given input has an unterminated quoted string or ends in a - backslash-escape, one of UnterminatedSingleQuoteError, - UnterminatedDoubleQuoteError, or UnterminatedEscapeError is returned. - - diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go b/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go deleted file mode 100644 index 9cba3c8..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go +++ /dev/null @@ -1,29 +0,0 @@ -package shellquote - -import ( - "reflect" - "testing" - "testing/quick" -) - -// this is called bothtest because it tests Split and Join together - -func TestJoinSplit(t *testing.T) { - f := func(strs []string) bool { - // Join, then split, the input - combined := Join(strs...) - split, err := Split(combined) - if err != nil { - t.Logf("Error splitting %#v: %v", combined, err) - return false - } - if !reflect.DeepEqual(strs, split) { - t.Logf("Input %q did not match output %q", strs, split) - return false - } - return true - } - if err := quick.Check(f, nil); err != nil { - t.Error(err) - } -} diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go b/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go deleted file mode 100644 index 9445fa4..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go +++ /dev/null @@ -1,3 +0,0 @@ -// Shellquote provides utilities for joining/splitting strings using sh's -// word-splitting rules. 
-package shellquote diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go b/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go deleted file mode 100644 index f6cacee..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go +++ /dev/null @@ -1,102 +0,0 @@ -package shellquote - -import ( - "bytes" - "strings" - "unicode/utf8" -) - -// Join quotes each argument and joins them with a space. -// If passed to /bin/sh, the resulting string will be split back into the -// original arguments. -func Join(args ...string) string { - var buf bytes.Buffer - for i, arg := range args { - if i != 0 { - buf.WriteByte(' ') - } - quote(arg, &buf) - } - return buf.String() -} - -const ( - specialChars = "\\'\"`${[|&;<>()*?!" - extraSpecialChars = " \t\n" - prefixChars = "~" -) - -func quote(word string, buf *bytes.Buffer) { - // We want to try to produce a "nice" output. As such, we will - // backslash-escape most characters, but if we encounter a space, or if we - // encounter an extra-special char (which doesn't work with - // backslash-escaping) we switch over to quoting the whole word. We do this - // with a space because it's typically easier for people to read multi-word - // arguments when quoted with a space rather than with ugly backslashes - // everywhere. - origLen := buf.Len() - - if len(word) == 0 { - // oops, no content - buf.WriteString("''") - return - } - - cur, prev := word, word - atStart := true - for len(cur) > 0 { - c, l := utf8.DecodeRuneInString(cur) - cur = cur[l:] - if strings.ContainsRune(specialChars, c) || (atStart && strings.ContainsRune(prefixChars, c)) { - // copy the non-special chars up to this point - if len(cur) < len(prev) { - buf.WriteString(word[0 : len(prev)-len(cur)-l]) - } - buf.WriteByte('\\') - buf.WriteRune(c) - prev = cur - } else if strings.ContainsRune(extraSpecialChars, c) { - // start over in quote mode - buf.Truncate(origLen) - goto quote - } - atStart = false - } - if len(prev) > 0 { - buf.WriteString(prev) - } - return - -quote: - // quote mode - // Use single-quotes, but if we find a single-quote in the word, we need - // to terminate the string, emit an escaped quote, and start the string up - // again - inQuote := false - for len(word) > 0 { - i := strings.IndexRune(word, '\'') - if i == -1 { - break - } - if i > 0 { - if !inQuote { - buf.WriteByte('\'') - inQuote = true - } - buf.WriteString(word[0:i]) - word = word[i+1:] - } - if inQuote { - buf.WriteByte('\'') - inQuote = false - } - buf.WriteString("\\'") - } - if len(word) > 0 { - if !inQuote { - buf.WriteByte('\'') - } - buf.WriteString(word) - buf.WriteByte('\'') - } -} diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go b/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go deleted file mode 100644 index a4d2d82..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package shellquote - -import ( - "testing" -) - -func TestSimpleJoin(t *testing.T) { - for _, elem := range simpleJoinTest { - output := Join(elem.input...) 
- if output != elem.output { - t.Errorf("Input %q, got %q, expected %q", elem.input, output, elem.output) - } - } -} - -var simpleJoinTest = []struct { - input []string - output string -}{ - {[]string{"test"}, "test"}, - {[]string{"hello goodbye"}, "'hello goodbye'"}, - {[]string{"hello", "goodbye"}, "hello goodbye"}, - {[]string{"don't you know the dewey decimal system?"}, "'don'\\''t you know the dewey decimal system?'"}, - {[]string{"don't", "you", "know", "the", "dewey", "decimal", "system?"}, "don\\'t you know the dewey decimal system\\?"}, - {[]string{"~user", "u~ser", " ~user", "!~user"}, "\\~user u~ser ' ~user' \\!~user"}, - {[]string{"foo*", "M{ovies,usic}", "ab[cd]", "%3"}, "foo\\* M\\{ovies,usic} ab\\[cd] %3"}, - {[]string{"one", "", "three"}, "one '' three"}, -} diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go b/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go deleted file mode 100644 index ba3a0f2..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go +++ /dev/null @@ -1,144 +0,0 @@ -package shellquote - -import ( - "bytes" - "errors" - "strings" - "unicode/utf8" -) - -var ( - UnterminatedSingleQuoteError = errors.New("Unterminated single-quoted string") - UnterminatedDoubleQuoteError = errors.New("Unterminated double-quoted string") - UnterminatedEscapeError = errors.New("Unterminated backslash-escape") -) - -var ( - splitChars = " \n\t" - singleChar = '\'' - doubleChar = '"' - escapeChar = '\\' - doubleEscapeChars = "$`\"\n\\" -) - -// Split splits a string according to /bin/sh's word-splitting rules. It -// supports backslash-escapes, single-quotes, and double-quotes. Notably it does -// not support the $'' style of quoting. It also doesn't attempt to perform any -// other sort of expansion, including brace expansion, shell expansion, or -// pathname expansion. -// -// If the given input has an unterminated quoted string or ends in a -// backslash-escape, one of UnterminatedSingleQuoteError, -// UnterminatedDoubleQuoteError, or UnterminatedEscapeError is returned. 
-func Split(input string) (words []string, err error) { - var buf bytes.Buffer - words = make([]string, 0) - - for len(input) > 0 { - // skip any splitChars at the start - c, l := utf8.DecodeRuneInString(input) - if strings.ContainsRune(splitChars, c) { - input = input[l:] - continue - } - - var word string - word, input, err = splitWord(input, &buf) - if err != nil { - return - } - words = append(words, word) - } - return -} - -func splitWord(input string, buf *bytes.Buffer) (word string, remainder string, err error) { - buf.Reset() - -raw: - { - cur := input - for len(cur) > 0 { - c, l := utf8.DecodeRuneInString(cur) - cur = cur[l:] - if c == singleChar { - buf.WriteString(input[0 : len(input)-len(cur)-l]) - input = cur - goto single - } else if c == doubleChar { - buf.WriteString(input[0 : len(input)-len(cur)-l]) - input = cur - goto double - } else if c == escapeChar { - buf.WriteString(input[0 : len(input)-len(cur)-l]) - input = cur - goto escape - } else if strings.ContainsRune(splitChars, c) { - buf.WriteString(input[0 : len(input)-len(cur)-l]) - return buf.String(), cur, nil - } - } - if len(input) > 0 { - buf.WriteString(input) - input = "" - } - goto done - } - -escape: - { - if len(input) == 0 { - return "", "", UnterminatedEscapeError - } - c, l := utf8.DecodeRuneInString(input) - if c == '\n' { - // a backslash-escaped newline is elided from the output entirely - } else { - buf.WriteString(input[:l]) - } - input = input[l:] - } - goto raw - -single: - { - i := strings.IndexRune(input, singleChar) - if i == -1 { - return "", "", UnterminatedSingleQuoteError - } - buf.WriteString(input[0:i]) - input = input[i+1:] - goto raw - } - -double: - { - cur := input - for len(cur) > 0 { - c, l := utf8.DecodeRuneInString(cur) - cur = cur[l:] - if c == doubleChar { - buf.WriteString(input[0 : len(input)-len(cur)-l]) - input = cur - goto raw - } else if c == escapeChar { - // bash only supports certain escapes in double-quoted strings - c2, l2 := utf8.DecodeRuneInString(cur) - cur = cur[l2:] - if strings.ContainsRune(doubleEscapeChars, c2) { - buf.WriteString(input[0 : len(input)-len(cur)-l-l2]) - if c2 == '\n' { - // newline is special, skip the backslash entirely - } else { - buf.WriteRune(c2) - } - input = cur - } - } - } - return "", "", UnterminatedDoubleQuoteError - } - -done: - return buf.String(), input, nil -} diff --git a/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go b/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go deleted file mode 100644 index 1444a9f..0000000 --- a/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go +++ /dev/null @@ -1,53 +0,0 @@ -package shellquote - -import ( - "reflect" - "testing" -) - -func TestSimpleSplit(t *testing.T) { - for _, elem := range simpleSplitTest { - output, err := Split(elem.input) - if err != nil { - t.Errorf("Input %q, got error %#v", elem.input, err) - } else if !reflect.DeepEqual(output, elem.output) { - t.Errorf("Input %q, got %q, expected %q", elem.input, output, elem.output) - } - } -} - -func TestErrorSplit(t *testing.T) { - for _, elem := range errorSplitTest { - _, err := Split(elem.input) - if err != elem.error { - t.Errorf("Input %q, got error %#v, expected error %#v", elem.input, err, elem.error) - } - } -} - -var simpleSplitTest = []struct { - input string - output []string -}{ - {"hello", []string{"hello"}}, - {"hello goodbye", []string{"hello", "goodbye"}}, - {"hello goodbye", []string{"hello", "goodbye"}}, - {"glob* test?", []string{"glob*", "test?"}}, - 
{"don\\'t you know the dewey decimal system\\?", []string{"don't", "you", "know", "the", "dewey", "decimal", "system?"}}, - {"'don'\\''t you know the dewey decimal system?'", []string{"don't you know the dewey decimal system?"}}, - {"one '' two", []string{"one", "", "two"}}, - {"text with\\\na backslash-escaped newline", []string{"text", "witha", "backslash-escaped", "newline"}}, - {"text \"with\na\" quoted newline", []string{"text", "with\na", "quoted", "newline"}}, - {"\"quoted\\d\\\\\\\" text with\\\na backslash-escaped newline\"", []string{"quoted\\d\\\" text witha backslash-escaped newline"}}, - {"foo\"bar\"baz", []string{"foobarbaz"}}, -} - -var errorSplitTest = []struct { - input string - error error -}{ - {"don't worry", UnterminatedSingleQuoteError}, - {"'test'\\''ing", UnterminatedSingleQuoteError}, - {"\"foo'bar", UnterminatedDoubleQuoteError}, - {"foo\\", UnterminatedEscapeError}, -} diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/.gitignore b/Godeps/_workspace/src/github.com/kr/binarydist/.gitignore deleted file mode 100644 index 653f160..0000000 --- a/Godeps/_workspace/src/github.com/kr/binarydist/.gitignore +++ /dev/null @@ -1 +0,0 @@ -test.* diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.new b/Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.new deleted file mode 100644 index 592cdbe2dff6a7f7f9bcdbd80fdf30fb775d8e13..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 10000 zcmeHMeQZ_b9X@=y>*(kWxTVG-mw38$g7hlQavh!Aj+VWQZdM_)&gw0NN-LN4roEMR zW3gS!!tt~ln`Y^>$;yZencX7JWXuZizM6ROd#U6C{~?&p5ObP`_Bx4X=^A1t0d3Y!rf3aAyp(CmL`aOqU$jtq0 zq3-zf&9F7s@8~ubYwO%z-x80tCEM$-pGLl?HJ$zbw#N-!^B$OMb+LPyd8eKcfESBZ zly59IgKVm`e$mV4XjP1U!CN%a)RQ*g!(S}6Wm9sK?(6sI>4mEGejl&&j_h76)(joB zs)=Ol6!}6!VcBrrYzyrPr!eOb^(!=UY+|wcwzjPIZ25wFHNEi2EZ=k)tJD7F52pRB zYTeh@S5~a^eWdON*&m=2@E2SyjsNcBCWH8gbdF*N@V~A6*0uid)YjsuS{otfS-f2*PAzvi|+y?9e|1LtOvZ|ID+=dp{VcX#Wi?Ibu8y(cO) z{PDgUaz)B|_vLG9zPp7JGtoP@hKO5M=wzw1GyUV!OSIQFL&CP1=&k*lmhV%aDVvFu zjAzhGb|z+~cklsCX2a&2E{fi!)=kY@8`@^nH~eW$=8AVOj^5TS^|7|itxYrOixeZj ziahPd=5VsJkC)f~_G`T6eZ zjIrEq5c|KFJBW8iZ3s^uo|5*q){=NreaV*2_!hkL$MFark%a^1ms*SU8#>#P4ck|I zc?I@0)$0Y^*NA7zKE1f4s;RzpQ)_3*TE^Xumey@;C2Lw*8`ie8HzQ#;wKT7gh~!Mk zHyT>oFw$01p0ft@$*1nWE0zhAS$dSc)2OLxrkI$xe;sqV8~P;`8*&BL7P zueAfpU0yWs)v%?UW#|o|=WGjbra-yF_5hb*X4oD=4>R7mqaf3tw2M{$PCKX`!Jqy@ zn`?Kc*6z`3xgw;lA5)L2N4-04nQJMx)($JD)>dQ?QSLJ3#O*ref+ISO9IaD5tE^;# zdlP0ahZEx!b`8%qXlvP$OtofB63#N^k^)71BXnW^?aX)91iTd+wVI>_ zRhMI3Y0;kbOCZortQWXSi!OXu4-{~~TO6KdPvfkdJtu-8A)@#Et;FBs4BGpNNB7|_ zG>6hw@2{{gs8zFI`uIy?dNGhOmN6{tHiT`0+b6fo28UYUrVUdKyTB|p?zTKMwVVo> zsW8+Nn?*PpPTQ;_!)6_@Rim+4+_zbsZ!@4HHj4=Fw_haU#%Az{ESu51Fjk|sS)4X| z?wqz6%vozkVU!xW71~;~RSKLFP|H$q^^y0lvb+Z$2Lx^f{XK3+l~asHIgJ4{RZe5x zc$6nGpID?^-CU{DBNV zIRDni+H~s**tGjF&6HtA3EsUgLj0byQA!V$;iBWenO%SJ zN(wFSCkHZAFV?Duq54#vBivJPSB}mw5OuCNK2>KE3*L?72k^7Jeu}n7KerSj??h&y z*LPkAZF>$!b5(unq^PR#zvC)j$zi*TOz1OyszInXpQKQdtmZ3(=l!*I@$OfFXqj<|h##O>7y3vg9qT+Cb}9R%!9tu2y9b-=>Ilrvu;z6nq$QK+3_ zzetzFB$Sd==q-7kEUFkM6-bdz6|IHL#W7?e>DAgDs^<_;)fBTt9HJ!dgLap+eOg3@ zth13xZV@RqDrbGbE1R$^ccUG|M5KH2I|aq%?K@KD10v$+*piVK2Vg;LLEa@`&MN>b zw0ok=O5UWLqrgEv)3~<>-KM~eXLc_3N4UHLepjY8pF^xF7%g$gEe|X!4??xFGp~0~ z-uvX{)9t6pzB>PEF~~IRucQNM))T_|m9F=!ge?@ew^U|CJjnSisE`YcNg%QJ39 zTI;nVj8VyKKRN+t={~C@;0lXQMOnxI7lMypPwBY<@0xdXt_bo~N)KuqO3$!;LSSYl z8@7R`_=}8*zqm586Ts^lMn|g~@3k*r9KvDEs|`c6tnz$Gu9qcZ4?FQ{aU+ z%(8C3UF{mn-!*{GPRt_8y))ST{tvM)f7cI0@=?2(RD4|rCt15RJgCk3Jow?=Ra|b8q9sF>~=ils^oSol$l?*f1T;;%?u;pWyg)(fpv>2J%Bw6Ixo(ssO$h#P0 
zVv=Lex3*pelOY`;&xy$yASrVV+I$5HAvE~Qu5G#mqyYUiAk1CPBIdZ;0CLKSWKkN5 zx)LNBI6)xrJWF$7QFtF?pOxyN@zl05+t1)Q)C_Az0t6+|Ku{0@@vFb^^fM@4#q&&l z?{*`WF8)Nl=LpU_KJq=!fsN=3KM5(v6c)!XEWLIR1o)$FCyt;LFZcyZ@kT26vUZwc zOw(x~To+e5*Rs1?y7yq&gF=L3B*x0G`r~$>3gwILU*%N7O?&M+-4A%{_USVmG8_sb zGh3g|avz)FvQOH*HgNatbj);Vr!k5}qeGk%?B`^-S$po{QEbn-&y*Gwyu#($d%R!&9pwfbVB>V!iw2SQFxa7M%uEZbbAo$rBY~Jzi|KKyQGG!)z)V5bPZ(xxKZ93qous z4;AK=4E5gO$ui5^ad3xykVfkWbll!Tl$-uU_sC79y<$x=M_~Q|jh16j{2*JDo+EOr z5)HlI9M(!p#M7i15+&Z7BGIs>Ck{aO(GEa<6z%wHS~QhP8@0BAJiCoN==PZ|?a+mO zu4th&QiYkK$=Wnn!P%i)u>9!bO_vO{XyixHRvOV7Gem2IJY$$QsDWJyHCD7-(ZXrb zx({L6FO9M`E!z3Fr$J5D2x?@gMI%3oHtC!!cX$ZIGv$xd4Z21QDV$inb0Zt0?ji*6C|4(;{w4 z)!ZH&P2=tT_+eed^kE$ZFJyLf=uB#y1QR&Gud><~kg!m27rE;p&Zth@X*1B35Z4K@ zL4}=UOVJvcfg0t2mKCNTh>FVE#|0=^ah~=W#0jDrx08f4zmalEK0<2wLMPt34->#? z0d9FF1u$*ib=@M%``$i{6UqbcHUJ_s;qiUK(29m=!gbTid&lP{&n=;U#uKbt@ ze_$@2(8=fv;zvg-@dlWT?7ZPA|r_ zD4ll(x3Am-jC}V%1mAZayX3?n7o7k^l8X)xD$eVaqN@QPOvVrDnMZ)vQLLGZ2i;?) zOX$YXSMC$qrF)Kq7`b|EwdxrU>%)3!k$PiF&<*Q#f)0smbV!@%d{Cd65+82E8z>jR Jc&U;-_-}XSHje-R diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.old b/Godeps/_workspace/src/github.com/kr/binarydist/testdata/sample.old deleted file mode 100644 index 7bc64dacb0a93810ef51ba9bba355b1755738734..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 11000 zcmeHNZE#f88NQJZ*9l{GL^`oVjh!2t3PTpnk430#G~l(YHHB1Tjol<9vXLchlZ}QN zYcdHf>m^DRJ87FvQynX6Is@t`BhUs(1;1%^rm-K2Ox3HvG=5DZR_*h==iI$_H;JXc zI@4isbIv{YocBEM^M0K7-u0#9$38pNF!H7uhH(nMr{g=y0kr&_g|CHgELK%<`LgdU zTX_{neV?GO{{>yjaqx}Bs+ZMN`zE^ULuI}}boDi$7&GuCHhyBUR8vPv_0@HbykdRt zXJ@F6D_;*A)7_40W3jdk>l)T4Vr{ARhJ};hds4~S<}Q0k%hlI`=|&g3Gxas~41>H_ ztg@oILJzWuX#Gx`rc}}T9jsDJQ%~A}Uw&e-#`;vf>Z|L?m4 zR86K@C*V7DXGj)|>1Bs*3uQRx5cMn4_t?Z@4Q*{%*V*_EZc*|=JM&!A`DT0mul`RiW0yy=_buLrOK{9jYCa=AM^vDNslj??44Pe6CU ze^W#Inpk~9Lu=DbQ_FkaU8?1|m$|JgFVWo6#CucWJNt;T=kq5h?}pa;brd+2yvAjU z{&#MSV^qr)dAZ%t!EtZj8`^HlmG#mJ{* z{EOw$-FM>4bajEO_}~5+eNX3^{{Vy5ER;D*jq}ispR&{lVlW^5DjE0wxcTlJZ)id< zi1imw7**#~F~(=%Th`vzT9#;TC~Mr1Xw-8#9>!N;(Sv9*b1{F_hPG7Gx&@amz`EuJ zHHYis_)b`-CYME<8(QmIHGFAyH+Sm}A_4WgG`8{nNn%N{cOISqG)%t7>U$9ZHJV){~MsnyqM2Ce(?udm4D z+)bIeThv_6D7F?3TMt_gJA0p=W>|Ku8M4x~ro|1ymOal(C(Js_#*XO3Ia+7+EH+X} zu1)H-98T`9G;3Jfpsi&~F;!}7ib&71Y)T-+8(*MqDWHRpD$8E#p-kW!>Lkn>Ohi+g zUOUSMt3lf7ossl_s&)?AfYlO8a)k&o_? zE;Rc*tM^r!BIB#$G!MO)YN~ z>sukHCpHUnH00T=!)LP&*eb4VmT+xW=h_VHh|R*X_v6ozacwhrM3&8HUKlH`Y?h$S z79^C-V9r`I0;AN>t{E5{GAZTiA}@PqCVn#pHI3Emr?1N@$&5u%66aM5??f&g$2h4RG~QL_|Q3K4FY zx)6W-{365o?t?znOO@&&s6J8WFxM1+J4a_2h&rD(iOxC}oJHIZ-R$2o{>nds=d#Os>1*1%DJwRLuMB@q0hLf2BCh@vls&lNLKk4!t?f8vvkv+ zyN(+L8~L`t>KjnYkW&hJyE$TSRwH(=N?4$)3gcqt8tEWlJ64*4G@=3)9%iNUEyOno zDqT~goI;^X;U<)lROtMDgj+-zCzVK%=_sv*%*8QeBI(td9ahgiu&N|xK^!C!d#l+c z?Pdj$A)BsdlABG5aVxzd;N%@N47=J4;zqdp_^pMd740`?%%}V!H!off3t|cKE(uG& z3bKkfjV>`#mssfo;6Yx~*w+T_=D?yOooBlvoZbPyTVky~hFG;=wB$a!A~3Ha2-TJx zdAVjO)xzQXDfHN zEZ18ev3+T+*9~0M62V)RKTJDmfJ5K?b_)0h4cE>A3+Xy;kMdATQ8G4kFXE$-WX0Ed#+_) zSG?-RLg%Az8Q0g^G-fXPd+P>cruv1{rs2`NP1BZSE# zJ)1eMXxOKlO!BCRpN~WXCkTW`cr+K4jNQf9=Sda&U1r%5)6L*0YKGKCLIja0A_#;; zynGi_fbHm2tY->(*J)Vl@F(j%Pov&(nYZ>~qUZ}hDYnx3U`e|NORpJ(0Dh?3=pG!! 
zFaL^1aWxhElX99;+@{k2To-4iFJO0*bZ^A8ts;bD6vmU^>Pwh`D2^`zji#5uO?%Bc z)eksN{8H6$w3SK@n^i{p!VH&PY4@53us^ZGPS5Ku?KF(O7#(m9vY(UT-rLLjQpD|6 zle2?f)(idIK+#Y6r2qPcnTB(CIn((F)48uKKq1?&(tZ3XN%s!-2B*XQ;PkdZaw<-P zbIPgu3hvpnOxPrVDGWbmr_m3y%4s=;q+mb%D3$|rY32~;^x#2GSKi}1`aX^u>;M~k zX>a^gq&*CFs59n!dorqP|KJ&k;6jxk`fyd8x*?1MB~N1_vEBU!$~rjzHU(q6HqHux*Rpl}R` zA7n%6d0L*T@B zV8jD>|C2uMWUdD=ASXkXqq$Fzd`rL$6+rGsfJFq@J>uXpJER>I()avULb@OKovrw} z$QQf(p@YE~x`m{;VI>EVoH|l<{7u~}m1l}M+f^<=ag88CbVktDA!U_}ZNWVCSj)7C zC#5PbKk$ghw{zXyJPNB(8nIZ%KE--PhsvZnMdJnv_{B#1=@cvy?Ai7Tyd~JJ61Qgt zx)S0#DK@AyQ)~$>PU&$e(6Yi50#WMo?BfI+SqWDA4B{kNO_(WC`bta{gUClnEuSQD z)jdK2CndOiPljOHJX|u{a8~?W@q`10&o(&;kJrOGU#iEN9mg^m5J5>X*1^gHw!~%FQ6|RwpC06&=M*S&hw%0l0{BI9|Fd} zqXw+{BK@FF&h+$isD-7ZPfpD|&&ZrtMgXr$))YNMqjh#Xp%jqJiYK*9X03Y0I|(vI z!9yL5;(v)${zLdV94Co`C;a)ahQ4_jA5|u^&Nk)#KiMVLVZ4&o(R#eA_{~o<*75RO z_yhgaiJ6A;*ZVLZJ3)Uuzk<3kjP`tD6x{J9C7}ixWZO3&HE7Q*0psQwPC=A%@lYTy zvf#U;)tR6Osh|MGQ3MU@UZUx*fUlhEqCr04c_xhZnvdx&^9iG#dr8X;%mmqr`%Y)A z_@u#J{2&%0Ah(%brkWU_!FL&BQifKtBtd{8{@|tz&h3m|=o1OBWYI1qcXol*iD8TN z$Sb#xVHHx^0V4NO8ZY4qOgGvS26UI6u!WPcl|-2RnAyeaOprzOb_g>4;N->`(jIt| z)xsdfE4x!V?LKFdoFfdB>@+u~0_n<~uv7O@5y32qm)R3uYdeeoF#`-RvqJqE(vCV= zUP6~z@B>R;pkcJsd!bpE(VY+f@H^4E(yV4nF_t84_<7xlR$Ems0!kw7o zR}^8PoW6=GCd_K;cl|XgslzQm{w(~zDTEi$@WRSVBYwz9HP%7Clb0h>+wXB*RYd}8 zUzmzj47;>X7hBgpN`8WQm9ZKj><1rFu*F0&TO-e1lvIm4xF|K*qMT?_L^0h7d1wz#V7RmvGxF{@ns%s^Xt< zRQej@suR}ZOUYLrfJj&^%pv^@o%1ws|62?-c^aVc0VeQ1&q4)3McnEgip5~Y_qjqH zhZo9Os%OLbv^MlT$$%i153!&9nj&!U<3$bP@0hjdNI`EN)rZ+I2Tl@UDpW@F{YoM3 zT&H{^&Oi%t5dt{t4Fw0`%{Ag9_(?@M2vbIuq2>1tXX|!uBrkJ(FHf>2!heuL=|*vS yM8kv?8FwfzDRfTkNPXi6aEpw#a~?2^ci#OBJibP-cVmu$|FdIXQ&!8p7bwY#+uf@%%|U^QS(nF! zty}uFWwb@hX``4kJPwsp3=Z2w)ZMu>SDNb+{|p7aqY9CltcROU1U79?W(g^fy{5|D zv3ru{p_bMQs;wFb=bze~d5(c0tNuR_-2Ynf_r{&zLU$i0XNIbXpr8v03=Rwq9$X0y z0nrDT1Rh*FC6y)QyW&>kvWBXLuTr+YMpDcQKvAfd#- z_)3z`#3bWF#sw3Qh!GH*usJX`xpaisyGmugs+uX4apL5ws>a3cD<_wYm zVHDXji77;FRc_YIj%yh9&F=(El?yV3dTXa#vF;&YqQKM_Y+W8eaQ{vQk zMEvJCP34FU7U4W_xaeSy!irgW0!}v~EIF82o*g=nY3)_LLaFnJfI!kgS!1>(fis-g zBsy|C+vGQSx3FC{S?t;`$ToAo6yK9%#kF0}Z3HH+JG5#hZ>R2_4wgx~E(t^=ut^Ih zWU93OOp3n#OKR!$v#oFgX8jV*ysA&%=UlR@yfE#;0gE-f`dOboZtdHB zcj2M_5`j%pvpbnACo)aXd)guz=W4S0++M%#iJ>M}{Jesfb!kW(c;KND^h9Ze{d>=r zqdPk}j3U1$e#}l%{W1Bb(PaO#cS;UsmS31;uq>@!v!hwri0jRijkA`hu-w>n+cH?t zw&+glq9e;)i*!v2Z*iwQ*O~4dH`|cyNZjp*{Ka`&Lv7vePdi_hZFw#8{Em9-3%;eV z{7&ooNapKIb>Oes6xA^CchKyuYUfTbQjo~AF}S(Bx#dQ?sE>YNhMvM4*Eb!n6?tzT zw#=vtQ4lv3F~0HW^&+Rt-0KXkf7VWU{ikiE*wm_Q#hSn^TZ?icj&V z>y3KOAItRCU9-Qr?f-_)^>JY@Jdfu}1Z+@MtkHVdbp8i89kzwe_A5Z7L+uqJR~Z;Q z{{MgCaDwL;M+$>Wf`dB~2Lp>wK7)%BkBCEJgM-5XCy$r~3{%-wzEU$~^J!C?Wpd$y ztgvEvGI%qo5k9R5>qom;bCe%MUw5v|Mj~vZi(G zJPVoHmUo^Urv!G0^l(a7Z7RrN7b=Su{vu*ove@y`j_eH!!d@NY+TXoXX^w`6=oPN4 F1^_f>!+rn& diff --git a/Godeps/_workspace/src/github.com/kr/pretty/.gitignore b/Godeps/_workspace/src/github.com/kr/pretty/.gitignore deleted file mode 100644 index 1f0a99f..0000000 --- a/Godeps/_workspace/src/github.com/kr/pretty/.gitignore +++ /dev/null @@ -1,4 +0,0 @@ -[568].out -_go* -_test* -_obj diff --git a/Godeps/_workspace/src/github.com/kr/text/colwriter/Readme b/Godeps/_workspace/src/github.com/kr/text/colwriter/Readme deleted file mode 100644 index 1c1f4e6..0000000 --- a/Godeps/_workspace/src/github.com/kr/text/colwriter/Readme +++ /dev/null @@ -1,5 +0,0 @@ -Package colwriter provides a write filter that formats -input lines in multiple columns. - -The package is a straightforward translation from -/src/cmd/draw/mc.c in Plan 9 from User Space. 
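For context, a minimal usage sketch of the colwriter package whose sources are removed here. NewWriter, Write, Flush, and the width/flag parameters come from the deleted code below; the plain github.com/kr/text/colwriter import path is an assumption (this patch drops the Godeps-rewritten copy, so the exact post-vendoring path may differ):

package main

import (
	"log"
	"os"

	"github.com/kr/text/colwriter" // assumed upstream path; not the Godeps rewrite removed here
)

func main() {
	// Lay out newline-separated words in as many columns as fit in 40 characters.
	w := colwriter.NewWriter(os.Stdout, 40, 0)
	words := "alpha\nbravo\ncharlie\ndelta\necho\nfoxtrot\n"
	if _, err := w.Write([]byte(words)); err != nil {
		log.Fatal(err)
	}
	// Flush columnates whatever is still buffered and writes it out.
	if err := w.Flush(); err != nil {
		log.Fatal(err)
	}
}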
diff --git a/Godeps/_workspace/src/github.com/kr/text/colwriter/column.go b/Godeps/_workspace/src/github.com/kr/text/colwriter/column.go deleted file mode 100644 index 7302ce9..0000000 --- a/Godeps/_workspace/src/github.com/kr/text/colwriter/column.go +++ /dev/null @@ -1,147 +0,0 @@ -// Package colwriter provides a write filter that formats -// input lines in multiple columns. -// -// The package is a straightforward translation from -// /src/cmd/draw/mc.c in Plan 9 from User Space. -package colwriter - -import ( - "bytes" - "io" - "unicode/utf8" -) - -const ( - tab = 4 -) - -const ( - // Print each input line ending in a colon ':' separately. - BreakOnColon uint = 1 << iota -) - -// A Writer is a filter that arranges input lines in as many columns as will -// fit in its width. Tab '\t' chars in the input are translated to sequences -// of spaces ending at multiples of 4 positions. -// -// If BreakOnColon is set, each input line ending in a colon ':' is written -// separately. -// -// The Writer assumes that all Unicode code points have the same width; this -// may not be true in some fonts. -type Writer struct { - w io.Writer - buf []byte - width int - flag uint -} - -// NewWriter allocates and initializes a new Writer writing to w. -// Parameter width controls the total number of characters on each line -// across all columns. -func NewWriter(w io.Writer, width int, flag uint) *Writer { - return &Writer{ - w: w, - width: width, - flag: flag, - } -} - -// Write writes p to the writer w. The only errors returned are ones -// encountered while writing to the underlying output stream. -func (w *Writer) Write(p []byte) (n int, err error) { - var linelen int - var lastWasColon bool - for i, c := range p { - w.buf = append(w.buf, c) - linelen++ - if c == '\t' { - w.buf[len(w.buf)-1] = ' ' - for linelen%tab != 0 { - w.buf = append(w.buf, ' ') - linelen++ - } - } - if w.flag&BreakOnColon != 0 && c == ':' { - lastWasColon = true - } else if lastWasColon { - if c == '\n' { - pos := bytes.LastIndex(w.buf[:len(w.buf)-1], []byte{'\n'}) - if pos < 0 { - pos = 0 - } - line := w.buf[pos:] - w.buf = w.buf[:pos] - if err = w.columnate(); err != nil { - if len(line) < i { - return i - len(line), err - } - return 0, err - } - if n, err := w.w.Write(line); err != nil { - if r := len(line) - n; r < i { - return i - r, err - } - return 0, err - } - } - lastWasColon = false - } - if c == '\n' { - linelen = 0 - } - } - return len(p), nil -} - -// Flush should be called after the last call to Write to ensure that any data -// buffered in the Writer is written to output. 
-func (w *Writer) Flush() error { - return w.columnate() -} - -func (w *Writer) columnate() error { - words := bytes.Split(w.buf, []byte{'\n'}) - w.buf = nil - if len(words[len(words)-1]) == 0 { - words = words[:len(words)-1] - } - maxwidth := 0 - for _, wd := range words { - if n := utf8.RuneCount(wd); n > maxwidth { - maxwidth = n - } - } - maxwidth++ // space char - wordsPerLine := w.width / maxwidth - if wordsPerLine <= 0 { - wordsPerLine = 1 - } - nlines := (len(words) + wordsPerLine - 1) / wordsPerLine - for i := 0; i < nlines; i++ { - col := 0 - endcol := 0 - for j := i; j < len(words); j += nlines { - endcol += maxwidth - _, err := w.w.Write(words[j]) - if err != nil { - return err - } - col += utf8.RuneCount(words[j]) - if j+nlines < len(words) { - for col < endcol { - _, err := w.w.Write([]byte{' '}) - if err != nil { - return err - } - col++ - } - } - } - _, err := w.w.Write([]byte{'\n'}) - if err != nil { - return err - } - } - return nil -} diff --git a/Godeps/_workspace/src/github.com/kr/text/colwriter/column_test.go b/Godeps/_workspace/src/github.com/kr/text/colwriter/column_test.go deleted file mode 100644 index 8d0bf8f..0000000 --- a/Godeps/_workspace/src/github.com/kr/text/colwriter/column_test.go +++ /dev/null @@ -1,90 +0,0 @@ -package colwriter - -import ( - "bytes" - "testing" -) - -var src = ` -.git -.gitignore -.godir -Procfile: -README.md -api.go -apps.go -auth.go -darwin.go -data.go -dyno.go: -env.go -git.go -help.go -hkdist -linux.go -ls.go -main.go -plugin.go -run.go -scale.go -ssh.go -tail.go -term -unix.go -update.go -version.go -windows.go -`[1:] - -var tests = []struct{ - wid int - flag uint - src string - want string -}{ - {80, 0, "", ""}, - {80, 0, src, ` -.git README.md darwin.go git.go ls.go scale.go unix.go -.gitignore api.go data.go help.go main.go ssh.go update.go -.godir apps.go dyno.go: hkdist plugin.go tail.go version.go -Procfile: auth.go env.go linux.go run.go term windows.go -`[1:]}, - {80, BreakOnColon, src, ` -.git .gitignore .godir - -Procfile: -README.md api.go apps.go auth.go darwin.go data.go - -dyno.go: -env.go hkdist main.go scale.go term version.go -git.go linux.go plugin.go ssh.go unix.go windows.go -help.go ls.go run.go tail.go update.go -`[1:]}, - {20, 0, ` -Hello -Γειά σου -안녕 -今日は -`[1:], ` -Hello 안녕 -Γειά σου 今日は -`[1:]}, -} - -func TestWriter(t *testing.T) { - for _, test := range tests { - b := new(bytes.Buffer) - w := NewWriter(b, test.wid, test.flag) - if _, err := w.Write([]byte(test.src)); err != nil { - t.Error(err) - } - if err := w.Flush(); err != nil { - t.Error(err) - } - if g := b.String(); test.want != g { - t.Log("\n" + test.want) - t.Log("\n" + g) - t.Errorf("%q != %q", test.want, g) - } - } -} diff --git a/Godeps/_workspace/src/github.com/kr/text/mc/Readme b/Godeps/_workspace/src/github.com/kr/text/mc/Readme deleted file mode 100644 index 519ddc0..0000000 --- a/Godeps/_workspace/src/github.com/kr/text/mc/Readme +++ /dev/null @@ -1,9 +0,0 @@ -Command mc prints in multiple columns. - - Usage: mc [-] [-N] [file...] - -Mc splits the input into as many columns as will fit in N -print positions. If the output is a tty, the default N is -the number of characters in a terminal line; otherwise the -default N is 80. Under option - each input line ending in -a colon ':' is printed separately. 
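The "-" option described in the mc Readme above corresponds to colwriter's BreakOnColon flag, as the mc.go source removed below shows. A rough standalone equivalent of `mc - -60` reading from stdin (same import-path assumption as the earlier sketch):

package main

import (
	"io"
	"log"
	"os"

	"github.com/kr/text/colwriter" // assumed upstream path; not the Godeps rewrite removed here
)

func main() {
	// 60-character output lines; any input line ending in ':' is printed on its own.
	w := colwriter.NewWriter(os.Stdout, 60, colwriter.BreakOnColon)
	if _, err := io.Copy(w, os.Stdin); err != nil {
		log.Println(err)
	}
	if err := w.Flush(); err != nil {
		log.Println(err)
	}
}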
diff --git a/Godeps/_workspace/src/github.com/kr/text/mc/mc.go b/Godeps/_workspace/src/github.com/kr/text/mc/mc.go deleted file mode 100644 index 3f9b6e2..0000000 --- a/Godeps/_workspace/src/github.com/kr/text/mc/mc.go +++ /dev/null @@ -1,62 +0,0 @@ -// Command mc prints in multiple columns. -// -// Usage: mc [-] [-N] [file...] -// -// Mc splits the input into as many columns as will fit in N -// print positions. If the output is a tty, the default N is -// the number of characters in a terminal line; otherwise the -// default N is 80. Under option - each input line ending in -// a colon ':' is printed separately. -package main - -import ( - "github.com/kr/pty" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kr/text/colwriter" - "io" - "log" - "os" - "strconv" -) - -func main() { - var width int - var flag uint - args := os.Args[1:] - for len(args) > 0 && len(args[0]) > 0 && args[0][0] == '-' { - if len(args[0]) > 1 { - width, _ = strconv.Atoi(args[0][1:]) - } else { - flag |= colwriter.BreakOnColon - } - args = args[1:] - } - if width < 1 { - _, width, _ = pty.Getsize(os.Stdout) - } - if width < 1 { - width = 80 - } - - w := colwriter.NewWriter(os.Stdout, width, flag) - if len(args) > 0 { - for _, s := range args { - if f, err := os.Open(s); err == nil { - copyin(w, f) - f.Close() - } else { - log.Println(err) - } - } - } else { - copyin(w, os.Stdin) - } -} - -func copyin(w *colwriter.Writer, r io.Reader) { - if _, err := io.Copy(w, r); err != nil { - log.Println(err) - } - if err := w.Flush(); err != nil { - log.Println(err) - } -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go deleted file mode 100644 index 992ff9d..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go +++ /dev/null @@ -1,141 +0,0 @@ -package octokit - -import ( - "io" - "net/http" - "net/url" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -func NewClient(authMethod AuthMethod) *Client { - return NewClientWith(gitHubAPIURL, userAgent, authMethod, nil) -} - -func NewClientWith(baseURL string, userAgent string, authMethod AuthMethod, httpClient *http.Client) *Client { - client, _ := sawyer.NewFromString(baseURL, httpClient) - return &Client{Client: client, UserAgent: userAgent, AuthMethod: authMethod} -} - -type Client struct { - *sawyer.Client - - UserAgent string - AuthMethod AuthMethod - rootRels hypermedia.Relations -} - -func (c *Client) NewRequest(urlStr string) (req *Request, err error) { - req, err = newRequest(c, urlStr) - if err != nil { - return - } - - c.applyRequestHeaders(req) - - return -} - -// a GET request with specific media type set -func (c *Client) getBody(url *url.URL, mediaType string) (patch io.ReadCloser, result *Result) { - result = sendRequest(c, url, func(req *Request) (*Response, error) { - req.Header.Set("Accept", mediaType) - return req.Get(nil) - }) - - if result.Response != nil { - patch = result.Response.Body - } - - return -} - -func (c *Client) head(url *url.URL, output interface{}) (result *Result) { - return sendRequest(c, url, func(req *Request) (*Response, error) { - return req.Head(output) - }) -} - -func (c *Client) get(url *url.URL, output interface{}) (result *Result) { - return sendRequest(c, url, func(req *Request) (*Response, error) { - return req.Get(output) - }) -} - -func (c *Client) 
post(url *url.URL, input interface{}, output interface{}) (result *Result) { - return sendRequest(c, url, func(req *Request) (*Response, error) { - return req.Post(input, output) - }) -} - -func (c *Client) put(url *url.URL, input interface{}, output interface{}) *Result { - return sendRequest(c, url, func(req *Request) (*Response, error) { - return req.Put(input, output) - }) -} - -func (c *Client) delete(url *url.URL, output interface{}) (result *Result) { - return sendRequest(c, url, func(req *Request) (*Response, error) { - return req.Delete(output) - }) -} - -func (c *Client) patch(url *url.URL, input interface{}, output interface{}) (result *Result) { - return sendRequest(c, url, func(req *Request) (*Response, error) { - return req.Patch(input, output) - }) -} - -func (c *Client) upload(uploadUrl *url.URL, asset io.ReadCloser, contentType string, contentLength int64) (result *Result) { - req, err := c.NewRequest(uploadUrl.String()) - if err != nil { - result = newResult(nil, err) - return - } - - req.Header.Set("Content-Type", contentType) - req.ContentLength = contentLength - - req.Body = asset - sawyerResp := req.Request.Post() - - resp, err := NewResponse(sawyerResp) - return newResult(resp, err) -} - -func (c *Client) applyRequestHeaders(req *Request) { - req.Header.Set("Accept", defaultMediaType) - req.Header.Set("User-Agent", c.UserAgent) - - if c.AuthMethod != nil { - req.Header.Set("Authorization", c.AuthMethod.String()) - } - - if basicAuth, ok := c.AuthMethod.(BasicAuth); ok && basicAuth.OneTimePassword != "" { - req.Header.Set("X-GitHub-OTP", basicAuth.OneTimePassword) - } - - // Go doesn't apply `Host` on the header, instead it consults `Request.Host` - // Populate `Host` if it exists in `Client.Header` - // See Bug https://code.google.com/p/go/issues/detail?id=7682 - host := c.Header.Get("Host") - if host != "" { - req.Request.Host = host - } - - return -} - -func sendRequest(c *Client, url *url.URL, fn func(r *Request) (*Response, error)) (result *Result) { - req, err := c.NewRequest(url.String()) - if err != nil { - result = newResult(nil, err) - return - } - - resp, err := fn(req) - result = newResult(resp, err) - - return -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go deleted file mode 100644 index 3d3d057..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go +++ /dev/null @@ -1,58 +0,0 @@ -package octokit - -import ( - "io/ioutil" - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestCommitsService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/go-octokit/commits/4351fb69b8d5ed075e9cd844e67ad2114b335c82", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("commit.json")) - }) - - url, err := CommitsURL.Expand(M{ - "owner": "octokit", - "repo": "go-octokit", - "sha": "4351fb69b8d5ed075e9cd844e67ad2114b335c82", - }) - assert.Equal(t, nil, err) - commit, result := client.Commits(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, "4351fb69b8d5ed075e9cd844e67ad2114b335c82", commit.Sha) - assert.Equal(t, "https://api.github.com/repos/octokit/go-octokit/commits/4351fb69b8d5ed075e9cd844e67ad2114b335c82", commit.URL) - - files := commit.Files - assert.Equal(t, 35, len(files)) -} - -func TestCommitsService_Patch(t *testing.T) { - setup() - defer 
tearDown() - - mux.HandleFunc("/repos/octokit/go-octokit/commits/b6d21008bf7553a29ad77ee0a8bb3b66e6f11aa2", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - testHeader(t, r, "Accept", patchMediaType) - respondWithJSON(w, loadFixture("commit.patch")) - }) - - url, err := CommitsURL.Expand(M{ - "owner": "octokit", - "repo": "go-octokit", - "sha": "b6d21008bf7553a29ad77ee0a8bb3b66e6f11aa2", - }) - assert.Equal(t, nil, err) - patch, result := client.Commits(url).Patch() - - assert.T(t, !result.HasError()) - content, err := ioutil.ReadAll(patch) - assert.Equal(t, nil, err) - assert.T(t, len(content) > 0) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis.go deleted file mode 100644 index 36e86aa..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis.go +++ /dev/null @@ -1,25 +0,0 @@ -package octokit - -import ( - "net/url" -) - -var ( - EmojisURL = Hyperlink("/emojis") -) - -// Create a EmojisService with the base url.URL -func (c *Client) Emojis(url *url.URL) (emojis *EmojisService) { - emojis = &EmojisService{client: c, URL: url} - return -} - -type EmojisService struct { - client *Client - URL *url.URL -} - -func (s *EmojisService) All() (emojis map[string]string, result *Result) { - result = s.client.get(s.URL, &emojis) - return -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go deleted file mode 100644 index fafca78..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go +++ /dev/null @@ -1,28 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestRootEmojisService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/emojis", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("emojis.json")) - }) - - url, err := EmojisURL.Expand(nil) - assert.Equal(t, nil, err) - - emojis, result := client.Emojis(url).All() - assert.T(t, !result.HasError()) - var penguin = "https://github.global.ssl.fastly.net/images/icons/emoji/penguin.png?v5" - var metal = "https://github.global.ssl.fastly.net/images/icons/emoji/metal.png?v5" - assert.Equal(t, penguin, emojis["penguin"]) - assert.Equal(t, metal, emojis["metal"]) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go deleted file mode 100644 index 79fab72..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go +++ /dev/null @@ -1,140 +0,0 @@ -package octokit - -import ( - "net/http" - "strings" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestResponseError_empty_body(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/error", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusBadRequest) - respondWith(w, "") - }) - - req, _ := client.NewRequest("error") - _, err := req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "400 - Problems parsing error message: EOF"), "%s", err.Error()) - - e := err.(*ResponseError) - assert.Equal(t, ErrorBadRequest, e.Type) -} - -func 
TestResponseError_Error_400(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/error", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusBadRequest) - respondWith(w, `{"message":"Problems parsing JSON"}`) - }) - - req, _ := client.NewRequest("error") - _, err := req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "400 - Problems parsing JSON"), "%s", err.Error()) - - e := err.(*ResponseError) - assert.Equal(t, ErrorBadRequest, e.Type) -} - -func TestResponseError_Error_401(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/error", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusUnauthorized) - respondWith(w, `{"message":"Unauthorized"}`) - }) - - req, _ := client.NewRequest("error") - _, err := req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "401 - Unauthorized"), "%s", err.Error()) - - e := err.(*ResponseError) - assert.Equal(t, ErrorUnauthorized, e.Type) - - mux.HandleFunc("/error_2fa", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - head.Set("X-GitHub-OTP", "required; app") - w.WriteHeader(http.StatusUnauthorized) - respondWith(w, `{"message":"Unauthorized"}`) - }) - - req, _ = client.NewRequest("error_2fa") - _, err = req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "401 - Unauthorized"), "%s", err.Error()) - - e = err.(*ResponseError) - assert.Equal(t, ErrorOneTimePasswordRequired, e.Type) -} - -func TestResponseError_Error_422_error(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/error", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(422) - respondWith(w, `{"error":"No repository found for hubtopic"}`) - }) - - req, _ := client.NewRequest("error") - _, err := req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "Error: No repository found for hubtopic"), "%s", err.Error()) - - e := err.(*ResponseError) - assert.Equal(t, ErrorUnprocessableEntity, e.Type) -} - -func TestResponseError_Error_422_error_summary(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/error", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(422) - respondWith(w, `{"message":"Validation Failed", "errors": [{"resource":"Issue", "field": "title", "code": "missing_field"}]}`) - }) - - req, _ := client.NewRequest("error") - _, err := req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "422 - Validation Failed"), "%s", err.Error()) - assert.Tf(t, strings.Contains(err.Error(), "missing_field error caused by title field on Issue resource"), "%s", err.Error()) - - e := err.(*ResponseError) - assert.Equal(t, ErrorUnprocessableEntity, e.Type) -} - -func TestResponseError_Error_415(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/error", func(w http.ResponseWriter, r *http.Request) { - head := w.Header() - head.Set("Content-Type", "application/json") - w.WriteHeader(http.StatusUnsupportedMediaType) - respondWith(w, `{"message":"Unsupported Media Type", "documentation_url":"http://developer.github.com/v3"}`) - }) - - req, _ := client.NewRequest("error") - _, err := req.Get(nil) - assert.Tf(t, strings.Contains(err.Error(), "415 - Unsupported Media Type"), "%s", err.Error()) - assert.Tf(t, 
strings.Contains(err.Error(), "// See: http://developer.github.com/v3"), "%s", err.Error()) - - e := err.(*ResponseError) - assert.Equal(t, ErrorUnsupportedMediaType, e.Type) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go deleted file mode 100644 index acb5e2e..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go +++ /dev/null @@ -1,59 +0,0 @@ -package octokit - -import ( - "io/ioutil" - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestGistsService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/gists/a6bea192debdbec0d4ab", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("gist.json")) - }) - - url, _ := GistsURL.Expand(M{"gist_id": "a6bea192debdbec0d4ab"}) - gist, result := client.Gists(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, "a6bea192debdbec0d4ab", gist.ID) - assert.Equal(t, 1, len(gist.Files)) - - file := gist.Files["grep_cellar"] - assert.Equal(t, "grep_cellar", file.FileName) - assert.Equal(t, "text/plain", file.Type) - assert.Equal(t, "", file.Language) - assert.Equal(t, "https://gist.githubusercontent.com/jingweno/a6bea192debdbec0d4ab/raw/80757419d2bd4cfddf7c6be24308eca11b3c330e/grep_cellar", file.RawURL) - assert.Equal(t, 8107, file.Size) - assert.Equal(t, false, file.Truncated) -} - -func TestGistsService_Raw(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/gists/a6bea192debdbec0d4ab", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("gist.json")) - }) - - mux.HandleFunc("/jingweno/a6bea192debdbec0d4ab/raw/80757419d2bd4cfddf7c6be24308eca11b3c330e/grep_cellar", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - assert.Equal(t, "gist.githubusercontent.com", r.Host) - testHeader(t, r, "Accept", textMediaType) - respondWith(w, "hello") - }) - - url, _ := GistsURL.Expand(M{"gist_id": "a6bea192debdbec0d4ab"}) - body, result := client.Gists(url).Raw() - - assert.T(t, !result.HasError()) - content, err := ioutil.ReadAll(body) - assert.Equal(t, nil, err) - assert.Equal(t, "hello", string(content)) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees.go deleted file mode 100644 index 5ef5d97..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees.go +++ /dev/null @@ -1,39 +0,0 @@ -package octokit - -import ( - "net/url" -) - -var GitTreesURL = Hyperlink("repos/{owner}/{repo}/git/trees/{sha}{?recursive}") - -func (c *Client) GitTrees(url *url.URL) (trees *GitTreesService) { - trees = &GitTreesService{client: c, URL: url} - return -} - -type GitTreesService struct { - client *Client - URL *url.URL -} - -// Get a Git Tree -func (c *GitTreesService) One() (tree *GitTree, result *Result) { - result = c.client.get(c.URL, &tree) - return -} - -type GitTree struct { - Sha string `json:"sha,omitempty"` - Tree []GitTreeEntry `json:"tree,omitempty"` - Truncated bool `json:"truncated,omitempty"` - URL string `json:"url,omitempty"` -} - -type GitTreeEntry struct { - Mode string `json:"mode,omitempty"` - Path string `json:"path,omitempty"` - Sha string `json:"sha,omitempty"` - Size int `json:"size,omitempty"` - Type string `json:"type,omitempty"` - 
URL string `json:"url,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go deleted file mode 100644 index dffff02..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go +++ /dev/null @@ -1,17 +0,0 @@ -package octokit - -import ( - "net/url" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -type M map[string]interface{} - -type Hyperlink string - -func (l Hyperlink) Expand(m M) (u *url.URL, err error) { - sawyerHyperlink := hypermedia.Hyperlink(string(l)) - u, err = sawyerHyperlink.Expand(hypermedia.M(m)) - return -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go deleted file mode 100644 index b166dc0..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go +++ /dev/null @@ -1,23 +0,0 @@ -package octokit - -import ( - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestHyperlink_Expand(t *testing.T) { - link := Hyperlink("https://api.github.com/users/{user}") - url, err := link.Expand(M{"user": "jingweno"}) - assert.Equal(t, nil, err) - assert.Equal(t, "https://api.github.com/users/jingweno", url.String()) - - link = Hyperlink("https://api.github.com/user") - url, err = link.Expand(nil) - assert.Equal(t, nil, err) - assert.Equal(t, "https://api.github.com/user", url.String()) - - url, err = link.Expand(M{}) - assert.Equal(t, nil, err) - assert.Equal(t, "https://api.github.com/user", url.String()) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go deleted file mode 100644 index 77fdc3b..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go +++ /dev/null @@ -1,91 +0,0 @@ -package octokit - -import ( - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - RepoIssuesURL = Hyperlink("repos/{owner}/{repo}/issues{/number}") -) - -// Create a IssuesService with the base url.URL -func (c *Client) Issues(url *url.URL) (issues *IssuesService) { - issues = &IssuesService{client: c, URL: url} - return -} - -type IssuesService struct { - client *Client - URL *url.URL -} - -func (i *IssuesService) One() (issue *Issue, result *Result) { - result = i.client.get(i.URL, &issue) - return -} - -func (i *IssuesService) All() (issues []Issue, result *Result) { - result = i.client.get(i.URL, &issues) - return -} - -func (i *IssuesService) Create(params interface{}) (issue *Issue, result *Result) { - result = i.client.post(i.URL, params, &issue) - return -} - -func (i *IssuesService) Update(params interface{}) (issue *Issue, result *Result) { - result = i.client.patch(i.URL, params, &issue) - return -} - -type Issue struct { - *hypermedia.HALResource - - URL string `json:"url,omitempty,omitempty"` - HTMLURL string `json:"html_url,omitempty,omitempty"` - Number int `json:"number,omitempty"` - State string `json:"state,omitempty"` - Title string `json:"title,omitempty"` - Body string `json:"body,omitempty"` - User User `json:"user,omitempty"` - Labels []struct { - URL string `json:"url,omitempty"` - Name string `json:"name,omitempty"` - Color string `json:"color,omitempty"` - } - Assignee 
User `json:"assignee,omitempty"` - Milestone struct { - URL string `json:"url,omitempty"` - Number int `json:"number,omitempty"` - State string `json:"state,omitempty"` - Title string `json:"title,omitempty"` - Description string `json:"description,omitempty"` - Creator User `json:"creator,omitempty"` - OpenIssues int `json:"open_issues,omitempty"` - ClosedIssues int `json:"closed_issues,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - DueOn *time.Time `json:"due_on,omitempty"` - } - Comments int `json:"comments,omitempty"` - PullRequest struct { - HTMLURL string `json:"html_url,omitempty"` - DiffURL string `json:"diff_url,omitempty"` - PatchURL string `json:"patch_url,omitempty"` - } `json:"pull_request,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - ClosedAt *time.Time `json:"closed_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` -} - -type IssueParams struct { - Title string `json:"title,omitempty"` - Body string `json:"body,omitempty"` - Assignee string `json:"assignee,omitempty"` - State string `json:"state,omitempty"` - Milestone uint64 `json:"milestone,omitempty"` - Labels []string `json:"labels,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go deleted file mode 100644 index 6a92483..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go +++ /dev/null @@ -1,145 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestIssuesService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octocat/Hello-World/issues", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("issues.json")) - }) - - url, err := RepoIssuesURL.Expand(M{"owner": "octocat", "repo": "Hello-World"}) - assert.Equal(t, nil, err) - - issues, result := client.Issues(url).All() - assert.T(t, !result.HasError()) - assert.Equal(t, 1, len(issues)) - - issue := issues[0] - validateIssue(t, issue) -} - -func TestIssuesService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octocat/Hello-World/issues/1347", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("issue.json")) - }) - - url, err := RepoIssuesURL.Expand(M{"owner": "octocat", "repo": "Hello-World", "number": 1347}) - assert.Equal(t, nil, err) - - issue, result := client.Issues(url).One() - - assert.T(t, !result.HasError()) - validateIssue(t, *issue) -} - -func TestIssuesService_Create(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octocat/Hello-World/issues", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testBody(t, r, "{\"title\":\"title\",\"body\":\"body\"}\n") - respondWithJSON(w, loadFixture("issue.json")) - }) - - url, err := RepoIssuesURL.Expand(M{"owner": "octocat", "repo": "Hello-World"}) - assert.Equal(t, nil, err) - - params := IssueParams{ - Title: "title", - Body: "body", - } - issue, result := client.Issues(url).Create(params) - - assert.T(t, !result.HasError()) - validateIssue(t, *issue) -} - -func TestIssuesService_Update(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octocat/Hello-World/issues/1347", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "PATCH") - testBody(t, r, 
"{\"title\":\"title\",\"body\":\"body\"}\n") - respondWithJSON(w, loadFixture("issue.json")) - }) - - url, err := RepoIssuesURL.Expand(M{"owner": "octocat", "repo": "Hello-World", "number": 1347}) - assert.Equal(t, nil, err) - - params := IssueParams{ - Title: "title", - Body: "body", - } - issue, result := client.Issues(url).Update(params) - - assert.T(t, !result.HasError()) - validateIssue(t, *issue) -} - -func validateIssue(t *testing.T, issue Issue) { - - assert.Equal(t, "https://api.github.com/repos/octocat/Hello-World/issues/1347", issue.URL) - assert.Equal(t, "https://github.com/octocat/Hello-World/issues/1347", issue.HTMLURL) - assert.Equal(t, 1347, issue.Number) - assert.Equal(t, "open", issue.State) - assert.Equal(t, "Found a bug", issue.Title) - assert.Equal(t, "I'm having a problem with this.", issue.Body) - - assert.Equal(t, "octocat", issue.User.Login) - assert.Equal(t, 1, issue.User.ID) - assert.Equal(t, "https://github.com/images/error/octocat_happy.gif", issue.User.AvatarURL) - assert.Equal(t, "somehexcode", issue.User.GravatarID) - assert.Equal(t, "https://api.github.com/users/octocat", issue.User.URL) - - assert.Equal(t, 1, len(issue.Labels)) - assert.Equal(t, "https://api.github.com/repos/octocat/Hello-World/labels/bug", issue.Labels[0].URL) - assert.Equal(t, "bug", issue.Labels[0].Name) - - assert.Equal(t, "octocat", issue.Assignee.Login) - assert.Equal(t, 1, issue.Assignee.ID) - assert.Equal(t, "https://github.com/images/error/octocat_happy.gif", issue.Assignee.AvatarURL) - assert.Equal(t, "somehexcode", issue.Assignee.GravatarID) - assert.Equal(t, "https://api.github.com/users/octocat", issue.Assignee.URL) - - assert.Equal(t, "https://api.github.com/repos/octocat/Hello-World/milestones/1", issue.Milestone.URL) - assert.Equal(t, 1, issue.Milestone.Number) - assert.Equal(t, "open", issue.Milestone.State) - assert.Equal(t, "v1.0", issue.Milestone.Title) - assert.Equal(t, "", issue.Milestone.Description) - - assert.Equal(t, "octocat", issue.Milestone.Creator.Login) - assert.Equal(t, 1, issue.Milestone.Creator.ID) - assert.Equal(t, "https://github.com/images/error/octocat_happy.gif", issue.Milestone.Creator.AvatarURL) - assert.Equal(t, "somehexcode", issue.Milestone.Creator.GravatarID) - assert.Equal(t, "https://api.github.com/users/octocat", issue.Milestone.Creator.URL) - - assert.Equal(t, 4, issue.Milestone.OpenIssues) - assert.Equal(t, 8, issue.Milestone.ClosedIssues) - assert.Equal(t, "2011-04-10 20:09:31 +0000 UTC", issue.Milestone.CreatedAt.String()) - assert.Equal(t, (*time.Time)(nil), issue.Milestone.DueOn) - - assert.Equal(t, 0, issue.Comments) - assert.Equal(t, "https://github.com/octocat/Hello-World/pull/1347", issue.PullRequest.HTMLURL) - assert.Equal(t, "https://github.com/octocat/Hello-World/pull/1347.diff", issue.PullRequest.DiffURL) - assert.Equal(t, "https://github.com/octocat/Hello-World/pull/1347.patch", issue.PullRequest.PatchURL) - - assert.Equal(t, (*time.Time)(nil), issue.ClosedAt) - assert.Equal(t, "2011-04-22 13:33:48 +0000 UTC", issue.CreatedAt.String()) - assert.Equal(t, "2011-04-22 13:33:48 +0000 UTC", issue.UpdatedAt.String()) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit.go deleted file mode 100644 index 5a4aeac..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit.go +++ /dev/null @@ -1,11 +0,0 @@ -package octokit - -const ( - gitHubAPIURL = "https://api.github.com" - userAgent = "Octokit Go " + version 
- version = "0.3.0" - defaultMediaType = "application/vnd.github.v3+json;charset=utf-8" - diffMediaType = "application/vnd.github.v3.diff;charset=utf-8" - patchMediaType = "application/vnd.github.v3.patch;charset=utf-8" - textMediaType = "text/plain;charset=utf-8" -) diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/organizations.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/organizations.go deleted file mode 100644 index 4c422d3..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/organizations.go +++ /dev/null @@ -1,12 +0,0 @@ -package octokit - -type Organization struct { - AvatarURL string `json:"avatar_url,omitempty"` - PublicMembersURL string `json:"public_member_url,omitempty"` - MembersURL string `json:"members_url,omitempty"` - EventsURL string `json:"events_url,omitempty"` - ReposURL string `json:"repos_url,omitempty"` - URL string `json:"url,omitempty"` - ID int `json:"id,omitempty"` - Login string `json:"login,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go deleted file mode 100644 index 0b9748a..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go +++ /dev/null @@ -1,101 +0,0 @@ -package octokit - -import ( - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestReleasesService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/jingweno/gh/releases", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("releases.json")) - }) - - url, err := ReleasesURL.Expand(M{"owner": "jingweno", "repo": "gh"}) - assert.Equal(t, nil, err) - - releases, result := client.Releases(url).All() - assert.T(t, !result.HasError()) - assert.Equal(t, 1, len(releases)) - - firstRelease := releases[0] - assert.Equal(t, 50013, firstRelease.ID) - assert.Equal(t, "v0.23.0", firstRelease.TagName) - assert.Equal(t, "master", firstRelease.TargetCommitish) - assert.Equal(t, "v0.23.0", firstRelease.Name) - assert.T(t, !firstRelease.Draft) - assert.T(t, !firstRelease.Prerelease) - assert.Equal(t, "* Windows works!: https://github.com/jingweno/gh/commit/6cb80cb09fd9f624a64d85438157955751a9ac70", firstRelease.Body) - assert.Equal(t, "https://api.github.com/repos/jingweno/gh/releases/50013", firstRelease.URL) - assert.Equal(t, "https://api.github.com/repos/jingweno/gh/releases/50013/assets", firstRelease.AssetsURL) - assert.Equal(t, "https://uploads.github.com/repos/jingweno/gh/releases/50013/assets{?name}", string(firstRelease.UploadURL)) - assert.Equal(t, "https://github.com/jingweno/gh/releases/v0.23.0", firstRelease.HTMLURL) - assert.Equal(t, "2013-09-23 00:59:10 +0000 UTC", firstRelease.CreatedAt.String()) - assert.Equal(t, "2013-09-23 01:07:56 +0000 UTC", firstRelease.PublishedAt.String()) - - firstReleaseAssets := firstRelease.Assets - assert.Equal(t, 8, len(firstReleaseAssets)) - - firstAsset := firstReleaseAssets[0] - assert.Equal(t, 20428, firstAsset.ID) - assert.Equal(t, "gh_0.23.0-snapshot_amd64.deb", firstAsset.Name) - assert.Equal(t, "gh_0.23.0-snapshot_amd64.deb", firstAsset.Label) - assert.Equal(t, "application/x-deb", firstAsset.ContentType) - assert.Equal(t, "uploaded", firstAsset.State) - assert.Equal(t, 1562984, firstAsset.Size) - assert.Equal(t, 0, firstAsset.DownloadCount) - assert.Equal(t, 
"https://api.github.com/repos/jingweno/gh/releases/assets/20428", firstAsset.URL) - assert.Equal(t, "2013-09-23 01:05:20 +0000 UTC", firstAsset.CreatedAt.String()) - assert.Equal(t, "2013-09-23 01:07:56 +0000 UTC", firstAsset.UpdatedAt.String()) -} - -func TestCreateRelease(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/Hello-World/releases", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testBody(t, r, "{\"tag_name\":\"v1.0.0\",\"target_commitish\":\"master\"}\n") - respondWithJSON(w, loadFixture("create_release.json")) - }) - - url, err := ReleasesURL.Expand(M{"owner": "octokit", "repo": "Hello-World"}) - assert.Equal(t, nil, err) - - params := Release{ - TagName: "v1.0.0", - TargetCommitish: "master", - } - release, result := client.Releases(url).Create(params) - - assert.T(t, !result.HasError()) - assert.Equal(t, "v1.0.0", release.TagName) -} - -func TestUpdateRelease(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/octokit/Hello-World/releases/123", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "PATCH") - testBody(t, r, "{\"tag_name\":\"v1.0.0\",\"target_commitish\":\"master\"}\n") - respondWithJSON(w, loadFixture("create_release.json")) - }) - - url, err := ReleasesURL.Expand(M{"owner": "octokit", "repo": "Hello-World", "id": "123"}) - assert.Equal(t, nil, err) - - params := Release{ - TagName: "v1.0.0", - TargetCommitish: "master", - } - release, result := client.Releases(url).Update(params) - - assert.T(t, !result.HasError()) - assert.Equal(t, "v1.0.0", release.TagName) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go deleted file mode 100644 index b026441..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go +++ /dev/null @@ -1,85 +0,0 @@ -package octokit - -import ( - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - RepositoryURL = Hyperlink("repos/{owner}/{repo}") - ForksURL = Hyperlink("repos/{owner}/{repo}/forks") - UserRepositoriesURL = Hyperlink("user/repos") - OrgRepositoriesURL = Hyperlink("orgs/{org}/repos") -) - -// Create a RepositoriesService with the base url.URL -func (c *Client) Repositories(url *url.URL) (repos *RepositoriesService) { - repos = &RepositoriesService{client: c, URL: url} - return -} - -type RepositoriesService struct { - client *Client - URL *url.URL -} - -func (r *RepositoriesService) One() (repo *Repository, result *Result) { - result = r.client.get(r.URL, &repo) - return -} - -func (r *RepositoriesService) All() (repos []Repository, result *Result) { - result = r.client.get(r.URL, &repos) - return -} - -func (r *RepositoriesService) Create(params interface{}) (repo *Repository, result *Result) { - result = r.client.post(r.URL, params, &repo) - return -} - -type Repository struct { - *hypermedia.HALResource - - ID int `json:"id,omitempty"` - Owner User `json:"owner,omitempty"` - Name string `json:"name,omitempty"` - FullName string `json:"full_name,omitempty"` - Description string `json:"description,omitempty"` - Private bool `json:"private"` - Fork bool `json:"fork,omitempty"` - URL string `json:"url,omitempty"` - HTMLURL string `json:"html_url,omitempty"` - CloneURL string `json:"clone_url,omitempty"` - GitURL string `json:"git_url,omitempty"` - SSHURL string `json:"ssh_url,omitempty"` - SVNURL string 
`json:"svn_url,omitempty"` - MirrorURL string `json:"mirror_url,omitempty"` - Homepage string `json:"homepage,omitempty"` - Language string `json:"language,omitempty"` - Forks int `json:"forks,omitempty"` - ForksCount int `json:"forks_count,omitempty"` - Watchers int `json:"watchers,omitempty"` - WatchersCount int `json:"watchers_count,omitempty"` - Size int `json:"size,omitempty"` - MasterBranch string `json:"master_branch,omitempty"` - OpenIssues int `json:"open_issues,omitempty"` - PushedAt time.Time `json:"pushed_at,omitempty"` - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` - Permissions Permissions `json:"permissions,omitempty"` - Organization *Organization `json:"organization,omitempty"` - Parent *Repository `json:"parent,omitempty"` - Source *Repository `json:"source,omitempty"` - HasIssues bool `json:"has_issues,omitempty"` - HasWiki bool `json:"has_wiki,omitempty"` - HasDownloads bool `json:"has_downloads,omitempty"` -} - -type Permissions struct { - Admin bool - Push bool - Pull bool -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go deleted file mode 100644 index b9e3619..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go +++ /dev/null @@ -1,144 +0,0 @@ -package octokit - -import ( - "encoding/json" - "fmt" - "net/http" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestRepositoresService_One(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/jingweno/octokat", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - respondWithJSON(w, loadFixture("repository.json")) - }) - - url, err := RepositoryURL.Expand(M{"owner": "jingweno", "repo": "octokat"}) - assert.Equal(t, nil, err) - - repo, result := client.Repositories(url).One() - - assert.T(t, !result.HasError()) - assert.Equal(t, 10575811, repo.ID) - assert.Equal(t, "octokat", repo.Name) - assert.Equal(t, "jingweno/octokat", repo.FullName) - assert.T(t, !repo.Private) - assert.T(t, !repo.Fork) - assert.Equal(t, "https://api.github.com/repos/jingweno/octokat", repo.URL) - assert.Equal(t, "https://github.com/jingweno/octokat", repo.HTMLURL) - assert.Equal(t, "https://github.com/jingweno/octokat.git", repo.CloneURL) - assert.Equal(t, "git://github.com/jingweno/octokat.git", repo.GitURL) - assert.Equal(t, "git@github.com:jingweno/octokat.git", repo.SSHURL) - assert.Equal(t, "master", repo.MasterBranch) - assert.T(t, !repo.Permissions.Admin) - assert.T(t, !repo.Permissions.Push) - assert.T(t, repo.Permissions.Pull) -} - -func TestRepositoresService_All(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/orgs/rails/repos", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "GET") - - header := w.Header() - link := fmt.Sprintf(`<%s>; rel="next", <%s>; rel="last"`, testURLOf("organizations/4223/repos?page=2"), testURLOf("organizations/4223/repos?page=3")) - header.Set("Link", link) - - respondWithJSON(w, loadFixture("repositories.json")) - }) - - url, err := OrgRepositoriesURL.Expand(M{"org": "rails"}) - assert.Equal(t, nil, err) - - repos, result := client.Repositories(url).All() - - assert.T(t, !result.HasError()) - assert.Equal(t, 30, len(repos)) - assert.Equal(t, testURLStringOf("organizations/4223/repos?page=2"), string(*result.NextPage)) - assert.Equal(t, 
testURLStringOf("organizations/4223/repos?page=3"), string(*result.LastPage)) -} - -func TestRepositoresService_Create(t *testing.T) { - setup() - defer tearDown() - - params := Repository{} - params.Name = "Hello-World" - params.Description = "This is your first repo" - params.Homepage = "https://github.com" - params.Private = false - params.HasIssues = true - params.HasWiki = true - params.HasDownloads = true - - mux.HandleFunc("/user/repos", func(w http.ResponseWriter, r *http.Request) { - var repoParams Repository - json.NewDecoder(r.Body).Decode(&repoParams) - assert.Equal(t, params.Name, repoParams.Name) - assert.Equal(t, params.Description, repoParams.Description) - assert.Equal(t, params.Homepage, repoParams.Homepage) - assert.Equal(t, params.Private, repoParams.Private) - assert.Equal(t, params.HasIssues, repoParams.HasIssues) - assert.Equal(t, params.HasWiki, repoParams.HasWiki) - assert.Equal(t, params.HasDownloads, repoParams.HasDownloads) - - testMethod(t, r, "POST") - respondWithJSON(w, loadFixture("create_repository.json")) - }) - - url, err := UserRepositoriesURL.Expand(nil) - assert.Equal(t, nil, err) - - repo, result := client.Repositories(url).Create(params) - - assert.T(t, !result.HasError()) - assert.Equal(t, 1296269, repo.ID) - assert.Equal(t, "Hello-World", repo.Name) - assert.Equal(t, "octocat/Hello-World", repo.FullName) - assert.Equal(t, "This is your first repo", repo.Description) - assert.T(t, !repo.Private) - assert.T(t, repo.Fork) - assert.Equal(t, "https://api.github.com/repos/octocat/Hello-World", repo.URL) - assert.Equal(t, "https://github.com/octocat/Hello-World", repo.HTMLURL) - assert.Equal(t, "https://github.com/octocat/Hello-World.git", repo.CloneURL) - assert.Equal(t, "git://github.com/octocat/Hello-World.git", repo.GitURL) - assert.Equal(t, "git@github.com:octocat/Hello-World.git", repo.SSHURL) - assert.Equal(t, "master", repo.MasterBranch) -} - -func TestRepositoresService_CreateFork(t *testing.T) { - setup() - defer tearDown() - - mux.HandleFunc("/repos/jingweno/octokat/forks", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testBody(t, r, "{\"organization\":\"github\"}\n") - respondWithJSON(w, loadFixture("create_repository.json")) - }) - - url, err := ForksURL.Expand(M{"owner": "jingweno", "repo": "octokat"}) - assert.Equal(t, nil, err) - - repo, result := client.Repositories(url).Create(M{"organization": "github"}) - - assert.T(t, !result.HasError()) - assert.Equal(t, 1296269, repo.ID) - assert.Equal(t, "Hello-World", repo.Name) - assert.Equal(t, "octocat/Hello-World", repo.FullName) - assert.Equal(t, "This is your first repo", repo.Description) - assert.T(t, !repo.Private) - assert.T(t, repo.Fork) - assert.Equal(t, "https://api.github.com/repos/octocat/Hello-World", repo.URL) - assert.Equal(t, "https://github.com/octocat/Hello-World", repo.HTMLURL) - assert.Equal(t, "https://github.com/octocat/Hello-World.git", repo.CloneURL) - assert.Equal(t, "git://github.com/octocat/Hello-World.git", repo.GitURL) - assert.Equal(t, "git@github.com:octocat/Hello-World.git", repo.SSHURL) - assert.Equal(t, "master", repo.MasterBranch) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go deleted file mode 100644 index 9b99ef0..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go +++ /dev/null @@ -1,31 +0,0 @@ -package octokit - -import ( - "net/http" - - 
"github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" -) - -type Response struct { - MediaType *mediatype.MediaType - MediaHeader *mediaheader.MediaHeader - *http.Response -} - -func NewResponse(sawyerResp *sawyer.Response) (resp *Response, err error) { - if sawyerResp.IsError() { - err = sawyerResp.ResponseError - return - } - - if sawyerResp.IsApiError() { - err = NewResponseError(sawyerResp) - return - } - - resp = &Response{Response: sawyerResp.Response, MediaType: sawyerResp.MediaType, MediaHeader: sawyerResp.MediaHeader} - - return -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go deleted file mode 100644 index 163f12e..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go +++ /dev/null @@ -1,19 +0,0 @@ -package octokit - -import ( - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" -) - -func TestNewResult_Pageable(t *testing.T) { - resp := &Response{MediaHeader: &mediaheader.MediaHeader{Relations: hypermedia.Relations{"next": hypermedia.Hyperlink("/path")}}} - result := newResult(resp, nil) - - assert.Equal(t, "/path", string(*result.NextPage)) - assert.T(t, result.PrevPage == nil) - assert.T(t, result.LastPage == nil) - assert.T(t, result.FirstPage == nil) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go deleted file mode 100644 index 01527d7..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go +++ /dev/null @@ -1,41 +0,0 @@ -package octokit - -import ( - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - StatusesURL = Hyperlink("repos/{owner}/{repo}/statuses/{ref}") -) - -// Create a StatusesService with the base url.URL -func (c *Client) Statuses(url *url.URL) (statuses *StatusesService) { - statuses = &StatusesService{client: c, URL: url} - return -} - -type StatusesService struct { - client *Client - URL *url.URL -} - -func (s *StatusesService) All() (statuses []Status, result *Result) { - result = s.client.get(s.URL, &statuses) - return -} - -type Status struct { - *hypermedia.HALResource - - CreatedAt time.Time `json:"created_at,omitempty"` - UpdatedAt time.Time `json:"updated_at,omitempty"` - State string `json:"state,omitempty"` - TargetURL string `json:"target_url,omitempty"` - Description string `json:"description,omitempty"` - ID int `json:"id,omitempty"` - URL string `json:"url,omitempty"` - Creator User `json:"creator,omitempty"` -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads.go deleted file mode 100644 index 62c1b6c..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads.go +++ /dev/null @@ -1,20 +0,0 @@ -package octokit - -import ( - "io" - "net/url" -) - -// Create an UploadsService with the base url.URL -func (c 
*Client) Uploads(url *url.URL) *UploadsService { - return &UploadsService{client: c, URL: url} -} - -type UploadsService struct { - client *Client - URL *url.URL -} - -func (u *UploadsService) UploadAsset(asset io.ReadCloser, contentType string, contentLength int64) (result *Result) { - return u.client.upload(u.URL, asset, contentType, contentLength) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go deleted file mode 100644 index 3fbdea5..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go +++ /dev/null @@ -1,42 +0,0 @@ -package octokit - -import ( - "fmt" - "io/ioutil" - "net/http" - "os" - "testing" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" -) - -func TestUploadsService_UploadAsset(t *testing.T) { - setup() - defer tearDown() - - file, err := ioutil.TempFile("", "octokit-test-upload-") - assert.Equal(t, nil, err) - file.WriteString("this is a test") - - fi, err := file.Stat() - assert.Equal(t, nil, err) - file.Close() - - mux.HandleFunc("/repos/octokit/Hello-World/releases/123/assets", func(w http.ResponseWriter, r *http.Request) { - testMethod(t, r, "POST") - testHeader(t, r, "Content-Type", "text/plain") - assert.Equal(t, fi.Size(), r.ContentLength) - respondWithStatus(w, 201) - }) - - link := Hyperlink("/repos/octokit/Hello-World/releases/123/assets{?name}") - url, err := link.Expand(M{"name": fi.Name()}) - assert.Equal(t, nil, err) - - open, _ := os.Open(file.Name()) - result := client.Uploads(url).UploadAsset(open, "text/plain", fi.Size()) - fmt.Println(result) - assert.T(t, !result.HasError()) - - assert.Equal(t, 201, result.Response.StatusCode) -} diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go b/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go deleted file mode 100644 index 08341f0..0000000 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go +++ /dev/null @@ -1,77 +0,0 @@ -package octokit - -import ( - "net/url" - "time" - - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" -) - -var ( - CurrentUserURL = Hyperlink("user") - UserURL = Hyperlink("users{/user}") -) - -// Create a UsersService with the base url.URL -func (c *Client) Users(url *url.URL) (users *UsersService) { - users = &UsersService{client: c, URL: url} - return -} - -// A service to return user records -type UsersService struct { - client *Client - URL *url.URL -} - -// Get a user based on UserService#URL -func (u *UsersService) One() (user *User, result *Result) { - result = u.client.get(u.URL, &user) - return -} - -// Update a user based on UserService#URL -func (u *UsersService) Update(params interface{}) (user *User, result *Result) { - result = u.client.put(u.URL, params, &user) - return -} - -// Get a list of users based on UserService#URL -func (u *UsersService) All() (users []User, result *Result) { - result = u.client.get(u.URL, &users) - return -} - -type User struct { - *hypermedia.HALResource - - SiteAdmin bool `json:"site_admin,omitempty"` - Login string `json:"login,omitempty"` - ID int `json:"id,omitempty"` - AvatarURL string `json:"avatar_url,omitempty"` - GravatarID string `json:"gravatar_id,omitempty"` - URL string `json:"url,omitempty"` - Name string `json:"name,omitempty"` - Company string `json:"company,omitempty"` - Blog string `json:"blog,omitempty"` - Location string 
`json:"location,omitempty"` - Email string `json:"email,omitempty"` - Hireable bool `json:"hireable,omitempty"` - Bio string `json:"bio,omitempty"` - PublicRepos int `json:"public_repos,omitempty"` - Followers int `json:"followers,omitempty"` - Following int `json:"following,omitempty"` - HTMLURL string `json:"html_url,omitempty"` - CreatedAt *time.Time `json:"created_at,omitempty"` - UpdatedAt *time.Time `json:"updated_at,omitempty"` - Type string `json:"type,omitempty"` - FollowingURL Hyperlink `json:"following_url,omitempty"` - FollowersURL Hyperlink `json:"followers_url,omitempty"` - GistsURL Hyperlink `json:"gists_url,omitempty"` - StarredURL Hyperlink `json:"starred_url,omitempty"` - SubscriptionsURL Hyperlink `json:"subscriptions_url,omitempty"` - OrganizationsURL Hyperlink `json:"organizations_url,omitempty"` - ReposURL Hyperlink `json:"repos_url,omitempty"` - EventsURL Hyperlink `json:"events_url,omitempty"` - ReceivedEventsURL Hyperlink `json:"received_events_url,omitempty"` -} diff --git a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal.go b/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal.go deleted file mode 100644 index 965f0cf..0000000 --- a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal.go +++ /dev/null @@ -1,888 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package terminal - -import ( - "bytes" - "io" - "sync" - "unicode/utf8" -) - -// EscapeCodes contains escape sequences that can be written to the terminal in -// order to achieve different styles of text. -type EscapeCodes struct { - // Foreground colors - Black, Red, Green, Yellow, Blue, Magenta, Cyan, White []byte - - // Reset all attributes - Reset []byte -} - -var vt100EscapeCodes = EscapeCodes{ - Black: []byte{keyEscape, '[', '3', '0', 'm'}, - Red: []byte{keyEscape, '[', '3', '1', 'm'}, - Green: []byte{keyEscape, '[', '3', '2', 'm'}, - Yellow: []byte{keyEscape, '[', '3', '3', 'm'}, - Blue: []byte{keyEscape, '[', '3', '4', 'm'}, - Magenta: []byte{keyEscape, '[', '3', '5', 'm'}, - Cyan: []byte{keyEscape, '[', '3', '6', 'm'}, - White: []byte{keyEscape, '[', '3', '7', 'm'}, - - Reset: []byte{keyEscape, '[', '0', 'm'}, -} - -// Terminal contains the state for running a VT100 terminal that is capable of -// reading lines of input. -type Terminal struct { - // AutoCompleteCallback, if non-null, is called for each keypress with - // the full input line and the current position of the cursor (in - // bytes, as an index into |line|). If it returns ok=false, the key - // press is processed normally. Otherwise it returns a replacement line - // and the new cursor position. - AutoCompleteCallback func(line string, pos int, key rune) (newLine string, newPos int, ok bool) - - // Escape contains a pointer to the escape codes for this terminal. - // It's always a valid pointer, although the escape codes themselves - // may be empty if the terminal doesn't support them. - Escape *EscapeCodes - - // lock protects the terminal and the state in this object from - // concurrent processing of a key press and a Write() call. - lock sync.Mutex - - c io.ReadWriter - prompt []rune - - // line is the current line being entered. - line []rune - // pos is the logical position of the cursor in line - pos int - // echo is true if local echo is enabled - echo bool - // pasteActive is true iff there is a bracketed paste operation in - // progress. 
- pasteActive bool - - // cursorX contains the current X value of the cursor where the left - // edge is 0. cursorY contains the row number where the first row of - // the current line is 0. - cursorX, cursorY int - // maxLine is the greatest value of cursorY so far. - maxLine int - - termWidth, termHeight int - - // outBuf contains the terminal data to be sent. - outBuf []byte - // remainder contains the remainder of any partial key sequences after - // a read. It aliases into inBuf. - remainder []byte - inBuf [256]byte - - // history contains previously entered commands so that they can be - // accessed with the up and down keys. - history stRingBuffer - // historyIndex stores the currently accessed history entry, where zero - // means the immediately previous entry. - historyIndex int - // When navigating up and down the history it's possible to return to - // the incomplete, initial line. That value is stored in - // historyPending. - historyPending string -} - -// NewTerminal runs a VT100 terminal on the given ReadWriter. If the ReadWriter is -// a local terminal, that terminal must first have been put into raw mode. -// prompt is a string that is written at the start of each input line (i.e. -// "> "). -func NewTerminal(c io.ReadWriter, prompt string) *Terminal { - return &Terminal{ - Escape: &vt100EscapeCodes, - c: c, - prompt: []rune(prompt), - termWidth: 80, - termHeight: 24, - echo: true, - historyIndex: -1, - } -} - -const ( - keyCtrlD = 4 - keyCtrlU = 21 - keyEnter = '\r' - keyEscape = 27 - keyBackspace = 127 - keyUnknown = 0xd800 /* UTF-16 surrogate area */ + iota - keyUp - keyDown - keyLeft - keyRight - keyAltLeft - keyAltRight - keyHome - keyEnd - keyDeleteWord - keyDeleteLine - keyClearScreen - keyPasteStart - keyPasteEnd -) - -var pasteStart = []byte{keyEscape, '[', '2', '0', '0', '~'} -var pasteEnd = []byte{keyEscape, '[', '2', '0', '1', '~'} - -// bytesToKey tries to parse a key sequence from b. If successful, it returns -// the key and the remainder of the input. Otherwise it returns utf8.RuneError. -func bytesToKey(b []byte, pasteActive bool) (rune, []byte) { - if len(b) == 0 { - return utf8.RuneError, nil - } - - if !pasteActive { - switch b[0] { - case 1: // ^A - return keyHome, b[1:] - case 5: // ^E - return keyEnd, b[1:] - case 8: // ^H - return keyBackspace, b[1:] - case 11: // ^K - return keyDeleteLine, b[1:] - case 12: // ^L - return keyClearScreen, b[1:] - case 23: // ^W - return keyDeleteWord, b[1:] - } - } - - if b[0] != keyEscape { - if !utf8.FullRune(b) { - return utf8.RuneError, b - } - r, l := utf8.DecodeRune(b) - return r, b[l:] - } - - if !pasteActive && len(b) >= 3 && b[0] == keyEscape && b[1] == '[' { - switch b[2] { - case 'A': - return keyUp, b[3:] - case 'B': - return keyDown, b[3:] - case 'C': - return keyRight, b[3:] - case 'D': - return keyLeft, b[3:] - case 'H': - return keyHome, b[3:] - case 'F': - return keyEnd, b[3:] - } - } - - if !pasteActive && len(b) >= 6 && b[0] == keyEscape && b[1] == '[' && b[2] == '1' && b[3] == ';' && b[4] == '3' { - switch b[5] { - case 'C': - return keyAltRight, b[6:] - case 'D': - return keyAltLeft, b[6:] - } - } - - if !pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteStart) { - return keyPasteStart, b[6:] - } - - if pasteActive && len(b) >= 6 && bytes.Equal(b[:6], pasteEnd) { - return keyPasteEnd, b[6:] - } - - // If we get here then we have a key that we don't recognise, or a - // partial sequence. 
It's not clear how one should find the end of a - // sequence without knowing them all, but it seems that [a-zA-Z~] only - // appears at the end of a sequence. - for i, c := range b[0:] { - if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '~' { - return keyUnknown, b[i+1:] - } - } - - return utf8.RuneError, b -} - -// queue appends data to the end of t.outBuf -func (t *Terminal) queue(data []rune) { - t.outBuf = append(t.outBuf, []byte(string(data))...) -} - -var eraseUnderCursor = []rune{' ', keyEscape, '[', 'D'} -var space = []rune{' '} - -func isPrintable(key rune) bool { - isInSurrogateArea := key >= 0xd800 && key <= 0xdbff - return key >= 32 && !isInSurrogateArea -} - -// moveCursorToPos appends data to t.outBuf which will move the cursor to the -// given, logical position in the text. -func (t *Terminal) moveCursorToPos(pos int) { - if !t.echo { - return - } - - x := visualLength(t.prompt) + pos - y := x / t.termWidth - x = x % t.termWidth - - up := 0 - if y < t.cursorY { - up = t.cursorY - y - } - - down := 0 - if y > t.cursorY { - down = y - t.cursorY - } - - left := 0 - if x < t.cursorX { - left = t.cursorX - x - } - - right := 0 - if x > t.cursorX { - right = x - t.cursorX - } - - t.cursorX = x - t.cursorY = y - t.move(up, down, left, right) -} - -func (t *Terminal) move(up, down, left, right int) { - movement := make([]rune, 3*(up+down+left+right)) - m := movement - for i := 0; i < up; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'A' - m = m[3:] - } - for i := 0; i < down; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'B' - m = m[3:] - } - for i := 0; i < left; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'D' - m = m[3:] - } - for i := 0; i < right; i++ { - m[0] = keyEscape - m[1] = '[' - m[2] = 'C' - m = m[3:] - } - - t.queue(movement) -} - -func (t *Terminal) clearLineToRight() { - op := []rune{keyEscape, '[', 'K'} - t.queue(op) -} - -const maxLineLength = 4096 - -func (t *Terminal) setLine(newLine []rune, newPos int) { - if t.echo { - t.moveCursorToPos(0) - t.writeLine(newLine) - for i := len(newLine); i < len(t.line); i++ { - t.writeLine(space) - } - t.moveCursorToPos(newPos) - } - t.line = newLine - t.pos = newPos -} - -func (t *Terminal) advanceCursor(places int) { - t.cursorX += places - t.cursorY += t.cursorX / t.termWidth - if t.cursorY > t.maxLine { - t.maxLine = t.cursorY - } - t.cursorX = t.cursorX % t.termWidth - - if places > 0 && t.cursorX == 0 { - // Normally terminals will advance the current position - // when writing a character. But that doesn't happen - // for the last character in a line. However, when - // writing a character (except a new line) that causes - // a line wrap, the position will be advanced two - // places. - // - // So, if we are stopping at the end of a line, we - // need to write a newline so that our cursor can be - // advanced to the next line. - t.outBuf = append(t.outBuf, '\n') - } -} - -func (t *Terminal) eraseNPreviousChars(n int) { - if n == 0 { - return - } - - if t.pos < n { - n = t.pos - } - t.pos -= n - t.moveCursorToPos(t.pos) - - copy(t.line[t.pos:], t.line[n+t.pos:]) - t.line = t.line[:len(t.line)-n] - if t.echo { - t.writeLine(t.line[t.pos:]) - for i := 0; i < n; i++ { - t.queue(space) - } - t.advanceCursor(n) - t.moveCursorToPos(t.pos) - } -} - -// countToLeftWord returns then number of characters from the cursor to the -// start of the previous word. 
-func (t *Terminal) countToLeftWord() int { - if t.pos == 0 { - return 0 - } - - pos := t.pos - 1 - for pos > 0 { - if t.line[pos] != ' ' { - break - } - pos-- - } - for pos > 0 { - if t.line[pos] == ' ' { - pos++ - break - } - pos-- - } - - return t.pos - pos -} - -// countToRightWord returns then number of characters from the cursor to the -// start of the next word. -func (t *Terminal) countToRightWord() int { - pos := t.pos - for pos < len(t.line) { - if t.line[pos] == ' ' { - break - } - pos++ - } - for pos < len(t.line) { - if t.line[pos] != ' ' { - break - } - pos++ - } - return pos - t.pos -} - -// visualLength returns the number of visible glyphs in s. -func visualLength(runes []rune) int { - inEscapeSeq := false - length := 0 - - for _, r := range runes { - switch { - case inEscapeSeq: - if (r >= 'a' && r <= 'z') || (r >= 'A' && r <= 'Z') { - inEscapeSeq = false - } - case r == '\x1b': - inEscapeSeq = true - default: - length++ - } - } - - return length -} - -// handleKey processes the given key and, optionally, returns a line of text -// that the user has entered. -func (t *Terminal) handleKey(key rune) (line string, ok bool) { - if t.pasteActive && key != keyEnter { - t.addKeyToLine(key) - return - } - - switch key { - case keyBackspace: - if t.pos == 0 { - return - } - t.eraseNPreviousChars(1) - case keyAltLeft: - // move left by a word. - t.pos -= t.countToLeftWord() - t.moveCursorToPos(t.pos) - case keyAltRight: - // move right by a word. - t.pos += t.countToRightWord() - t.moveCursorToPos(t.pos) - case keyLeft: - if t.pos == 0 { - return - } - t.pos-- - t.moveCursorToPos(t.pos) - case keyRight: - if t.pos == len(t.line) { - return - } - t.pos++ - t.moveCursorToPos(t.pos) - case keyHome: - if t.pos == 0 { - return - } - t.pos = 0 - t.moveCursorToPos(t.pos) - case keyEnd: - if t.pos == len(t.line) { - return - } - t.pos = len(t.line) - t.moveCursorToPos(t.pos) - case keyUp: - entry, ok := t.history.NthPreviousEntry(t.historyIndex + 1) - if !ok { - return "", false - } - if t.historyIndex == -1 { - t.historyPending = string(t.line) - } - t.historyIndex++ - runes := []rune(entry) - t.setLine(runes, len(runes)) - case keyDown: - switch t.historyIndex { - case -1: - return - case 0: - runes := []rune(t.historyPending) - t.setLine(runes, len(runes)) - t.historyIndex-- - default: - entry, ok := t.history.NthPreviousEntry(t.historyIndex - 1) - if ok { - t.historyIndex-- - runes := []rune(entry) - t.setLine(runes, len(runes)) - } - } - case keyEnter: - t.moveCursorToPos(len(t.line)) - t.queue([]rune("\r\n")) - line = string(t.line) - ok = true - t.line = t.line[:0] - t.pos = 0 - t.cursorX = 0 - t.cursorY = 0 - t.maxLine = 0 - case keyDeleteWord: - // Delete zero or more spaces and then one or more characters. - t.eraseNPreviousChars(t.countToLeftWord()) - case keyDeleteLine: - // Delete everything from the current cursor position to the - // end of line. - for i := t.pos; i < len(t.line); i++ { - t.queue(space) - t.advanceCursor(1) - } - t.line = t.line[:t.pos] - t.moveCursorToPos(t.pos) - case keyCtrlD: - // Erase the character under the current position. - // The EOF case when the line is empty is handled in - // readLine(). - if t.pos < len(t.line) { - t.pos++ - t.eraseNPreviousChars(1) - } - case keyCtrlU: - t.eraseNPreviousChars(t.pos) - case keyClearScreen: - // Erases the screen and moves the cursor to the home position. 
- t.queue([]rune("\x1b[2J\x1b[H")) - t.queue(t.prompt) - t.cursorX, t.cursorY = 0, 0 - t.advanceCursor(visualLength(t.prompt)) - t.setLine(t.line, t.pos) - default: - if t.AutoCompleteCallback != nil { - prefix := string(t.line[:t.pos]) - suffix := string(t.line[t.pos:]) - - t.lock.Unlock() - newLine, newPos, completeOk := t.AutoCompleteCallback(prefix+suffix, len(prefix), key) - t.lock.Lock() - - if completeOk { - t.setLine([]rune(newLine), utf8.RuneCount([]byte(newLine)[:newPos])) - return - } - } - if !isPrintable(key) { - return - } - if len(t.line) == maxLineLength { - return - } - t.addKeyToLine(key) - } - return -} - -// addKeyToLine inserts the given key at the current position in the current -// line. -func (t *Terminal) addKeyToLine(key rune) { - if len(t.line) == cap(t.line) { - newLine := make([]rune, len(t.line), 2*(1+len(t.line))) - copy(newLine, t.line) - t.line = newLine - } - t.line = t.line[:len(t.line)+1] - copy(t.line[t.pos+1:], t.line[t.pos:]) - t.line[t.pos] = key - if t.echo { - t.writeLine(t.line[t.pos:]) - } - t.pos++ - t.moveCursorToPos(t.pos) -} - -func (t *Terminal) writeLine(line []rune) { - for len(line) != 0 { - remainingOnLine := t.termWidth - t.cursorX - todo := len(line) - if todo > remainingOnLine { - todo = remainingOnLine - } - t.queue(line[:todo]) - t.advanceCursor(visualLength(line[:todo])) - line = line[todo:] - } -} - -func (t *Terminal) Write(buf []byte) (n int, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - if t.cursorX == 0 && t.cursorY == 0 { - // This is the easy case: there's nothing on the screen that we - // have to move out of the way. - return t.c.Write(buf) - } - - // We have a prompt and possibly user input on the screen. We - // have to clear it first. - t.move(0 /* up */, 0 /* down */, t.cursorX /* left */, 0 /* right */) - t.cursorX = 0 - t.clearLineToRight() - - for t.cursorY > 0 { - t.move(1 /* up */, 0, 0, 0) - t.cursorY-- - t.clearLineToRight() - } - - if _, err = t.c.Write(t.outBuf); err != nil { - return - } - t.outBuf = t.outBuf[:0] - - if n, err = t.c.Write(buf); err != nil { - return - } - - t.writeLine(t.prompt) - if t.echo { - t.writeLine(t.line) - } - - t.moveCursorToPos(t.pos) - - if _, err = t.c.Write(t.outBuf); err != nil { - return - } - t.outBuf = t.outBuf[:0] - return -} - -// ReadPassword temporarily changes the prompt and reads a password, without -// echo, from the terminal. -func (t *Terminal) ReadPassword(prompt string) (line string, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - oldPrompt := t.prompt - t.prompt = []rune(prompt) - t.echo = false - - line, err = t.readLine() - - t.prompt = oldPrompt - t.echo = true - - return -} - -// ReadLine returns a line of input from the terminal. 
-func (t *Terminal) ReadLine() (line string, err error) { - t.lock.Lock() - defer t.lock.Unlock() - - return t.readLine() -} - -func (t *Terminal) readLine() (line string, err error) { - // t.lock must be held at this point - - if t.cursorX == 0 && t.cursorY == 0 { - t.writeLine(t.prompt) - t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - } - - lineIsPasted := t.pasteActive - - for { - rest := t.remainder - lineOk := false - for !lineOk { - var key rune - key, rest = bytesToKey(rest, t.pasteActive) - if key == utf8.RuneError { - break - } - if !t.pasteActive { - if key == keyCtrlD { - if len(t.line) == 0 { - return "", io.EOF - } - } - if key == keyPasteStart { - t.pasteActive = true - if len(t.line) == 0 { - lineIsPasted = true - } - continue - } - } else if key == keyPasteEnd { - t.pasteActive = false - continue - } - if !t.pasteActive { - lineIsPasted = false - } - line, lineOk = t.handleKey(key) - } - if len(rest) > 0 { - n := copy(t.inBuf[:], rest) - t.remainder = t.inBuf[:n] - } else { - t.remainder = nil - } - t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - if lineOk { - if t.echo { - t.historyIndex = -1 - t.history.Add(line) - } - if lineIsPasted { - err = ErrPasteIndicator - } - return - } - - // t.remainder is a slice at the beginning of t.inBuf - // containing a partial key sequence - readBuf := t.inBuf[len(t.remainder):] - var n int - - t.lock.Unlock() - n, err = t.c.Read(readBuf) - t.lock.Lock() - - if err != nil { - return - } - - t.remainder = t.inBuf[:n+len(t.remainder)] - } - - panic("unreachable") // for Go 1.0. -} - -// SetPrompt sets the prompt to be used when reading subsequent lines. -func (t *Terminal) SetPrompt(prompt string) { - t.lock.Lock() - defer t.lock.Unlock() - - t.prompt = []rune(prompt) -} - -func (t *Terminal) clearAndRepaintLinePlusNPrevious(numPrevLines int) { - // Move cursor to column zero at the start of the line. - t.move(t.cursorY, 0, t.cursorX, 0) - t.cursorX, t.cursorY = 0, 0 - t.clearLineToRight() - for t.cursorY < numPrevLines { - // Move down a line - t.move(0, 1, 0, 0) - t.cursorY++ - t.clearLineToRight() - } - // Move back to beginning. - t.move(t.cursorY, 0, 0, 0) - t.cursorX, t.cursorY = 0, 0 - - t.queue(t.prompt) - t.advanceCursor(visualLength(t.prompt)) - t.writeLine(t.line) - t.moveCursorToPos(t.pos) -} - -func (t *Terminal) SetSize(width, height int) error { - t.lock.Lock() - defer t.lock.Unlock() - - if width == 0 { - width = 1 - } - - oldWidth := t.termWidth - t.termWidth, t.termHeight = width, height - - switch { - case width == oldWidth: - // If the width didn't change then nothing else needs to be - // done. - return nil - case width < oldWidth: - // Some terminals (e.g. xterm) will truncate lines that were - // too long when shinking. Others, (e.g. gnome-terminal) will - // attempt to wrap them. For the former, repainting t.maxLine - // works great, but that behaviour goes badly wrong in the case - // of the latter because they have doubled every full line. - - // We assume that we are working on a terminal that wraps lines - // and adjust the cursor position based on every previous line - // wrapping and turning into two. This causes the prompt on - // xterms to move upwards, which isn't great, but it avoids a - // huge mess with gnome-terminal. 
- if t.cursorX >= t.termWidth { - t.cursorX = t.termWidth - 1 - } - t.cursorY *= 2 - t.clearAndRepaintLinePlusNPrevious(t.maxLine * 2) - case width > oldWidth: - // If the terminal expands then our position calculations will - // be wrong in the future because we think the cursor is - // |t.pos| chars into the string, but there will be a gap at - // the end of any wrapped line. - // - // But the position will actually be correct until we move, so - // we can move back to the beginning and repaint everything. - t.clearAndRepaintLinePlusNPrevious(t.maxLine) - } - - _, err := t.c.Write(t.outBuf) - t.outBuf = t.outBuf[:0] - return err -} - -type pasteIndicatorError struct{} - -func (pasteIndicatorError) Error() string { - return "terminal: ErrPasteIndicator not correctly handled" -} - -// ErrPasteIndicator may be returned from ReadLine as the error, in addition -// to valid line data. It indicates that bracketed paste mode is enabled and -// that the returned line consists only of pasted data. Programs may wish to -// interpret pasted data more literally than typed data. -var ErrPasteIndicator = pasteIndicatorError{} - -// SetBracketedPasteMode requests that the terminal bracket paste operations -// with markers. Not all terminals support this but, if it is supported, then -// enabling this mode will stop any autocomplete callback from running due to -// pastes. Additionally, any lines that are completely pasted will be returned -// from ReadLine with the error set to ErrPasteIndicator. -func (t *Terminal) SetBracketedPasteMode(on bool) { - if on { - io.WriteString(t.c, "\x1b[?2004h") - } else { - io.WriteString(t.c, "\x1b[?2004l") - } -} - -// stRingBuffer is a ring buffer of strings. -type stRingBuffer struct { - // entries contains max elements. - entries []string - max int - // head contains the index of the element most recently added to the ring. - head int - // size contains the number of elements in the ring. - size int -} - -func (s *stRingBuffer) Add(a string) { - if s.entries == nil { - const defaultNumEntries = 100 - s.entries = make([]string, defaultNumEntries) - s.max = defaultNumEntries - } - - s.head = (s.head + 1) % s.max - s.entries[s.head] = a - if s.size < s.max { - s.size++ - } -} - -// NthPreviousEntry returns the value passed to the nth previous call to Add. -// If n is zero then the immediately prior value is returned, if one, then the -// next most recent, and so on. If such an element doesn't exist then ok is -// false. -func (s *stRingBuffer) NthPreviousEntry(n int) (value string, ok bool) { - if n >= s.size { - return "", false - } - index := s.head - n - if index < 0 { - index += s.max - } - return s.entries[index], true -} diff --git a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal_test.go b/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal_test.go deleted file mode 100644 index 6579801..0000000 --- a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal_test.go +++ /dev/null @@ -1,243 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. 
- -package terminal - -import ( - "io" - "testing" -) - -type MockTerminal struct { - toSend []byte - bytesPerRead int - received []byte -} - -func (c *MockTerminal) Read(data []byte) (n int, err error) { - n = len(data) - if n == 0 { - return - } - if n > len(c.toSend) { - n = len(c.toSend) - } - if n == 0 { - return 0, io.EOF - } - if c.bytesPerRead > 0 && n > c.bytesPerRead { - n = c.bytesPerRead - } - copy(data, c.toSend[:n]) - c.toSend = c.toSend[n:] - return -} - -func (c *MockTerminal) Write(data []byte) (n int, err error) { - c.received = append(c.received, data...) - return len(data), nil -} - -func TestClose(t *testing.T) { - c := &MockTerminal{} - ss := NewTerminal(c, "> ") - line, err := ss.ReadLine() - if line != "" { - t.Errorf("Expected empty line but got: %s", line) - } - if err != io.EOF { - t.Errorf("Error should have been EOF but got: %s", err) - } -} - -var keyPressTests = []struct { - in string - line string - err error - throwAwayLines int -}{ - { - err: io.EOF, - }, - { - in: "\r", - line: "", - }, - { - in: "foo\r", - line: "foo", - }, - { - in: "a\x1b[Cb\r", // right - line: "ab", - }, - { - in: "a\x1b[Db\r", // left - line: "ba", - }, - { - in: "a\177b\r", // backspace - line: "b", - }, - { - in: "\x1b[A\r", // up - }, - { - in: "\x1b[B\r", // down - }, - { - in: "line\x1b[A\x1b[B\r", // up then down - line: "line", - }, - { - in: "line1\rline2\x1b[A\r", // recall previous line. - line: "line1", - throwAwayLines: 1, - }, - { - // recall two previous lines and append. - in: "line1\rline2\rline3\x1b[A\x1b[Axxx\r", - line: "line1xxx", - throwAwayLines: 2, - }, - { - // Ctrl-A to move to beginning of line followed by ^K to kill - // line. - in: "a b \001\013\r", - line: "", - }, - { - // Ctrl-A to move to beginning of line, Ctrl-E to move to end, - // finally ^K to kill nothing. - in: "a b \001\005\013\r", - line: "a b ", - }, - { - in: "\027\r", - line: "", - }, - { - in: "a\027\r", - line: "", - }, - { - in: "a \027\r", - line: "", - }, - { - in: "a b\027\r", - line: "a ", - }, - { - in: "a b \027\r", - line: "a ", - }, - { - in: "one two thr\x1b[D\027\r", - line: "one two r", - }, - { - in: "\013\r", - line: "", - }, - { - in: "a\013\r", - line: "a", - }, - { - in: "ab\x1b[D\013\r", - line: "a", - }, - { - in: "Ξεσκεπάζω\r", - line: "Ξεσκεπάζω", - }, - { - in: "£\r\x1b[A\177\r", // non-ASCII char, enter, up, backspace. - line: "", - throwAwayLines: 1, - }, - { - in: "£\r££\x1b[A\x1b[B\177\r", // non-ASCII char, enter, 2x non-ASCII, up, down, backspace, enter. - line: "£", - throwAwayLines: 1, - }, - { - // Ctrl-D at the end of the line should be ignored. - in: "a\004\r", - line: "a", - }, - { - // a, b, left, Ctrl-D should erase the b. - in: "ab\x1b[D\004\r", - line: "a", - }, - { - // a, b, c, d, left, left, ^U should erase to the beginning of - // the line. - in: "abcd\x1b[D\x1b[D\025\r", - line: "cd", - }, - { - // Bracketed paste mode: control sequences should be returned - // verbatim in paste mode. - in: "abc\x1b[200~de\177f\x1b[201~\177\r", - line: "abcde\177", - }, - { - // Enter in bracketed paste mode should still work. - in: "abc\x1b[200~d\refg\x1b[201~h\r", - line: "efgh", - throwAwayLines: 1, - }, - { - // Lines consisting entirely of pasted data should be indicated as such. 
- in: "\x1b[200~a\r", - line: "a", - err: ErrPasteIndicator, - }, -} - -func TestKeyPresses(t *testing.T) { - for i, test := range keyPressTests { - for j := 1; j < len(test.in); j++ { - c := &MockTerminal{ - toSend: []byte(test.in), - bytesPerRead: j, - } - ss := NewTerminal(c, "> ") - for k := 0; k < test.throwAwayLines; k++ { - _, err := ss.ReadLine() - if err != nil { - t.Errorf("Throwaway line %d from test %d resulted in error: %s", k, i, err) - } - } - line, err := ss.ReadLine() - if line != test.line { - t.Errorf("Line resulting from test %d (%d bytes per read) was '%s', expected '%s'", i, j, line, test.line) - break - } - if err != test.err { - t.Errorf("Error resulting from test %d (%d bytes per read) was '%v', expected '%v'", i, j, err, test.err) - break - } - } - } -} - -func TestPasswordNotSaved(t *testing.T) { - c := &MockTerminal{ - toSend: []byte("password\r\x1b[A\r"), - bytesPerRead: 1, - } - ss := NewTerminal(c, "> ") - pw, _ := ss.ReadPassword("> ") - if pw != "password" { - t.Fatalf("failed to read password, got %s", pw) - } - line, _ := ss.ReadLine() - if len(line) > 0 { - t.Fatalf("password was saved in history") - } -} diff --git a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util.go b/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util.go deleted file mode 100644 index 0763c9a..0000000 --- a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util.go +++ /dev/null @@ -1,128 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build darwin dragonfly freebsd linux,!appengine netbsd openbsd - -// Package terminal provides support functions for dealing with terminals, as -// commonly found on UNIX systems. -// -// Putting a terminal into raw mode is the most common requirement: -// -// oldState, err := terminal.MakeRaw(0) -// if err != nil { -// panic(err) -// } -// defer terminal.Restore(0, oldState) -package terminal - -import ( - "io" - "syscall" - "unsafe" -) - -// State contains the state of a terminal. -type State struct { - termios syscall.Termios -} - -// IsTerminal returns true if the given file descriptor is a terminal. -func IsTerminal(fd int) bool { - var termios syscall.Termios - _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&termios)), 0, 0, 0) - return err == 0 -} - -// MakeRaw put the terminal connected to the given file descriptor into raw -// mode and returns the previous state of the terminal so that it can be -// restored. -func MakeRaw(fd int) (*State, error) { - var oldState State - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 { - return nil, err - } - - newState := oldState.termios - newState.Iflag &^= syscall.ISTRIP | syscall.INLCR | syscall.ICRNL | syscall.IGNCR | syscall.IXON | syscall.IXOFF - newState.Lflag &^= syscall.ECHO | syscall.ICANON | syscall.ISIG - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 { - return nil, err - } - - return &oldState, nil -} - -// GetState returns the current state of a terminal which may be useful to -// restore the terminal after a signal. 
-func GetState(fd int) (*State, error) { - var oldState State - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState.termios)), 0, 0, 0); err != 0 { - return nil, err - } - - return &oldState, nil -} - -// Restore restores the terminal connected to the given file descriptor to a -// previous state. -func Restore(fd int, state *State) error { - _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&state.termios)), 0, 0, 0) - return err -} - -// GetSize returns the dimensions of the given terminal. -func GetSize(fd int) (width, height int, err error) { - var dimensions [4]uint16 - - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), uintptr(syscall.TIOCGWINSZ), uintptr(unsafe.Pointer(&dimensions)), 0, 0, 0); err != 0 { - return -1, -1, err - } - return int(dimensions[1]), int(dimensions[0]), nil -} - -// ReadPassword reads a line of input from a terminal without local echo. This -// is commonly used for inputting passwords and other sensitive data. The slice -// returned does not include the \n. -func ReadPassword(fd int) ([]byte, error) { - var oldState syscall.Termios - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlReadTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0); err != 0 { - return nil, err - } - - newState := oldState - newState.Lflag &^= syscall.ECHO - newState.Lflag |= syscall.ICANON | syscall.ISIG - newState.Iflag |= syscall.ICRNL - if _, _, err := syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&newState)), 0, 0, 0); err != 0 { - return nil, err - } - - defer func() { - syscall.Syscall6(syscall.SYS_IOCTL, uintptr(fd), ioctlWriteTermios, uintptr(unsafe.Pointer(&oldState)), 0, 0, 0) - }() - - var buf [16]byte - var ret []byte - for { - n, err := syscall.Read(fd, buf[:]) - if err != nil { - return nil, err - } - if n == 0 { - if len(ret) == 0 { - return nil, io.EOF - } - break - } - if buf[n-1] == '\n' { - n-- - } - ret = append(ret, buf[:n]...) - if n < len(buf) { - break - } - } - - return ret, nil -} diff --git a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_bsd.go b/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_bsd.go deleted file mode 100644 index 9c1ffd1..0000000 --- a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_bsd.go +++ /dev/null @@ -1,12 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build darwin dragonfly freebsd netbsd openbsd - -package terminal - -import "syscall" - -const ioctlReadTermios = syscall.TIOCGETA -const ioctlWriteTermios = syscall.TIOCSETA diff --git a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_linux.go b/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_linux.go deleted file mode 100644 index 5883b22..0000000 --- a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_linux.go +++ /dev/null @@ -1,11 +0,0 @@ -// Copyright 2013 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -package terminal - -// These constants are declared here, rather than importing -// them from the syscall package as some syscall packages, even -// on linux, for example gccgo, do not declare them. 
-const ioctlReadTermios = 0x5401 // syscall.TCGETS -const ioctlWriteTermios = 0x5402 // syscall.TCSETS diff --git a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_windows.go b/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_windows.go deleted file mode 100644 index 2dd6c3d..0000000 --- a/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_windows.go +++ /dev/null @@ -1,174 +0,0 @@ -// Copyright 2011 The Go Authors. All rights reserved. -// Use of this source code is governed by a BSD-style -// license that can be found in the LICENSE file. - -// +build windows - -// Package terminal provides support functions for dealing with terminals, as -// commonly found on UNIX systems. -// -// Putting a terminal into raw mode is the most common requirement: -// -// oldState, err := terminal.MakeRaw(0) -// if err != nil { -// panic(err) -// } -// defer terminal.Restore(0, oldState) -package terminal - -import ( - "io" - "syscall" - "unsafe" -) - -const ( - enableLineInput = 2 - enableEchoInput = 4 - enableProcessedInput = 1 - enableWindowInput = 8 - enableMouseInput = 16 - enableInsertMode = 32 - enableQuickEditMode = 64 - enableExtendedFlags = 128 - enableAutoPosition = 256 - enableProcessedOutput = 1 - enableWrapAtEolOutput = 2 -) - -var kernel32 = syscall.NewLazyDLL("kernel32.dll") - -var ( - procGetConsoleMode = kernel32.NewProc("GetConsoleMode") - procSetConsoleMode = kernel32.NewProc("SetConsoleMode") - procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo") -) - -type ( - short int16 - word uint16 - - coord struct { - x short - y short - } - smallRect struct { - left short - top short - right short - bottom short - } - consoleScreenBufferInfo struct { - size coord - cursorPosition coord - attributes word - window smallRect - maximumWindowSize coord - } -) - -type State struct { - mode uint32 -} - -// IsTerminal returns true if the given file descriptor is a terminal. -func IsTerminal(fd int) bool { - var st uint32 - r, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0) - return r != 0 && e == 0 -} - -// MakeRaw put the terminal connected to the given file descriptor into raw -// mode and returns the previous state of the terminal so that it can be -// restored. -func MakeRaw(fd int) (*State, error) { - var st uint32 - _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0) - if e != 0 { - return nil, error(e) - } - st &^= (enableEchoInput | enableProcessedInput | enableLineInput | enableProcessedOutput) - _, _, e = syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(st), 0) - if e != 0 { - return nil, error(e) - } - return &State{st}, nil -} - -// GetState returns the current state of a terminal which may be useful to -// restore the terminal after a signal. -func GetState(fd int) (*State, error) { - var st uint32 - _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0) - if e != 0 { - return nil, error(e) - } - return &State{st}, nil -} - -// Restore restores the terminal connected to the given file descriptor to a -// previous state. -func Restore(fd int, state *State) error { - _, _, err := syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(state.mode), 0) - return err -} - -// GetSize returns the dimensions of the given terminal. 
-func GetSize(fd int) (width, height int, err error) { - var info consoleScreenBufferInfo - _, _, e := syscall.Syscall(procGetConsoleScreenBufferInfo.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&info)), 0) - if e != 0 { - return 0, 0, error(e) - } - return int(info.size.x), int(info.size.y), nil -} - -// ReadPassword reads a line of input from a terminal without local echo. This -// is commonly used for inputting passwords and other sensitive data. The slice -// returned does not include the \n. -func ReadPassword(fd int) ([]byte, error) { - var st uint32 - _, _, e := syscall.Syscall(procGetConsoleMode.Addr(), 2, uintptr(fd), uintptr(unsafe.Pointer(&st)), 0) - if e != 0 { - return nil, error(e) - } - old := st - - st &^= (enableEchoInput) - st |= (enableProcessedInput | enableLineInput | enableProcessedOutput) - _, _, e = syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(st), 0) - if e != 0 { - return nil, error(e) - } - - defer func() { - syscall.Syscall(procSetConsoleMode.Addr(), 2, uintptr(fd), uintptr(old), 0) - }() - - var buf [16]byte - var ret []byte - for { - n, err := syscall.Read(syscall.Handle(fd), buf[:]) - if err != nil { - return nil, err - } - if n == 0 { - if len(ret) == 0 { - return nil, io.EOF - } - break - } - if buf[n-1] == '\n' { - n-- - } - if n > 0 && buf[n-1] == '\r' { - n-- - } - ret = append(ret, buf[:n]...) - if n < len(buf) { - break - } - } - - return ret, nil -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE b/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE deleted file mode 100644 index a68e67f..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE +++ /dev/null @@ -1,188 +0,0 @@ - -Copyright (c) 2011-2014 - Canonical Inc. - -This software is licensed under the LGPLv3, included below. - -As a special exception to the GNU Lesser General Public License version 3 -("LGPL3"), the copyright holders of this Library give you permission to -convey to a third party a Combined Work that links statically or dynamically -to this Library without providing any Minimal Corresponding Source or -Minimal Application Code as set out in 4d or providing the installation -information set out in section 4e, provided that you comply with the other -provisions of LGPL3 and provided that you meet, for the Application the -terms and conditions of the license(s) which apply to the Application. - -Except as stated in this special exception, the provisions of LGPL3 will -continue to comply in full to this Library. If you modify this Library, you -may apply this exception to your version of this Library, but you are not -obliged to do so. If you do not wish to do so, delete this exception -statement from your version. This exception does not (and cannot) modify any -license terms which apply to the Application, with which you must still -comply. - - - GNU LESSER GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - - This version of the GNU Lesser General Public License incorporates -the terms and conditions of version 3 of the GNU General Public -License, supplemented by the additional permissions listed below. - - 0. Additional Definitions. - - As used herein, "this License" refers to version 3 of the GNU Lesser -General Public License, and the "GNU GPL" refers to version 3 of the GNU -General Public License. 
- - "The Library" refers to a covered work governed by this License, -other than an Application or a Combined Work as defined below. - - An "Application" is any work that makes use of an interface provided -by the Library, but which is not otherwise based on the Library. -Defining a subclass of a class defined by the Library is deemed a mode -of using an interface provided by the Library. - - A "Combined Work" is a work produced by combining or linking an -Application with the Library. The particular version of the Library -with which the Combined Work was made is also called the "Linked -Version". - - The "Minimal Corresponding Source" for a Combined Work means the -Corresponding Source for the Combined Work, excluding any source code -for portions of the Combined Work that, considered in isolation, are -based on the Application, and not on the Linked Version. - - The "Corresponding Application Code" for a Combined Work means the -object code and/or source code for the Application, including any data -and utility programs needed for reproducing the Combined Work from the -Application, but excluding the System Libraries of the Combined Work. - - 1. Exception to Section 3 of the GNU GPL. - - You may convey a covered work under sections 3 and 4 of this License -without being bound by section 3 of the GNU GPL. - - 2. Conveying Modified Versions. - - If you modify a copy of the Library, and, in your modifications, a -facility refers to a function or data to be supplied by an Application -that uses the facility (other than as an argument passed when the -facility is invoked), then you may convey a copy of the modified -version: - - a) under this License, provided that you make a good faith effort to - ensure that, in the event an Application does not supply the - function or data, the facility still operates, and performs - whatever part of its purpose remains meaningful, or - - b) under the GNU GPL, with none of the additional permissions of - this License applicable to that copy. - - 3. Object Code Incorporating Material from Library Header Files. - - The object code form of an Application may incorporate material from -a header file that is part of the Library. You may convey such object -code under terms of your choice, provided that, if the incorporated -material is not limited to numerical parameters, data structure -layouts and accessors, or small macros, inline functions and templates -(ten or fewer lines in length), you do both of the following: - - a) Give prominent notice with each copy of the object code that the - Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the object code with a copy of the GNU GPL and this license - document. - - 4. Combined Works. - - You may convey a Combined Work under terms of your choice that, -taken together, effectively do not restrict modification of the -portions of the Library contained in the Combined Work and reverse -engineering for debugging such modifications, if you also do each of -the following: - - a) Give prominent notice with each copy of the Combined Work that - the Library is used in it and that the Library and its use are - covered by this License. - - b) Accompany the Combined Work with a copy of the GNU GPL and this license - document. - - c) For a Combined Work that displays copyright notices during - execution, include the copyright notice for the Library among - these notices, as well as a reference directing the user to the - copies of the GNU GPL and this license document. 
- - d) Do one of the following: - - 0) Convey the Minimal Corresponding Source under the terms of this - License, and the Corresponding Application Code in a form - suitable for, and under terms that permit, the user to - recombine or relink the Application with a modified version of - the Linked Version to produce a modified Combined Work, in the - manner specified by section 6 of the GNU GPL for conveying - Corresponding Source. - - 1) Use a suitable shared library mechanism for linking with the - Library. A suitable mechanism is one that (a) uses at run time - a copy of the Library already present on the user's computer - system, and (b) will operate properly with a modified version - of the Library that is interface-compatible with the Linked - Version. - - e) Provide Installation Information, but only if you would otherwise - be required to provide such information under section 6 of the - GNU GPL, and only to the extent that such information is - necessary to install and execute a modified version of the - Combined Work produced by recombining or relinking the - Application with a modified version of the Linked Version. (If - you use option 4d0, the Installation Information must accompany - the Minimal Corresponding Source and Corresponding Application - Code. If you use option 4d1, you must provide the Installation - Information in the manner specified by section 6 of the GNU GPL - for conveying Corresponding Source.) - - 5. Combined Libraries. - - You may place library facilities that are a work based on the -Library side by side in a single library together with other library -facilities that are not Applications and are not covered by this -License, and convey such a combined library under terms of your -choice, if you do both of the following: - - a) Accompany the combined library with a copy of the same work based - on the Library, uncombined with any other library facilities, - conveyed under the terms of this License. - - b) Give prominent notice with the combined library that part of it - is a work based on the Library, and explaining where to find the - accompanying uncombined form of the same work. - - 6. Revised Versions of the GNU Lesser General Public License. - - The Free Software Foundation may publish revised and/or new versions -of the GNU Lesser General Public License from time to time. Such new -versions will be similar in spirit to the present version, but may -differ in detail to address new problems or concerns. - - Each version is given a distinguishing version number. If the -Library as you received it specifies that a certain numbered version -of the GNU Lesser General Public License "or any later version" -applies to it, you have the option of following the terms and -conditions either of that published version or of any later version -published by the Free Software Foundation. If the Library as you -received it does not specify a version number of the GNU Lesser -General Public License, you may choose any version of the GNU Lesser -General Public License ever published by the Free Software Foundation. - - If the Library as you received it specifies that a proxy can decide -whether future versions of the GNU Lesser General Public License shall -apply, that proxy's public statement of acceptance of any version is -permanent authorization for you to choose that version for the -Library. 
diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE.libyaml b/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE.libyaml deleted file mode 100644 index 8da58fb..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE.libyaml +++ /dev/null @@ -1,31 +0,0 @@ -The following files were ported to Go from C files of libyaml, and thus -are still covered by their original copyright and license: - - apic.go - emitterc.go - parserc.go - readerc.go - scannerc.go - writerc.go - yamlh.go - yamlprivateh.go - -Copyright (c) 2006 Kirill Simonov - -Permission is hereby granted, free of charge, to any person obtaining a copy of -this software and associated documentation files (the "Software"), to deal in -the Software without restriction, including without limitation the rights to -use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -of the Software, and to permit persons to whom the Software is furnished to do -so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/README.md b/Godeps/_workspace/src/gopkg.in/yaml.v1/README.md deleted file mode 100644 index af07056..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/README.md +++ /dev/null @@ -1,128 +0,0 @@ -# YAML support for the Go language - -Introduction ------------- - -The yaml package enables Go programs to comfortably encode and decode YAML -values. It was developed within [Canonical](https://www.canonical.com) as -part of the [juju](https://juju.ubuntu.com) project, and is based on a -pure Go port of the well-known [libyaml](http://pyyaml.org/wiki/LibYAML) -C library to parse and generate YAML data quickly and reliably. - -Compatibility -------------- - -The yaml package supports most of YAML 1.1 and 1.2, including support for -anchors, tags, map merging, etc. Multi-document unmarshalling is not yet -implemented, and base-60 floats from YAML 1.1 are purposefully not -supported since they're a poor design and are gone in YAML 1.2. - -Installation and usage ----------------------- - -The import path for the package is *gopkg.in/yaml.v1*. - -To install it, run: - - go get gopkg.in/yaml.v1 - -API documentation ------------------ - -If opened in a browser, the import path itself leads to the API documentation: - - * [https://gopkg.in/yaml.v1](https://gopkg.in/yaml.v1) - -API stability -------------- - -The package API for yaml v1 will remain stable as described in [gopkg.in](https://gopkg.in). - - -License -------- - -The yaml package is licensed under the LGPL with an exception that allows it to be linked statically. Please see the LICENSE file for details. - - -Example -------- - -```Go -package main - -import ( - "fmt" - "log" - - "gopkg.in/yaml.v1" -) - -var data = ` -a: Easy! 
-b: - c: 2 - d: [3, 4] -` - -type T struct { - A string - B struct{C int; D []int ",flow"} -} - -func main() { - t := T{} - - err := yaml.Unmarshal([]byte(data), &t) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- t:\n%v\n\n", t) - - d, err := yaml.Marshal(&t) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- t dump:\n%s\n\n", string(d)) - - m := make(map[interface{}]interface{}) - - err = yaml.Unmarshal([]byte(data), &m) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- m:\n%v\n\n", m) - - d, err = yaml.Marshal(&m) - if err != nil { - log.Fatalf("error: %v", err) - } - fmt.Printf("--- m dump:\n%s\n\n", string(d)) -} -``` - -This example will generate the following output: - -``` ---- t: -{Easy! {2 [3 4]}} - ---- t dump: -a: Easy! -b: - c: 2 - d: [3, 4] - - ---- m: -map[a:Easy! b:map[c:2 d:[3 4]]] - ---- m dump: -a: Easy! -b: - c: 2 - d: - - 3 - - 4 -``` - diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/apic.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/apic.go deleted file mode 100644 index 95ec014..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/apic.go +++ /dev/null @@ -1,742 +0,0 @@ -package yaml - -import ( - "io" - "os" -) - -func yaml_insert_token(parser *yaml_parser_t, pos int, token *yaml_token_t) { - //fmt.Println("yaml_insert_token", "pos:", pos, "typ:", token.typ, "head:", parser.tokens_head, "len:", len(parser.tokens)) - - // Check if we can move the queue at the beginning of the buffer. - if parser.tokens_head > 0 && len(parser.tokens) == cap(parser.tokens) { - if parser.tokens_head != len(parser.tokens) { - copy(parser.tokens, parser.tokens[parser.tokens_head:]) - } - parser.tokens = parser.tokens[:len(parser.tokens)-parser.tokens_head] - parser.tokens_head = 0 - } - parser.tokens = append(parser.tokens, *token) - if pos < 0 { - return - } - copy(parser.tokens[parser.tokens_head+pos+1:], parser.tokens[parser.tokens_head+pos:]) - parser.tokens[parser.tokens_head+pos] = *token -} - -// Create a new parser object. -func yaml_parser_initialize(parser *yaml_parser_t) bool { - *parser = yaml_parser_t{ - raw_buffer: make([]byte, 0, input_raw_buffer_size), - buffer: make([]byte, 0, input_buffer_size), - } - return true -} - -// Destroy a parser object. -func yaml_parser_delete(parser *yaml_parser_t) { - *parser = yaml_parser_t{} -} - -// String read handler. -func yaml_string_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - if parser.input_pos == len(parser.input) { - return 0, io.EOF - } - n = copy(buffer, parser.input[parser.input_pos:]) - parser.input_pos += n - return n, nil -} - -// File read handler. -func yaml_file_read_handler(parser *yaml_parser_t, buffer []byte) (n int, err error) { - return parser.input_file.Read(buffer) -} - -// Set a string input. -func yaml_parser_set_input_string(parser *yaml_parser_t, input []byte) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_string_read_handler - parser.input = input - parser.input_pos = 0 -} - -// Set a file input. -func yaml_parser_set_input_file(parser *yaml_parser_t, file *os.File) { - if parser.read_handler != nil { - panic("must set the input source only once") - } - parser.read_handler = yaml_file_read_handler - parser.input_file = file -} - -// Set the source encoding. 
-func yaml_parser_set_encoding(parser *yaml_parser_t, encoding yaml_encoding_t) { - if parser.encoding != yaml_ANY_ENCODING { - panic("must set the encoding only once") - } - parser.encoding = encoding -} - -// Create a new emitter object. -func yaml_emitter_initialize(emitter *yaml_emitter_t) bool { - *emitter = yaml_emitter_t{ - buffer: make([]byte, output_buffer_size), - raw_buffer: make([]byte, 0, output_raw_buffer_size), - states: make([]yaml_emitter_state_t, 0, initial_stack_size), - events: make([]yaml_event_t, 0, initial_queue_size), - } - return true -} - -// Destroy an emitter object. -func yaml_emitter_delete(emitter *yaml_emitter_t) { - *emitter = yaml_emitter_t{} -} - -// String write handler. -func yaml_string_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - *emitter.output_buffer = append(*emitter.output_buffer, buffer...) - return nil -} - -// File write handler. -func yaml_file_write_handler(emitter *yaml_emitter_t, buffer []byte) error { - _, err := emitter.output_file.Write(buffer) - return err -} - -// Set a string output. -func yaml_emitter_set_output_string(emitter *yaml_emitter_t, output_buffer *[]byte) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_string_write_handler - emitter.output_buffer = output_buffer -} - -// Set a file output. -func yaml_emitter_set_output_file(emitter *yaml_emitter_t, file io.Writer) { - if emitter.write_handler != nil { - panic("must set the output target only once") - } - emitter.write_handler = yaml_file_write_handler - emitter.output_file = file -} - -// Set the output encoding. -func yaml_emitter_set_encoding(emitter *yaml_emitter_t, encoding yaml_encoding_t) { - if emitter.encoding != yaml_ANY_ENCODING { - panic("must set the output encoding only once") - } - emitter.encoding = encoding -} - -// Set the canonical output style. -func yaml_emitter_set_canonical(emitter *yaml_emitter_t, canonical bool) { - emitter.canonical = canonical -} - -//// Set the indentation increment. -func yaml_emitter_set_indent(emitter *yaml_emitter_t, indent int) { - if indent < 2 || indent > 9 { - indent = 2 - } - emitter.best_indent = indent -} - -// Set the preferred line width. -func yaml_emitter_set_width(emitter *yaml_emitter_t, width int) { - if width < 0 { - width = -1 - } - emitter.best_width = width -} - -// Set if unescaped non-ASCII characters are allowed. -func yaml_emitter_set_unicode(emitter *yaml_emitter_t, unicode bool) { - emitter.unicode = unicode -} - -// Set the preferred line break character. -func yaml_emitter_set_break(emitter *yaml_emitter_t, line_break yaml_break_t) { - emitter.line_break = line_break -} - -///* -// * Destroy a token object. -// */ -// -//YAML_DECLARE(void) -//yaml_token_delete(yaml_token_t *token) -//{ -// assert(token); // Non-NULL token object expected. 
-// -// switch (token.type) -// { -// case YAML_TAG_DIRECTIVE_TOKEN: -// yaml_free(token.data.tag_directive.handle); -// yaml_free(token.data.tag_directive.prefix); -// break; -// -// case YAML_ALIAS_TOKEN: -// yaml_free(token.data.alias.value); -// break; -// -// case YAML_ANCHOR_TOKEN: -// yaml_free(token.data.anchor.value); -// break; -// -// case YAML_TAG_TOKEN: -// yaml_free(token.data.tag.handle); -// yaml_free(token.data.tag.suffix); -// break; -// -// case YAML_SCALAR_TOKEN: -// yaml_free(token.data.scalar.value); -// break; -// -// default: -// break; -// } -// -// memset(token, 0, sizeof(yaml_token_t)); -//} -// -///* -// * Check if a string is a valid UTF-8 sequence. -// * -// * Check 'reader.c' for more details on UTF-8 encoding. -// */ -// -//static int -//yaml_check_utf8(yaml_char_t *start, size_t length) -//{ -// yaml_char_t *end = start+length; -// yaml_char_t *pointer = start; -// -// while (pointer < end) { -// unsigned char octet; -// unsigned int width; -// unsigned int value; -// size_t k; -// -// octet = pointer[0]; -// width = (octet & 0x80) == 0x00 ? 1 : -// (octet & 0xE0) == 0xC0 ? 2 : -// (octet & 0xF0) == 0xE0 ? 3 : -// (octet & 0xF8) == 0xF0 ? 4 : 0; -// value = (octet & 0x80) == 0x00 ? octet & 0x7F : -// (octet & 0xE0) == 0xC0 ? octet & 0x1F : -// (octet & 0xF0) == 0xE0 ? octet & 0x0F : -// (octet & 0xF8) == 0xF0 ? octet & 0x07 : 0; -// if (!width) return 0; -// if (pointer+width > end) return 0; -// for (k = 1; k < width; k ++) { -// octet = pointer[k]; -// if ((octet & 0xC0) != 0x80) return 0; -// value = (value << 6) + (octet & 0x3F); -// } -// if (!((width == 1) || -// (width == 2 && value >= 0x80) || -// (width == 3 && value >= 0x800) || -// (width == 4 && value >= 0x10000))) return 0; -// -// pointer += width; -// } -// -// return 1; -//} -// - -// Create STREAM-START. -func yaml_stream_start_event_initialize(event *yaml_event_t, encoding yaml_encoding_t) bool { - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - encoding: encoding, - } - return true -} - -// Create STREAM-END. -func yaml_stream_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - } - return true -} - -// Create DOCUMENT-START. -func yaml_document_start_event_initialize(event *yaml_event_t, version_directive *yaml_version_directive_t, - tag_directives []yaml_tag_directive_t, implicit bool) bool { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: implicit, - } - return true -} - -// Create DOCUMENT-END. -func yaml_document_end_event_initialize(event *yaml_event_t, implicit bool) bool { - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - implicit: implicit, - } - return true -} - -///* -// * Create ALIAS. -// */ -// -//YAML_DECLARE(int) -//yaml_alias_event_initialize(event *yaml_event_t, anchor *yaml_char_t) -//{ -// mark yaml_mark_t = { 0, 0, 0 } -// anchor_copy *yaml_char_t = NULL -// -// assert(event) // Non-NULL event object is expected. -// assert(anchor) // Non-NULL anchor is expected. -// -// if (!yaml_check_utf8(anchor, strlen((char *)anchor))) return 0 -// -// anchor_copy = yaml_strdup(anchor) -// if (!anchor_copy) -// return 0 -// -// ALIAS_EVENT_INIT(*event, anchor_copy, mark, mark) -// -// return 1 -//} - -// Create SCALAR. 
-func yaml_scalar_event_initialize(event *yaml_event_t, anchor, tag, value []byte, plain_implicit, quoted_implicit bool, style yaml_scalar_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - anchor: anchor, - tag: tag, - value: value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-START. -func yaml_sequence_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_sequence_style_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create SEQUENCE-END. -func yaml_sequence_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - } - return true -} - -// Create MAPPING-START. -func yaml_mapping_start_event_initialize(event *yaml_event_t, anchor, tag []byte, implicit bool, style yaml_mapping_style_t) bool { - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(style), - } - return true -} - -// Create MAPPING-END. -func yaml_mapping_end_event_initialize(event *yaml_event_t) bool { - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - } - return true -} - -// Destroy an event object. -func yaml_event_delete(event *yaml_event_t) { - *event = yaml_event_t{} -} - -///* -// * Create a document object. -// */ -// -//YAML_DECLARE(int) -//yaml_document_initialize(document *yaml_document_t, -// version_directive *yaml_version_directive_t, -// tag_directives_start *yaml_tag_directive_t, -// tag_directives_end *yaml_tag_directive_t, -// start_implicit int, end_implicit int) -//{ -// struct { -// error yaml_error_type_t -// } context -// struct { -// start *yaml_node_t -// end *yaml_node_t -// top *yaml_node_t -// } nodes = { NULL, NULL, NULL } -// version_directive_copy *yaml_version_directive_t = NULL -// struct { -// start *yaml_tag_directive_t -// end *yaml_tag_directive_t -// top *yaml_tag_directive_t -// } tag_directives_copy = { NULL, NULL, NULL } -// value yaml_tag_directive_t = { NULL, NULL } -// mark yaml_mark_t = { 0, 0, 0 } -// -// assert(document) // Non-NULL document object is expected. -// assert((tag_directives_start && tag_directives_end) || -// (tag_directives_start == tag_directives_end)) -// // Valid tag directives are expected. 
-// -// if (!STACK_INIT(&context, nodes, INITIAL_STACK_SIZE)) goto error -// -// if (version_directive) { -// version_directive_copy = yaml_malloc(sizeof(yaml_version_directive_t)) -// if (!version_directive_copy) goto error -// version_directive_copy.major = version_directive.major -// version_directive_copy.minor = version_directive.minor -// } -// -// if (tag_directives_start != tag_directives_end) { -// tag_directive *yaml_tag_directive_t -// if (!STACK_INIT(&context, tag_directives_copy, INITIAL_STACK_SIZE)) -// goto error -// for (tag_directive = tag_directives_start -// tag_directive != tag_directives_end; tag_directive ++) { -// assert(tag_directive.handle) -// assert(tag_directive.prefix) -// if (!yaml_check_utf8(tag_directive.handle, -// strlen((char *)tag_directive.handle))) -// goto error -// if (!yaml_check_utf8(tag_directive.prefix, -// strlen((char *)tag_directive.prefix))) -// goto error -// value.handle = yaml_strdup(tag_directive.handle) -// value.prefix = yaml_strdup(tag_directive.prefix) -// if (!value.handle || !value.prefix) goto error -// if (!PUSH(&context, tag_directives_copy, value)) -// goto error -// value.handle = NULL -// value.prefix = NULL -// } -// } -// -// DOCUMENT_INIT(*document, nodes.start, nodes.end, version_directive_copy, -// tag_directives_copy.start, tag_directives_copy.top, -// start_implicit, end_implicit, mark, mark) -// -// return 1 -// -//error: -// STACK_DEL(&context, nodes) -// yaml_free(version_directive_copy) -// while (!STACK_EMPTY(&context, tag_directives_copy)) { -// value yaml_tag_directive_t = POP(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// } -// STACK_DEL(&context, tag_directives_copy) -// yaml_free(value.handle) -// yaml_free(value.prefix) -// -// return 0 -//} -// -///* -// * Destroy a document object. -// */ -// -//YAML_DECLARE(void) -//yaml_document_delete(document *yaml_document_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// tag_directive *yaml_tag_directive_t -// -// context.error = YAML_NO_ERROR // Eliminate a compliler warning. -// -// assert(document) // Non-NULL document object is expected. -// -// while (!STACK_EMPTY(&context, document.nodes)) { -// node yaml_node_t = POP(&context, document.nodes) -// yaml_free(node.tag) -// switch (node.type) { -// case YAML_SCALAR_NODE: -// yaml_free(node.data.scalar.value) -// break -// case YAML_SEQUENCE_NODE: -// STACK_DEL(&context, node.data.sequence.items) -// break -// case YAML_MAPPING_NODE: -// STACK_DEL(&context, node.data.mapping.pairs) -// break -// default: -// assert(0) // Should not happen. -// } -// } -// STACK_DEL(&context, document.nodes) -// -// yaml_free(document.version_directive) -// for (tag_directive = document.tag_directives.start -// tag_directive != document.tag_directives.end -// tag_directive++) { -// yaml_free(tag_directive.handle) -// yaml_free(tag_directive.prefix) -// } -// yaml_free(document.tag_directives.start) -// -// memset(document, 0, sizeof(yaml_document_t)) -//} -// -///** -// * Get a document node. -// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_node(document *yaml_document_t, index int) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (index > 0 && document.nodes.start + index <= document.nodes.top) { -// return document.nodes.start + index - 1 -// } -// return NULL -//} -// -///** -// * Get the root object. 
-// */ -// -//YAML_DECLARE(yaml_node_t *) -//yaml_document_get_root_node(document *yaml_document_t) -//{ -// assert(document) // Non-NULL document object is expected. -// -// if (document.nodes.top != document.nodes.start) { -// return document.nodes.start -// } -// return NULL -//} -// -///* -// * Add a scalar node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_scalar(document *yaml_document_t, -// tag *yaml_char_t, value *yaml_char_t, length int, -// style yaml_scalar_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// value_copy *yaml_char_t = NULL -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// assert(value) // Non-NULL value is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SCALAR_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (length < 0) { -// length = strlen((char *)value) -// } -// -// if (!yaml_check_utf8(value, length)) goto error -// value_copy = yaml_malloc(length+1) -// if (!value_copy) goto error -// memcpy(value_copy, value, length) -// value_copy[length] = '\0' -// -// SCALAR_NODE_INIT(node, tag_copy, value_copy, length, style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// yaml_free(tag_copy) -// yaml_free(value_copy) -// -// return 0 -//} -// -///* -// * Add a sequence node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_sequence(document *yaml_document_t, -// tag *yaml_char_t, style yaml_sequence_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_item_t -// end *yaml_node_item_t -// top *yaml_node_item_t -// } items = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. -// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_SEQUENCE_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, items, INITIAL_STACK_SIZE)) goto error -// -// SEQUENCE_NODE_INIT(node, tag_copy, items.start, items.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, items) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Add a mapping node to a document. -// */ -// -//YAML_DECLARE(int) -//yaml_document_add_mapping(document *yaml_document_t, -// tag *yaml_char_t, style yaml_mapping_style_t) -//{ -// struct { -// error yaml_error_type_t -// } context -// mark yaml_mark_t = { 0, 0, 0 } -// tag_copy *yaml_char_t = NULL -// struct { -// start *yaml_node_pair_t -// end *yaml_node_pair_t -// top *yaml_node_pair_t -// } pairs = { NULL, NULL, NULL } -// node yaml_node_t -// -// assert(document) // Non-NULL document object is expected. 
-// -// if (!tag) { -// tag = (yaml_char_t *)YAML_DEFAULT_MAPPING_TAG -// } -// -// if (!yaml_check_utf8(tag, strlen((char *)tag))) goto error -// tag_copy = yaml_strdup(tag) -// if (!tag_copy) goto error -// -// if (!STACK_INIT(&context, pairs, INITIAL_STACK_SIZE)) goto error -// -// MAPPING_NODE_INIT(node, tag_copy, pairs.start, pairs.end, -// style, mark, mark) -// if (!PUSH(&context, document.nodes, node)) goto error -// -// return document.nodes.top - document.nodes.start -// -//error: -// STACK_DEL(&context, pairs) -// yaml_free(tag_copy) -// -// return 0 -//} -// -///* -// * Append an item to a sequence node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_sequence_item(document *yaml_document_t, -// sequence int, item int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// assert(document) // Non-NULL document is required. -// assert(sequence > 0 -// && document.nodes.start + sequence <= document.nodes.top) -// // Valid sequence id is required. -// assert(document.nodes.start[sequence-1].type == YAML_SEQUENCE_NODE) -// // A sequence node is required. -// assert(item > 0 && document.nodes.start + item <= document.nodes.top) -// // Valid item id is required. -// -// if (!PUSH(&context, -// document.nodes.start[sequence-1].data.sequence.items, item)) -// return 0 -// -// return 1 -//} -// -///* -// * Append a pair of a key and a value to a mapping node. -// */ -// -//YAML_DECLARE(int) -//yaml_document_append_mapping_pair(document *yaml_document_t, -// mapping int, key int, value int) -//{ -// struct { -// error yaml_error_type_t -// } context -// -// pair yaml_node_pair_t -// -// assert(document) // Non-NULL document is required. -// assert(mapping > 0 -// && document.nodes.start + mapping <= document.nodes.top) -// // Valid mapping id is required. -// assert(document.nodes.start[mapping-1].type == YAML_MAPPING_NODE) -// // A mapping node is required. -// assert(key > 0 && document.nodes.start + key <= document.nodes.top) -// // Valid key id is required. -// assert(value > 0 && document.nodes.start + value <= document.nodes.top) -// // Valid value id is required. -// -// pair.key = key -// pair.value = value -// -// if (!PUSH(&context, -// document.nodes.start[mapping-1].data.mapping.pairs, pair)) -// return 0 -// -// return 1 -//} -// -// diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/decode.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/decode.go deleted file mode 100644 index a098626..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/decode.go +++ /dev/null @@ -1,566 +0,0 @@ -package yaml - -import ( - "encoding/base64" - "fmt" - "reflect" - "strconv" - "time" -) - -const ( - documentNode = 1 << iota - mappingNode - sequenceNode - scalarNode - aliasNode -) - -type node struct { - kind int - line, column int - tag string - value string - implicit bool - children []*node - anchors map[string]*node -} - -// ---------------------------------------------------------------------------- -// Parser, produces a node tree out of a libyaml event stream. 
- -type parser struct { - parser yaml_parser_t - event yaml_event_t - doc *node -} - -func newParser(b []byte) *parser { - p := parser{} - if !yaml_parser_initialize(&p.parser) { - panic("Failed to initialize YAML emitter") - } - - if len(b) == 0 { - b = []byte{'\n'} - } - - yaml_parser_set_input_string(&p.parser, b) - - p.skip() - if p.event.typ != yaml_STREAM_START_EVENT { - panic("Expected stream start event, got " + strconv.Itoa(int(p.event.typ))) - } - p.skip() - return &p -} - -func (p *parser) destroy() { - if p.event.typ != yaml_NO_EVENT { - yaml_event_delete(&p.event) - } - yaml_parser_delete(&p.parser) -} - -func (p *parser) skip() { - if p.event.typ != yaml_NO_EVENT { - if p.event.typ == yaml_STREAM_END_EVENT { - fail("Attempted to go past the end of stream. Corrupted value?") - } - yaml_event_delete(&p.event) - } - if !yaml_parser_parse(&p.parser, &p.event) { - p.fail() - } -} - -func (p *parser) fail() { - var where string - var line int - if p.parser.problem_mark.line != 0 { - line = p.parser.problem_mark.line - } else if p.parser.context_mark.line != 0 { - line = p.parser.context_mark.line - } - if line != 0 { - where = "line " + strconv.Itoa(line) + ": " - } - var msg string - if len(p.parser.problem) > 0 { - msg = p.parser.problem - } else { - msg = "Unknown problem parsing YAML content" - } - fail(where + msg) -} - -func (p *parser) anchor(n *node, anchor []byte) { - if anchor != nil { - p.doc.anchors[string(anchor)] = n - } -} - -func (p *parser) parse() *node { - switch p.event.typ { - case yaml_SCALAR_EVENT: - return p.scalar() - case yaml_ALIAS_EVENT: - return p.alias() - case yaml_MAPPING_START_EVENT: - return p.mapping() - case yaml_SEQUENCE_START_EVENT: - return p.sequence() - case yaml_DOCUMENT_START_EVENT: - return p.document() - case yaml_STREAM_END_EVENT: - // Happens when attempting to decode an empty buffer. - return nil - default: - panic("Attempted to parse unknown event: " + strconv.Itoa(int(p.event.typ))) - } - panic("unreachable") -} - -func (p *parser) node(kind int) *node { - return &node{ - kind: kind, - line: p.event.start_mark.line, - column: p.event.start_mark.column, - } -} - -func (p *parser) document() *node { - n := p.node(documentNode) - n.anchors = make(map[string]*node) - p.doc = n - p.skip() - n.children = append(n.children, p.parse()) - if p.event.typ != yaml_DOCUMENT_END_EVENT { - panic("Expected end of document event but got " + strconv.Itoa(int(p.event.typ))) - } - p.skip() - return n -} - -func (p *parser) alias() *node { - n := p.node(aliasNode) - n.value = string(p.event.anchor) - p.skip() - return n -} - -func (p *parser) scalar() *node { - n := p.node(scalarNode) - n.value = string(p.event.value) - n.tag = string(p.event.tag) - n.implicit = p.event.implicit - p.anchor(n, p.event.anchor) - p.skip() - return n -} - -func (p *parser) sequence() *node { - n := p.node(sequenceNode) - p.anchor(n, p.event.anchor) - p.skip() - for p.event.typ != yaml_SEQUENCE_END_EVENT { - n.children = append(n.children, p.parse()) - } - p.skip() - return n -} - -func (p *parser) mapping() *node { - n := p.node(mappingNode) - p.anchor(n, p.event.anchor) - p.skip() - for p.event.typ != yaml_MAPPING_END_EVENT { - n.children = append(n.children, p.parse(), p.parse()) - } - p.skip() - return n -} - -// ---------------------------------------------------------------------------- -// Decoder, unmarshals a node into a provided value. 
- -type decoder struct { - doc *node - aliases map[string]bool -} - -func newDecoder() *decoder { - d := &decoder{} - d.aliases = make(map[string]bool) - return d -} - -// d.setter deals with setters and pointer dereferencing and initialization. -// -// It's a slightly convoluted case to handle properly: -// -// - nil pointers should be initialized, unless being set to nil -// - we don't know at this point yet what's the value to SetYAML() with. -// - we can't separate pointer deref/init and setter checking, because -// a setter may be found while going down a pointer chain. -// -// Thus, here is how it takes care of it: -// -// - out is provided as a pointer, so that it can be replaced. -// - when looking at a non-setter ptr, *out=ptr.Elem(), unless tag=!!null -// - when a setter is found, *out=interface{}, and a set() function is -// returned to call SetYAML() with the value of *out once it's defined. -// -func (d *decoder) setter(tag string, out *reflect.Value, good *bool) (set func()) { - if (*out).Kind() != reflect.Ptr && (*out).CanAddr() { - setter, _ := (*out).Addr().Interface().(Setter) - if setter != nil { - var arg interface{} - *out = reflect.ValueOf(&arg).Elem() - return func() { - *good = setter.SetYAML(shortTag(tag), arg) - } - } - } - again := true - for again { - again = false - setter, _ := (*out).Interface().(Setter) - if tag != yaml_NULL_TAG || setter != nil { - if pv := (*out); pv.Kind() == reflect.Ptr { - if pv.IsNil() { - *out = reflect.New(pv.Type().Elem()).Elem() - pv.Set((*out).Addr()) - } else { - *out = pv.Elem() - } - setter, _ = pv.Interface().(Setter) - again = true - } - } - if setter != nil { - var arg interface{} - *out = reflect.ValueOf(&arg).Elem() - return func() { - *good = setter.SetYAML(shortTag(tag), arg) - } - } - } - return nil -} - -func (d *decoder) unmarshal(n *node, out reflect.Value) (good bool) { - switch n.kind { - case documentNode: - good = d.document(n, out) - case scalarNode: - good = d.scalar(n, out) - case aliasNode: - good = d.alias(n, out) - case mappingNode: - good = d.mapping(n, out) - case sequenceNode: - good = d.sequence(n, out) - default: - panic("Internal error: unknown node kind: " + strconv.Itoa(n.kind)) - } - return -} - -func (d *decoder) document(n *node, out reflect.Value) (good bool) { - if len(n.children) == 1 { - d.doc = n - d.unmarshal(n.children[0], out) - return true - } - return false -} - -func (d *decoder) alias(n *node, out reflect.Value) (good bool) { - an, ok := d.doc.anchors[n.value] - if !ok { - fail("Unknown anchor '" + n.value + "' referenced") - } - if d.aliases[n.value] { - fail("Anchor '" + n.value + "' value contains itself") - } - d.aliases[n.value] = true - good = d.unmarshal(an, out) - delete(d.aliases, n.value) - return good -} - -var zeroValue reflect.Value - -func resetMap(out reflect.Value) { - for _, k := range out.MapKeys() { - out.SetMapIndex(k, zeroValue) - } -} - -var durationType = reflect.TypeOf(time.Duration(0)) - -func (d *decoder) scalar(n *node, out reflect.Value) (good bool) { - var tag string - var resolved interface{} - if n.tag == "" && !n.implicit { - tag = yaml_STR_TAG - resolved = n.value - } else { - tag, resolved = resolve(n.tag, n.value) - if tag == yaml_BINARY_TAG { - data, err := base64.StdEncoding.DecodeString(resolved.(string)) - if err != nil { - fail("!!binary value contains invalid base64 data") - } - resolved = string(data) - } - } - if set := d.setter(tag, &out, &good); set != nil { - defer set() - } - if resolved == nil { - if out.Kind() == reflect.Map && 
!out.CanAddr() { - resetMap(out) - } else { - out.Set(reflect.Zero(out.Type())) - } - good = true - return - } - switch out.Kind() { - case reflect.String: - if tag == yaml_BINARY_TAG { - out.SetString(resolved.(string)) - good = true - } else if resolved != nil { - out.SetString(n.value) - good = true - } - case reflect.Interface: - if resolved == nil { - out.Set(reflect.Zero(out.Type())) - } else { - out.Set(reflect.ValueOf(resolved)) - } - good = true - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - switch resolved := resolved.(type) { - case int: - if !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - good = true - } - case int64: - if !out.OverflowInt(resolved) { - out.SetInt(resolved) - good = true - } - case float64: - if resolved < 1<<63-1 && !out.OverflowInt(int64(resolved)) { - out.SetInt(int64(resolved)) - good = true - } - case string: - if out.Type() == durationType { - d, err := time.ParseDuration(resolved) - if err == nil { - out.SetInt(int64(d)) - good = true - } - } - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - switch resolved := resolved.(type) { - case int: - if resolved >= 0 { - out.SetUint(uint64(resolved)) - good = true - } - case int64: - if resolved >= 0 { - out.SetUint(uint64(resolved)) - good = true - } - case float64: - if resolved < 1<<64-1 && !out.OverflowUint(uint64(resolved)) { - out.SetUint(uint64(resolved)) - good = true - } - } - case reflect.Bool: - switch resolved := resolved.(type) { - case bool: - out.SetBool(resolved) - good = true - } - case reflect.Float32, reflect.Float64: - switch resolved := resolved.(type) { - case int: - out.SetFloat(float64(resolved)) - good = true - case int64: - out.SetFloat(float64(resolved)) - good = true - case float64: - out.SetFloat(resolved) - good = true - } - case reflect.Ptr: - if out.Type().Elem() == reflect.TypeOf(resolved) { - elem := reflect.New(out.Type().Elem()) - elem.Elem().Set(reflect.ValueOf(resolved)) - out.Set(elem) - good = true - } - } - return good -} - -func settableValueOf(i interface{}) reflect.Value { - v := reflect.ValueOf(i) - sv := reflect.New(v.Type()).Elem() - sv.Set(v) - return sv -} - -func (d *decoder) sequence(n *node, out reflect.Value) (good bool) { - if set := d.setter(yaml_SEQ_TAG, &out, &good); set != nil { - defer set() - } - var iface reflect.Value - if out.Kind() == reflect.Interface { - // No type hints. Will have to use a generic sequence. - iface = out - out = settableValueOf(make([]interface{}, 0)) - } - - if out.Kind() != reflect.Slice { - return false - } - et := out.Type().Elem() - - l := len(n.children) - for i := 0; i < l; i++ { - e := reflect.New(et).Elem() - if ok := d.unmarshal(n.children[i], e); ok { - out.Set(reflect.Append(out, e)) - } - } - if iface.IsValid() { - iface.Set(out) - } - return true -} - -func (d *decoder) mapping(n *node, out reflect.Value) (good bool) { - if set := d.setter(yaml_MAP_TAG, &out, &good); set != nil { - defer set() - } - if out.Kind() == reflect.Struct { - return d.mappingStruct(n, out) - } - - if out.Kind() == reflect.Interface { - // No type hints. Will have to use a generic map. 
- iface := out - out = settableValueOf(make(map[interface{}]interface{})) - iface.Set(out) - } - - if out.Kind() != reflect.Map { - return false - } - outt := out.Type() - kt := outt.Key() - et := outt.Elem() - - if out.IsNil() { - out.Set(reflect.MakeMap(outt)) - } - l := len(n.children) - for i := 0; i < l; i += 2 { - if isMerge(n.children[i]) { - d.merge(n.children[i+1], out) - continue - } - k := reflect.New(kt).Elem() - if d.unmarshal(n.children[i], k) { - kkind := k.Kind() - if kkind == reflect.Interface { - kkind = k.Elem().Kind() - } - if kkind == reflect.Map || kkind == reflect.Slice { - fail(fmt.Sprintf("invalid map key: %#v", k.Interface())) - } - e := reflect.New(et).Elem() - if d.unmarshal(n.children[i+1], e) { - out.SetMapIndex(k, e) - } - } - } - return true -} - -func (d *decoder) mappingStruct(n *node, out reflect.Value) (good bool) { - sinfo, err := getStructInfo(out.Type()) - if err != nil { - panic(err) - } - name := settableValueOf("") - l := len(n.children) - for i := 0; i < l; i += 2 { - ni := n.children[i] - if isMerge(ni) { - d.merge(n.children[i+1], out) - continue - } - if !d.unmarshal(ni, name) { - continue - } - if info, ok := sinfo.FieldsMap[name.String()]; ok { - var field reflect.Value - if info.Inline == nil { - field = out.Field(info.Num) - } else { - field = out.FieldByIndex(info.Inline) - } - d.unmarshal(n.children[i+1], field) - } - } - return true -} - -func (d *decoder) merge(n *node, out reflect.Value) { - const wantMap = "map merge requires map or sequence of maps as the value" - switch n.kind { - case mappingNode: - d.unmarshal(n, out) - case aliasNode: - an, ok := d.doc.anchors[n.value] - if ok && an.kind != mappingNode { - fail(wantMap) - } - d.unmarshal(n, out) - case sequenceNode: - // Step backwards as earlier nodes take precedence. - for i := len(n.children) - 1; i >= 0; i-- { - ni := n.children[i] - if ni.kind == aliasNode { - an, ok := d.doc.anchors[ni.value] - if ok && an.kind != mappingNode { - fail(wantMap) - } - } else if ni.kind != mappingNode { - fail(wantMap) - } - d.unmarshal(ni, out) - } - default: - fail(wantMap) - } -} - -func isMerge(n *node) bool { - return n.kind == scalarNode && n.value == "<<" && (n.implicit == true || n.tag == yaml_MERGE_TAG) -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go deleted file mode 100644 index 332ec87..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go +++ /dev/null @@ -1,703 +0,0 @@ -package yaml_test - -import ( - . 
"gopkg.in/check.v1" - "github.com/remind101/deploy/Godeps/_workspace/src/gopkg.in/yaml.v1" - "math" - "reflect" - "strings" - "time" -) - -var unmarshalIntTest = 123 - -var unmarshalTests = []struct { - data string - value interface{} -}{ - { - "", - &struct{}{}, - }, { - "{}", &struct{}{}, - }, { - "v: hi", - map[string]string{"v": "hi"}, - }, { - "v: hi", map[string]interface{}{"v": "hi"}, - }, { - "v: true", - map[string]string{"v": "true"}, - }, { - "v: true", - map[string]interface{}{"v": true}, - }, { - "v: 10", - map[string]interface{}{"v": 10}, - }, { - "v: 0b10", - map[string]interface{}{"v": 2}, - }, { - "v: 0xA", - map[string]interface{}{"v": 10}, - }, { - "v: 4294967296", - map[string]int64{"v": 4294967296}, - }, { - "v: 0.1", - map[string]interface{}{"v": 0.1}, - }, { - "v: .1", - map[string]interface{}{"v": 0.1}, - }, { - "v: .Inf", - map[string]interface{}{"v": math.Inf(+1)}, - }, { - "v: -.Inf", - map[string]interface{}{"v": math.Inf(-1)}, - }, { - "v: -10", - map[string]interface{}{"v": -10}, - }, { - "v: -.1", - map[string]interface{}{"v": -0.1}, - }, - - // Simple values. - { - "123", - &unmarshalIntTest, - }, - - // Floats from spec - { - "canonical: 6.8523e+5", - map[string]interface{}{"canonical": 6.8523e+5}, - }, { - "expo: 685.230_15e+03", - map[string]interface{}{"expo": 685.23015e+03}, - }, { - "fixed: 685_230.15", - map[string]interface{}{"fixed": 685230.15}, - }, { - "neginf: -.inf", - map[string]interface{}{"neginf": math.Inf(-1)}, - }, { - "fixed: 685_230.15", - map[string]float64{"fixed": 685230.15}, - }, - //{"sexa: 190:20:30.15", map[string]interface{}{"sexa": 0}}, // Unsupported - //{"notanum: .NaN", map[string]interface{}{"notanum": math.NaN()}}, // Equality of NaN fails. - - // Bools from spec - { - "canonical: y", - map[string]interface{}{"canonical": true}, - }, { - "answer: NO", - map[string]interface{}{"answer": false}, - }, { - "logical: True", - map[string]interface{}{"logical": true}, - }, { - "option: on", - map[string]interface{}{"option": true}, - }, { - "option: on", - map[string]bool{"option": true}, - }, - // Ints from spec - { - "canonical: 685230", - map[string]interface{}{"canonical": 685230}, - }, { - "decimal: +685_230", - map[string]interface{}{"decimal": 685230}, - }, { - "octal: 02472256", - map[string]interface{}{"octal": 685230}, - }, { - "hexa: 0x_0A_74_AE", - map[string]interface{}{"hexa": 685230}, - }, { - "bin: 0b1010_0111_0100_1010_1110", - map[string]interface{}{"bin": 685230}, - }, { - "bin: -0b101010", - map[string]interface{}{"bin": -42}, - }, { - "decimal: +685_230", - map[string]int{"decimal": 685230}, - }, - - //{"sexa: 190:20:30", map[string]interface{}{"sexa": 0}}, // Unsupported - - // Nulls from spec - { - "empty:", - map[string]interface{}{"empty": nil}, - }, { - "canonical: ~", - map[string]interface{}{"canonical": nil}, - }, { - "english: null", - map[string]interface{}{"english": nil}, - }, { - "~: null key", - map[interface{}]string{nil: "null key"}, - }, { - "empty:", - map[string]*bool{"empty": nil}, - }, - - // Flow sequence - { - "seq: [A,B]", - map[string]interface{}{"seq": []interface{}{"A", "B"}}, - }, { - "seq: [A,B,C,]", - map[string][]string{"seq": []string{"A", "B", "C"}}, - }, { - "seq: [A,1,C]", - map[string][]string{"seq": []string{"A", "1", "C"}}, - }, { - "seq: [A,1,C]", - map[string][]int{"seq": []int{1}}, - }, { - "seq: [A,1,C]", - map[string]interface{}{"seq": []interface{}{"A", 1, "C"}}, - }, - // Block sequence - { - "seq:\n - A\n - B", - map[string]interface{}{"seq": []interface{}{"A", 
"B"}}, - }, { - "seq:\n - A\n - B\n - C", - map[string][]string{"seq": []string{"A", "B", "C"}}, - }, { - "seq:\n - A\n - 1\n - C", - map[string][]string{"seq": []string{"A", "1", "C"}}, - }, { - "seq:\n - A\n - 1\n - C", - map[string][]int{"seq": []int{1}}, - }, { - "seq:\n - A\n - 1\n - C", - map[string]interface{}{"seq": []interface{}{"A", 1, "C"}}, - }, - - // Literal block scalar - { - "scalar: | # Comment\n\n literal\n\n \ttext\n\n", - map[string]string{"scalar": "\nliteral\n\n\ttext\n"}, - }, - - // Folded block scalar - { - "scalar: > # Comment\n\n folded\n line\n \n next\n line\n * one\n * two\n\n last\n line\n\n", - map[string]string{"scalar": "\nfolded line\nnext line\n * one\n * two\n\nlast line\n"}, - }, - - // Map inside interface with no type hints. - { - "a: {b: c}", - map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}}, - }, - - // Structs and type conversions. - { - "hello: world", - &struct{ Hello string }{"world"}, - }, { - "a: {b: c}", - &struct{ A struct{ B string } }{struct{ B string }{"c"}}, - }, { - "a: {b: c}", - &struct{ A *struct{ B string } }{&struct{ B string }{"c"}}, - }, { - "a: {b: c}", - &struct{ A map[string]string }{map[string]string{"b": "c"}}, - }, { - "a: {b: c}", - &struct{ A *map[string]string }{&map[string]string{"b": "c"}}, - }, { - "a:", - &struct{ A map[string]string }{}, - }, { - "a: 1", - &struct{ A int }{1}, - }, { - "a: 1", - &struct{ A float64 }{1}, - }, { - "a: 1.0", - &struct{ A int }{1}, - }, { - "a: 1.0", - &struct{ A uint }{1}, - }, { - "a: [1, 2]", - &struct{ A []int }{[]int{1, 2}}, - }, { - "a: 1", - &struct{ B int }{0}, - }, { - "a: 1", - &struct { - B int "a" - }{1}, - }, { - "a: y", - &struct{ A bool }{true}, - }, - - // Some cross type conversions - { - "v: 42", - map[string]uint{"v": 42}, - }, { - "v: -42", - map[string]uint{}, - }, { - "v: 4294967296", - map[string]uint64{"v": 4294967296}, - }, { - "v: -4294967296", - map[string]uint64{}, - }, - - // Overflow cases. - { - "v: 4294967297", - map[string]int32{}, - }, { - "v: 128", - map[string]int8{}, - }, - - // Quoted values. - { - "'1': '\"2\"'", - map[interface{}]interface{}{"1": "\"2\""}, - }, { - "v:\n- A\n- 'B\n\n C'\n", - map[string][]string{"v": []string{"A", "B\nC"}}, - }, - - // Explicit tags. - { - "v: !!float '1.1'", - map[string]interface{}{"v": 1.1}, - }, { - "v: !!null ''", - map[string]interface{}{"v": nil}, - }, { - "%TAG !y! tag:yaml.org,2002:\n---\nv: !y!int '1'", - map[string]interface{}{"v": 1}, - }, - - // Anchors and aliases. 
- { - "a: &x 1\nb: &y 2\nc: *x\nd: *y\n", - &struct{ A, B, C, D int }{1, 2, 1, 2}, - }, { - "a: &a {c: 1}\nb: *a", - &struct { - A, B struct { - C int - } - }{struct{ C int }{1}, struct{ C int }{1}}, - }, { - "a: &a [1, 2]\nb: *a", - &struct{ B []int }{[]int{1, 2}}, - }, - - // Bug #1133337 - { - "foo: ''", - map[string]*string{"foo": new(string)}, - }, { - "foo: null", - map[string]string{"foo": ""}, - }, { - "foo: null", - map[string]interface{}{"foo": nil}, - }, - - // Ignored field - { - "a: 1\nb: 2\n", - &struct { - A int - B int "-" - }{1, 0}, - }, - - // Bug #1191981 - { - "" + - "%YAML 1.1\n" + - "--- !!str\n" + - `"Generic line break (no glyph)\n\` + "\n" + - ` Generic line break (glyphed)\n\` + "\n" + - ` Line separator\u2028\` + "\n" + - ` Paragraph separator\u2029"` + "\n", - "" + - "Generic line break (no glyph)\n" + - "Generic line break (glyphed)\n" + - "Line separator\u2028Paragraph separator\u2029", - }, - - // Struct inlining - { - "a: 1\nb: 2\nc: 3\n", - &struct { - A int - C inlineB `yaml:",inline"` - }{1, inlineB{2, inlineC{3}}}, - }, - - // bug 1243827 - { - "a: -b_c", - map[string]interface{}{"a": "-b_c"}, - }, - { - "a: +b_c", - map[string]interface{}{"a": "+b_c"}, - }, - { - "a: 50cent_of_dollar", - map[string]interface{}{"a": "50cent_of_dollar"}, - }, - - // Duration - { - "a: 3s", - map[string]time.Duration{"a": 3 * time.Second}, - }, - - // Issue #24. - { - "a: ", - map[string]string{"a": ""}, - }, - - // Base 60 floats are obsolete and unsupported. - { - "a: 1:1\n", - map[string]string{"a": "1:1"}, - }, - - // Binary data. - { - "a: !!binary gIGC\n", - map[string]string{"a": "\x80\x81\x82"}, - }, { - "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n", - map[string]string{"a": strings.Repeat("\x90", 54)}, - }, { - "a: !!binary |\n " + strings.Repeat("A", 70) + "\n ==\n", - map[string]string{"a": strings.Repeat("\x00", 52)}, - }, -} - -type inlineB struct { - B int - inlineC `yaml:",inline"` -} - -type inlineC struct { - C int -} - -func (s *S) TestUnmarshal(c *C) { - for i, item := range unmarshalTests { - t := reflect.ValueOf(item.value).Type() - var value interface{} - switch t.Kind() { - case reflect.Map: - value = reflect.MakeMap(t).Interface() - case reflect.String: - t := reflect.ValueOf(item.value).Type() - v := reflect.New(t) - value = v.Interface() - default: - pt := reflect.ValueOf(item.value).Type() - pv := reflect.New(pt.Elem()) - value = pv.Interface() - } - err := yaml.Unmarshal([]byte(item.data), value) - c.Assert(err, IsNil, Commentf("Item #%d", i)) - if t.Kind() == reflect.String { - c.Assert(*value.(*string), Equals, item.value, Commentf("Item #%d", i)) - } else { - c.Assert(value, DeepEquals, item.value, Commentf("Item #%d", i)) - } - } -} - -func (s *S) TestUnmarshalNaN(c *C) { - value := map[string]interface{}{} - err := yaml.Unmarshal([]byte("notanum: .NaN"), &value) - c.Assert(err, IsNil) - c.Assert(math.IsNaN(value["notanum"].(float64)), Equals, true) -} - -var unmarshalErrorTests = []struct { - data, error string -}{ - {"v: !!float 'error'", "YAML error: cannot decode !!str `error` as a !!float"}, - {"v: [A,", "YAML error: line 1: did not find expected node content"}, - {"v:\n- [A,", "YAML error: line 2: did not find expected node content"}, - {"a: *b\n", "YAML error: Unknown anchor 'b' referenced"}, - {"a: &a\n b: *a\n", "YAML error: Anchor 'a' value contains itself"}, - {"value: -", "YAML error: block sequence entries are not allowed in this context"}, - {"a: !!binary ==", "YAML error: !!binary value contains invalid base64 
data"}, - {"{[.]}", `YAML error: invalid map key: \[\]interface \{\}\{"\."\}`}, - {"{{.}}", `YAML error: invalid map key: map\[interface\ \{\}\]interface \{\}\{".":interface \{\}\(nil\)\}`}, -} - -func (s *S) TestUnmarshalErrors(c *C) { - for _, item := range unmarshalErrorTests { - var value interface{} - err := yaml.Unmarshal([]byte(item.data), &value) - c.Assert(err, ErrorMatches, item.error, Commentf("Partial unmarshal: %#v", value)) - } -} - -var setterTests = []struct { - data, tag string - value interface{} -}{ - {"_: {hi: there}", "!!map", map[interface{}]interface{}{"hi": "there"}}, - {"_: [1,A]", "!!seq", []interface{}{1, "A"}}, - {"_: 10", "!!int", 10}, - {"_: null", "!!null", nil}, - {`_: BAR!`, "!!str", "BAR!"}, - {`_: "BAR!"`, "!!str", "BAR!"}, - {"_: !!foo 'BAR!'", "!!foo", "BAR!"}, -} - -var setterResult = map[int]bool{} - -type typeWithSetter struct { - tag string - value interface{} -} - -func (o *typeWithSetter) SetYAML(tag string, value interface{}) (ok bool) { - o.tag = tag - o.value = value - if i, ok := value.(int); ok { - if result, ok := setterResult[i]; ok { - return result - } - } - return true -} - -type setterPointerType struct { - Field *typeWithSetter "_" -} - -type setterValueType struct { - Field typeWithSetter "_" -} - -func (s *S) TestUnmarshalWithPointerSetter(c *C) { - for _, item := range setterTests { - obj := &setterPointerType{} - err := yaml.Unmarshal([]byte(item.data), obj) - c.Assert(err, IsNil) - c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value)) - c.Assert(obj.Field.tag, Equals, item.tag) - c.Assert(obj.Field.value, DeepEquals, item.value) - } -} - -func (s *S) TestUnmarshalWithValueSetter(c *C) { - for _, item := range setterTests { - obj := &setterValueType{} - err := yaml.Unmarshal([]byte(item.data), obj) - c.Assert(err, IsNil) - c.Assert(obj.Field, NotNil, Commentf("Pointer not initialized (%#v)", item.value)) - c.Assert(obj.Field.tag, Equals, item.tag) - c.Assert(obj.Field.value, DeepEquals, item.value) - } -} - -func (s *S) TestUnmarshalWholeDocumentWithSetter(c *C) { - obj := &typeWithSetter{} - err := yaml.Unmarshal([]byte(setterTests[0].data), obj) - c.Assert(err, IsNil) - c.Assert(obj.tag, Equals, setterTests[0].tag) - value, ok := obj.value.(map[interface{}]interface{}) - c.Assert(ok, Equals, true) - c.Assert(value["_"], DeepEquals, setterTests[0].value) -} - -func (s *S) TestUnmarshalWithFalseSetterIgnoresValue(c *C) { - setterResult[2] = false - setterResult[4] = false - defer func() { - delete(setterResult, 2) - delete(setterResult, 4) - }() - - m := map[string]*typeWithSetter{} - data := `{abc: 1, def: 2, ghi: 3, jkl: 4}` - err := yaml.Unmarshal([]byte(data), m) - c.Assert(err, IsNil) - c.Assert(m["abc"], NotNil) - c.Assert(m["def"], IsNil) - c.Assert(m["ghi"], NotNil) - c.Assert(m["jkl"], IsNil) - - c.Assert(m["abc"].value, Equals, 1) - c.Assert(m["ghi"].value, Equals, 3) -} - -// From http://yaml.org/type/merge.html -var mergeTests = ` -anchors: - - &CENTER { "x": 1, "y": 2 } - - &LEFT { "x": 0, "y": 2 } - - &BIG { "r": 10 } - - &SMALL { "r": 1 } - -# All the following maps are equal: - -plain: - # Explicit keys - "x": 1 - "y": 2 - "r": 10 - label: center/big - -mergeOne: - # Merge one map - << : *CENTER - "r": 10 - label: center/big - -mergeMultiple: - # Merge multiple maps - << : [ *CENTER, *BIG ] - label: center/big - -override: - # Override - << : [ *BIG, *LEFT, *SMALL ] - "x": 1 - label: center/big - -shortTag: - # Explicit short merge tag - !!merge "<<" : [ *CENTER, *BIG ] - label: 
center/big - -longTag: - # Explicit merge long tag - ! "<<" : [ *CENTER, *BIG ] - label: center/big - -inlineMap: - # Inlined map - << : {"x": 1, "y": 2, "r": 10} - label: center/big - -inlineSequenceMap: - # Inlined map in sequence - << : [ *CENTER, {"r": 10} ] - label: center/big -` - -func (s *S) TestMerge(c *C) { - var want = map[interface{}]interface{}{ - "x": 1, - "y": 2, - "r": 10, - "label": "center/big", - } - - var m map[string]interface{} - err := yaml.Unmarshal([]byte(mergeTests), &m) - c.Assert(err, IsNil) - for name, test := range m { - if name == "anchors" { - continue - } - c.Assert(test, DeepEquals, want, Commentf("test %q failed", name)) - } -} - -func (s *S) TestMergeStruct(c *C) { - type Data struct { - X, Y, R int - Label string - } - want := Data{1, 2, 10, "center/big"} - - var m map[string]Data - err := yaml.Unmarshal([]byte(mergeTests), &m) - c.Assert(err, IsNil) - for name, test := range m { - if name == "anchors" { - continue - } - c.Assert(test, Equals, want, Commentf("test %q failed", name)) - } -} - -var unmarshalNullTests = []func() interface{}{ - func() interface{} { var v interface{}; v = "v"; return &v }, - func() interface{} { var s = "s"; return &s }, - func() interface{} { var s = "s"; sptr := &s; return &sptr }, - func() interface{} { var i = 1; return &i }, - func() interface{} { var i = 1; iptr := &i; return &iptr }, - func() interface{} { m := map[string]int{"s": 1}; return &m }, - func() interface{} { m := map[string]int{"s": 1}; return m }, -} - -func (s *S) TestUnmarshalNull(c *C) { - for _, test := range unmarshalNullTests { - item := test() - zero := reflect.Zero(reflect.TypeOf(item).Elem()).Interface() - err := yaml.Unmarshal([]byte("null"), item) - c.Assert(err, IsNil) - if reflect.TypeOf(item).Kind() == reflect.Map { - c.Assert(reflect.ValueOf(item).Interface(), DeepEquals, reflect.MakeMap(reflect.TypeOf(item)).Interface()) - } else { - c.Assert(reflect.ValueOf(item).Elem().Interface(), DeepEquals, zero) - } - } -} - -//var data []byte -//func init() { -// var err error -// data, err = ioutil.ReadFile("/tmp/file.yaml") -// if err != nil { -// panic(err) -// } -//} -// -//func (s *S) BenchmarkUnmarshal(c *C) { -// var err error -// for i := 0; i < c.N; i++ { -// var v map[string]interface{} -// err = yaml.Unmarshal(data, &v) -// } -// if err != nil { -// panic(err) -// } -//} -// -//func (s *S) BenchmarkMarshal(c *C) { -// var v map[string]interface{} -// yaml.Unmarshal(data, &v) -// c.ResetTimer() -// for i := 0; i < c.N; i++ { -// yaml.Marshal(&v) -// } -//} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/emitterc.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/emitterc.go deleted file mode 100644 index 9b3dc4a..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/emitterc.go +++ /dev/null @@ -1,1685 +0,0 @@ -package yaml - -import ( - "bytes" -) - -// Flush the buffer if needed. -func flush(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) { - return yaml_emitter_flush(emitter) - } - return true -} - -// Put a character to the output buffer. -func put(emitter *yaml_emitter_t, value byte) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - emitter.buffer[emitter.buffer_pos] = value - emitter.buffer_pos++ - emitter.column++ - return true -} - -// Put a line break to the output buffer. 
-func put_break(emitter *yaml_emitter_t) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - switch emitter.line_break { - case yaml_CR_BREAK: - emitter.buffer[emitter.buffer_pos] = '\r' - emitter.buffer_pos += 1 - case yaml_LN_BREAK: - emitter.buffer[emitter.buffer_pos] = '\n' - emitter.buffer_pos += 1 - case yaml_CRLN_BREAK: - emitter.buffer[emitter.buffer_pos+0] = '\r' - emitter.buffer[emitter.buffer_pos+1] = '\n' - emitter.buffer_pos += 2 - default: - panic("unknown line break setting") - } - emitter.column = 0 - emitter.line++ - return true -} - -// Copy a character from a string into buffer. -func write(emitter *yaml_emitter_t, s []byte, i *int) bool { - if emitter.buffer_pos+5 >= len(emitter.buffer) && !yaml_emitter_flush(emitter) { - return false - } - p := emitter.buffer_pos - w := width(s[*i]) - switch w { - case 4: - emitter.buffer[p+3] = s[*i+3] - fallthrough - case 3: - emitter.buffer[p+2] = s[*i+2] - fallthrough - case 2: - emitter.buffer[p+1] = s[*i+1] - fallthrough - case 1: - emitter.buffer[p+0] = s[*i+0] - default: - panic("unknown character width") - } - emitter.column++ - emitter.buffer_pos += w - *i += w - return true -} - -// Write a whole string into buffer. -func write_all(emitter *yaml_emitter_t, s []byte) bool { - for i := 0; i < len(s); { - if !write(emitter, s, &i) { - return false - } - } - return true -} - -// Copy a line break character from a string into buffer. -func write_break(emitter *yaml_emitter_t, s []byte, i *int) bool { - if s[*i] == '\n' { - if !put_break(emitter) { - return false - } - *i++ - } else { - if !write(emitter, s, i) { - return false - } - emitter.column = 0 - emitter.line++ - } - return true -} - -// Set an emitter error and return false. -func yaml_emitter_set_emitter_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_EMITTER_ERROR - emitter.problem = problem - return false -} - -// Emit an event. -func yaml_emitter_emit(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.events = append(emitter.events, *event) - for !yaml_emitter_need_more_events(emitter) { - event := &emitter.events[emitter.events_head] - if !yaml_emitter_analyze_event(emitter, event) { - return false - } - if !yaml_emitter_state_machine(emitter, event) { - return false - } - yaml_event_delete(event) - emitter.events_head++ - } - return true -} - -// Check if we need to accumulate more events before emitting. 
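// Standalone illustration (assumed helper, mirroring what the emitter's write()
// above relies on): one UTF-8 encoded rune is copied at a time, and its byte
// width is derived from the lead byte.
package main

import "fmt"

func leadByteWidth(b byte) int {
	switch {
	case b&0x80 == 0x00:
		return 1 // ASCII
	case b&0xE0 == 0xC0:
		return 2
	case b&0xF0 == 0xE0:
		return 3
	case b&0xF8 == 0xF0:
		return 4
	}
	return 0 // continuation byte or invalid lead byte
}

func main() {
	s := []byte("aé€")
	for i := 0; i < len(s); i += leadByteWidth(s[i]) {
		fmt.Printf("rune starting at byte %d is %d bytes wide\n", i, leadByteWidth(s[i]))
	}
}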
-// -// We accumulate extra -// - 1 event for DOCUMENT-START -// - 2 events for SEQUENCE-START -// - 3 events for MAPPING-START -// -func yaml_emitter_need_more_events(emitter *yaml_emitter_t) bool { - if emitter.events_head == len(emitter.events) { - return true - } - var accumulate int - switch emitter.events[emitter.events_head].typ { - case yaml_DOCUMENT_START_EVENT: - accumulate = 1 - break - case yaml_SEQUENCE_START_EVENT: - accumulate = 2 - break - case yaml_MAPPING_START_EVENT: - accumulate = 3 - break - default: - return false - } - if len(emitter.events)-emitter.events_head > accumulate { - return false - } - var level int - for i := emitter.events_head; i < len(emitter.events); i++ { - switch emitter.events[i].typ { - case yaml_STREAM_START_EVENT, yaml_DOCUMENT_START_EVENT, yaml_SEQUENCE_START_EVENT, yaml_MAPPING_START_EVENT: - level++ - case yaml_STREAM_END_EVENT, yaml_DOCUMENT_END_EVENT, yaml_SEQUENCE_END_EVENT, yaml_MAPPING_END_EVENT: - level-- - } - if level == 0 { - return false - } - } - return true -} - -// Append a directive to the directives stack. -func yaml_emitter_append_tag_directive(emitter *yaml_emitter_t, value *yaml_tag_directive_t, allow_duplicates bool) bool { - for i := 0; i < len(emitter.tag_directives); i++ { - if bytes.Equal(value.handle, emitter.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_emitter_set_emitter_error(emitter, "duplicate %TAG directive") - } - } - - // [Go] Do we actually need to copy this given garbage collection - // and the lack of deallocating destructors? - tag_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(tag_copy.handle, value.handle) - copy(tag_copy.prefix, value.prefix) - emitter.tag_directives = append(emitter.tag_directives, tag_copy) - return true -} - -// Increase the indentation level. -func yaml_emitter_increase_indent(emitter *yaml_emitter_t, flow, indentless bool) bool { - emitter.indents = append(emitter.indents, emitter.indent) - if emitter.indent < 0 { - if flow { - emitter.indent = emitter.best_indent - } else { - emitter.indent = 0 - } - } else if !indentless { - emitter.indent += emitter.best_indent - } - return true -} - -// State dispatcher. 
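// Hedged sketch (simplified, with an assumed event type) of the lookahead rule
// implemented by yaml_emitter_need_more_events below: a buffered collection is
// only ready to emit once its START event has a matching END at nesting level 0.
package main

import "fmt"

type eventKind int

const (
	startEvent eventKind = iota
	endEvent
	scalarEvent
)

// collectionComplete reports whether events[0] (a START) is closed within the slice.
func collectionComplete(events []eventKind) bool {
	level := 0
	for _, e := range events {
		switch e {
		case startEvent:
			level++
		case endEvent:
			level--
		}
		if level == 0 {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(collectionComplete([]eventKind{startEvent, scalarEvent}))           // false: still open
	fmt.Println(collectionComplete([]eventKind{startEvent, scalarEvent, endEvent})) // true
}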
-func yaml_emitter_state_machine(emitter *yaml_emitter_t, event *yaml_event_t) bool { - switch emitter.state { - default: - case yaml_EMIT_STREAM_START_STATE: - return yaml_emitter_emit_stream_start(emitter, event) - - case yaml_EMIT_FIRST_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, true) - - case yaml_EMIT_DOCUMENT_START_STATE: - return yaml_emitter_emit_document_start(emitter, event, false) - - case yaml_EMIT_DOCUMENT_CONTENT_STATE: - return yaml_emitter_emit_document_content(emitter, event) - - case yaml_EMIT_DOCUMENT_END_STATE: - return yaml_emitter_emit_document_end(emitter, event) - - case yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, true) - - case yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_flow_sequence_item(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_KEY_STATE: - return yaml_emitter_emit_flow_mapping_key(emitter, event, false) - - case yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, true) - - case yaml_EMIT_FLOW_MAPPING_VALUE_STATE: - return yaml_emitter_emit_flow_mapping_value(emitter, event, false) - - case yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, true) - - case yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE: - return yaml_emitter_emit_block_sequence_item(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_KEY_STATE: - return yaml_emitter_emit_block_mapping_key(emitter, event, false) - - case yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, true) - - case yaml_EMIT_BLOCK_MAPPING_VALUE_STATE: - return yaml_emitter_emit_block_mapping_value(emitter, event, false) - - case yaml_EMIT_END_STATE: - return yaml_emitter_set_emitter_error(emitter, "expected nothing after STREAM-END") - } - panic("invalid emitter state") -} - -// Expect STREAM-START. -func yaml_emitter_emit_stream_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_STREAM_START_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected STREAM-START") - } - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = event.encoding - if emitter.encoding == yaml_ANY_ENCODING { - emitter.encoding = yaml_UTF8_ENCODING - } - } - if emitter.best_indent < 2 || emitter.best_indent > 9 { - emitter.best_indent = 2 - } - if emitter.best_width >= 0 && emitter.best_width <= emitter.best_indent*2 { - emitter.best_width = 80 - } - if emitter.best_width < 0 { - emitter.best_width = 1<<31 - 1 - } - if emitter.line_break == yaml_ANY_BREAK { - emitter.line_break = yaml_LN_BREAK - } - - emitter.indent = -1 - emitter.line = 0 - emitter.column = 0 - emitter.whitespace = true - emitter.indention = true - - if emitter.encoding != yaml_UTF8_ENCODING { - if !yaml_emitter_write_bom(emitter) { - return false - } - } - emitter.state = yaml_EMIT_FIRST_DOCUMENT_START_STATE - return true -} - -// Expect DOCUMENT-START or STREAM-END. 
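// Illustration only (assumed minimal types, not the emitter's): the dispatcher
// above maps an enumerated emitter state to a handler. The same pattern in
// miniature, with each step returning the next state.
package main

import "fmt"

type state int

const (
	stateStart state = iota
	stateBody
	stateEnd
)

func step(s state, input string) (state, error) {
	switch s {
	case stateStart:
		fmt.Println("start:", input)
		return stateBody, nil
	case stateBody:
		fmt.Println("body:", input)
		return stateEnd, nil
	case stateEnd:
		return stateEnd, fmt.Errorf("expected nothing after END")
	}
	panic("invalid state")
}

func main() {
	s := stateStart
	for _, in := range []string{"a", "b"} {
		var err error
		if s, err = step(s, in); err != nil {
			panic(err)
		}
	}
}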
-func yaml_emitter_emit_document_start(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - - if event.typ == yaml_DOCUMENT_START_EVENT { - - if event.version_directive != nil { - if !yaml_emitter_analyze_version_directive(emitter, event.version_directive) { - return false - } - } - - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_analyze_tag_directive(emitter, tag_directive) { - return false - } - if !yaml_emitter_append_tag_directive(emitter, tag_directive, false) { - return false - } - } - - for i := 0; i < len(default_tag_directives); i++ { - tag_directive := &default_tag_directives[i] - if !yaml_emitter_append_tag_directive(emitter, tag_directive, true) { - return false - } - } - - implicit := event.implicit - if !first || emitter.canonical { - implicit = false - } - - if emitter.open_ended && (event.version_directive != nil || len(event.tag_directives) > 0) { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if event.version_directive != nil { - implicit = false - if !yaml_emitter_write_indicator(emitter, []byte("%YAML"), true, false, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("1.1"), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if len(event.tag_directives) > 0 { - implicit = false - for i := 0; i < len(event.tag_directives); i++ { - tag_directive := &event.tag_directives[i] - if !yaml_emitter_write_indicator(emitter, []byte("%TAG"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_handle(emitter, tag_directive.handle) { - return false - } - if !yaml_emitter_write_tag_content(emitter, tag_directive.prefix, true) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - if yaml_emitter_check_empty_document(emitter) { - implicit = false - } - if !implicit { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte("---"), true, false, false) { - return false - } - if emitter.canonical { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - } - - emitter.state = yaml_EMIT_DOCUMENT_CONTENT_STATE - return true - } - - if event.typ == yaml_STREAM_END_EVENT { - if emitter.open_ended { - if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_END_STATE - return true - } - - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-START or STREAM-END") -} - -// Expect the root node. -func yaml_emitter_emit_document_content(emitter *yaml_emitter_t, event *yaml_event_t) bool { - emitter.states = append(emitter.states, yaml_EMIT_DOCUMENT_END_STATE) - return yaml_emitter_emit_node(emitter, event, true, false, false, false) -} - -// Expect DOCUMENT-END. -func yaml_emitter_emit_document_end(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if event.typ != yaml_DOCUMENT_END_EVENT { - return yaml_emitter_set_emitter_error(emitter, "expected DOCUMENT-END") - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !event.implicit { - // [Go] Allocate the slice elsewhere. 
- if !yaml_emitter_write_indicator(emitter, []byte("..."), true, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_flush(emitter) { - return false - } - emitter.state = yaml_EMIT_DOCUMENT_START_STATE - emitter.tag_directives = emitter.tag_directives[:0] - return true -} - -// Expect a flow item node. -func yaml_emitter_emit_flow_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'['}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{']'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a flow key node. -func yaml_emitter_emit_flow_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_write_indicator(emitter, []byte{'{'}, true, true, false) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - emitter.flow_level++ - } - - if event.typ == yaml_MAPPING_END_EVENT { - emitter.flow_level-- - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - if emitter.canonical && !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'}'}, false, false, false) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - - if !first { - if !yaml_emitter_write_indicator(emitter, []byte{','}, false, false, false) { - return false - } - } - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - - if !emitter.canonical && yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, false) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a flow value node. 
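// Loose, simplified sketch (illustrative names only) of the flow-sequence
// emission pattern above: an opening '[', a comma before every item but the
// first, a wrap to an indented line when the column grows past a preferred
// width, and a closing ']'.
package main

import (
	"fmt"
	"strings"
)

func writeFlowSequence(items []string, bestWidth int) string {
	var b strings.Builder
	b.WriteString("[")
	column := 1
	for i, it := range items {
		if i > 0 {
			b.WriteString(",")
			column++
		}
		if column+1+len(it) > bestWidth {
			// Wrap instead of writing the separating space.
			b.WriteString("\n  ")
			column = 2
		} else if i > 0 {
			b.WriteString(" ")
			column++
		}
		b.WriteString(it)
		column += len(it)
	}
	b.WriteString("]")
	return b.String()
}

func main() {
	fmt.Println(writeFlowSequence([]string{"a", "b", "c"}, 80))          // [a, b, c]
	fmt.Println(writeFlowSequence([]string{"alpha", "beta", "gamma"}, 10)) // wraps after each comma
}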
-func yaml_emitter_emit_flow_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if emitter.canonical || emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, false) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_FLOW_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block item node. -func yaml_emitter_emit_block_sequence_item(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, emitter.mapping_context && !emitter.indention) { - return false - } - } - if event.typ == yaml_SEQUENCE_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'-'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_SEQUENCE_ITEM_STATE) - return yaml_emitter_emit_node(emitter, event, false, true, false, false) -} - -// Expect a block key node. -func yaml_emitter_emit_block_mapping_key(emitter *yaml_emitter_t, event *yaml_event_t, first bool) bool { - if first { - if !yaml_emitter_increase_indent(emitter, false, false) { - return false - } - } - if event.typ == yaml_MAPPING_END_EVENT { - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true - } - if !yaml_emitter_write_indent(emitter) { - return false - } - if yaml_emitter_check_simple_key(emitter) { - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_SIMPLE_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, true) - } - if !yaml_emitter_write_indicator(emitter, []byte{'?'}, true, false, true) { - return false - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_VALUE_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a block value node. -func yaml_emitter_emit_block_mapping_value(emitter *yaml_emitter_t, event *yaml_event_t, simple bool) bool { - if simple { - if !yaml_emitter_write_indicator(emitter, []byte{':'}, false, false, false) { - return false - } - } else { - if !yaml_emitter_write_indent(emitter) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{':'}, true, false, true) { - return false - } - } - emitter.states = append(emitter.states, yaml_EMIT_BLOCK_MAPPING_KEY_STATE) - return yaml_emitter_emit_node(emitter, event, false, false, true, false) -} - -// Expect a node. 
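// Hedged sketch of the slice-as-stack idiom used for emitter.indents and
// emitter.states above: push with append, pop by reslicing. Names here are
// illustrative, not the library's.
package main

import "fmt"

func main() {
	var indents []int

	// Push the current indent before increasing it, as increase_indent does.
	push := func(v int) { indents = append(indents, v) }
	// Pop restores the previous indent, exactly as the *_END handlers do.
	pop := func() int {
		v := indents[len(indents)-1]
		indents = indents[:len(indents)-1]
		return v
	}

	push(-1)
	push(0)
	push(2)
	fmt.Println(pop(), pop(), pop()) // 2 0 -1
}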
-func yaml_emitter_emit_node(emitter *yaml_emitter_t, event *yaml_event_t, - root bool, sequence bool, mapping bool, simple_key bool) bool { - - emitter.root_context = root - emitter.sequence_context = sequence - emitter.mapping_context = mapping - emitter.simple_key_context = simple_key - - switch event.typ { - case yaml_ALIAS_EVENT: - return yaml_emitter_emit_alias(emitter, event) - case yaml_SCALAR_EVENT: - return yaml_emitter_emit_scalar(emitter, event) - case yaml_SEQUENCE_START_EVENT: - return yaml_emitter_emit_sequence_start(emitter, event) - case yaml_MAPPING_START_EVENT: - return yaml_emitter_emit_mapping_start(emitter, event) - default: - return yaml_emitter_set_emitter_error(emitter, - "expected SCALAR, SEQUENCE-START, MAPPING-START, or ALIAS") - } - return false -} - -// Expect ALIAS. -func yaml_emitter_emit_alias(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SCALAR. -func yaml_emitter_emit_scalar(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_select_scalar_style(emitter, event) { - return false - } - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if !yaml_emitter_increase_indent(emitter, true, false) { - return false - } - if !yaml_emitter_process_scalar(emitter) { - return false - } - emitter.indent = emitter.indents[len(emitter.indents)-1] - emitter.indents = emitter.indents[:len(emitter.indents)-1] - emitter.state = emitter.states[len(emitter.states)-1] - emitter.states = emitter.states[:len(emitter.states)-1] - return true -} - -// Expect SEQUENCE-START. -func yaml_emitter_emit_sequence_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.sequence_style() == yaml_FLOW_SEQUENCE_STYLE || - yaml_emitter_check_empty_sequence(emitter) { - emitter.state = yaml_EMIT_FLOW_SEQUENCE_FIRST_ITEM_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_SEQUENCE_FIRST_ITEM_STATE - } - return true -} - -// Expect MAPPING-START. -func yaml_emitter_emit_mapping_start(emitter *yaml_emitter_t, event *yaml_event_t) bool { - if !yaml_emitter_process_anchor(emitter) { - return false - } - if !yaml_emitter_process_tag(emitter) { - return false - } - if emitter.flow_level > 0 || emitter.canonical || event.mapping_style() == yaml_FLOW_MAPPING_STYLE || - yaml_emitter_check_empty_mapping(emitter) { - emitter.state = yaml_EMIT_FLOW_MAPPING_FIRST_KEY_STATE - } else { - emitter.state = yaml_EMIT_BLOCK_MAPPING_FIRST_KEY_STATE - } - return true -} - -// Check if the document content is an empty scalar. -func yaml_emitter_check_empty_document(emitter *yaml_emitter_t) bool { - return false // [Go] Huh? -} - -// Check if the next events represent an empty sequence. -func yaml_emitter_check_empty_sequence(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_SEQUENCE_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_SEQUENCE_END_EVENT -} - -// Check if the next events represent an empty mapping. 
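// Hedged sketch (assumed event kinds) of the two-event lookahead performed by
// yaml_emitter_check_empty_sequence/_mapping above: a collection is "empty"
// when its START event is immediately followed by the matching END event.
package main

import "fmt"

type eventType int

const (
	seqStart eventType = iota
	seqEnd
	scalar
)

func isEmptySequence(events []eventType, head int) bool {
	if len(events)-head < 2 {
		return false
	}
	return events[head] == seqStart && events[head+1] == seqEnd
}

func main() {
	fmt.Println(isEmptySequence([]eventType{seqStart, seqEnd}, 0))         // true
	fmt.Println(isEmptySequence([]eventType{seqStart, scalar, seqEnd}, 0)) // false
}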
-func yaml_emitter_check_empty_mapping(emitter *yaml_emitter_t) bool { - if len(emitter.events)-emitter.events_head < 2 { - return false - } - return emitter.events[emitter.events_head].typ == yaml_MAPPING_START_EVENT && - emitter.events[emitter.events_head+1].typ == yaml_MAPPING_END_EVENT -} - -// Check if the next node can be expressed as a simple key. -func yaml_emitter_check_simple_key(emitter *yaml_emitter_t) bool { - length := 0 - switch emitter.events[emitter.events_head].typ { - case yaml_ALIAS_EVENT: - length += len(emitter.anchor_data.anchor) - case yaml_SCALAR_EVENT: - if emitter.scalar_data.multiline { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) + - len(emitter.scalar_data.value) - case yaml_SEQUENCE_START_EVENT: - if !yaml_emitter_check_empty_sequence(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - case yaml_MAPPING_START_EVENT: - if !yaml_emitter_check_empty_mapping(emitter) { - return false - } - length += len(emitter.anchor_data.anchor) + - len(emitter.tag_data.handle) + - len(emitter.tag_data.suffix) - default: - return false - } - return length <= 128 -} - -// Determine an acceptable scalar style. -func yaml_emitter_select_scalar_style(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - no_tag := len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 - if no_tag && !event.implicit && !event.quoted_implicit { - return yaml_emitter_set_emitter_error(emitter, "neither tag nor implicit flags are specified") - } - - style := event.scalar_style() - if style == yaml_ANY_SCALAR_STYLE { - style = yaml_PLAIN_SCALAR_STYLE - } - if emitter.canonical { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - if emitter.simple_key_context && emitter.scalar_data.multiline { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - - if style == yaml_PLAIN_SCALAR_STYLE { - if emitter.flow_level > 0 && !emitter.scalar_data.flow_plain_allowed || - emitter.flow_level == 0 && !emitter.scalar_data.block_plain_allowed { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if len(emitter.scalar_data.value) == 0 && (emitter.flow_level > 0 || emitter.simple_key_context) { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - if no_tag && !event.implicit { - style = yaml_SINGLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_SINGLE_QUOTED_SCALAR_STYLE { - if !emitter.scalar_data.single_quoted_allowed { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - if style == yaml_LITERAL_SCALAR_STYLE || style == yaml_FOLDED_SCALAR_STYLE { - if !emitter.scalar_data.block_allowed || emitter.flow_level > 0 || emitter.simple_key_context { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - } - - if no_tag && !event.quoted_implicit && style != yaml_PLAIN_SCALAR_STYLE { - emitter.tag_data.handle = []byte{'!'} - } - emitter.scalar_data.style = style - return true -} - -// Write an achor. -func yaml_emitter_process_anchor(emitter *yaml_emitter_t) bool { - if emitter.anchor_data.anchor == nil { - return true - } - c := []byte{'&'} - if emitter.anchor_data.alias { - c[0] = '*' - } - if !yaml_emitter_write_indicator(emitter, c, true, false, false) { - return false - } - return yaml_emitter_write_anchor(emitter, emitter.anchor_data.anchor) -} - -// Write a tag. 
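// Simplified, standalone restatement (assumed function, not the emitter's real
// types) of the rule in yaml_emitter_check_simple_key above: a key may be
// written in the compact "key: value" form only if it is single-line and its
// combined anchor/tag/value length stays within the limit.
package main

import (
	"fmt"
	"strings"
)

const maxSimpleKeyLength = 128

func canBeSimpleKey(anchor, tagHandle, tagSuffix, value string) bool {
	if strings.ContainsAny(value, "\n\r") {
		return false // multiline scalars force the explicit "? key" form
	}
	return len(anchor)+len(tagHandle)+len(tagSuffix)+len(value) <= maxSimpleKeyLength
}

func main() {
	fmt.Println(canBeSimpleKey("", "", "", "short key"))              // true
	fmt.Println(canBeSimpleKey("", "", "", "a\nmultiline\nkey"))      // false
	fmt.Println(canBeSimpleKey("", "", "", strings.Repeat("x", 200))) // false
}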
-func yaml_emitter_process_tag(emitter *yaml_emitter_t) bool { - if len(emitter.tag_data.handle) == 0 && len(emitter.tag_data.suffix) == 0 { - return true - } - if len(emitter.tag_data.handle) > 0 { - if !yaml_emitter_write_tag_handle(emitter, emitter.tag_data.handle) { - return false - } - if len(emitter.tag_data.suffix) > 0 { - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - } - } else { - // [Go] Allocate these slices elsewhere. - if !yaml_emitter_write_indicator(emitter, []byte("!<"), true, false, false) { - return false - } - if !yaml_emitter_write_tag_content(emitter, emitter.tag_data.suffix, false) { - return false - } - if !yaml_emitter_write_indicator(emitter, []byte{'>'}, false, false, false) { - return false - } - } - return true -} - -// Write a scalar. -func yaml_emitter_process_scalar(emitter *yaml_emitter_t) bool { - switch emitter.scalar_data.style { - case yaml_PLAIN_SCALAR_STYLE: - return yaml_emitter_write_plain_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_SINGLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_single_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_DOUBLE_QUOTED_SCALAR_STYLE: - return yaml_emitter_write_double_quoted_scalar(emitter, emitter.scalar_data.value, !emitter.simple_key_context) - - case yaml_LITERAL_SCALAR_STYLE: - return yaml_emitter_write_literal_scalar(emitter, emitter.scalar_data.value) - - case yaml_FOLDED_SCALAR_STYLE: - return yaml_emitter_write_folded_scalar(emitter, emitter.scalar_data.value) - } - panic("unknown scalar style") -} - -// Check if a %YAML directive is valid. -func yaml_emitter_analyze_version_directive(emitter *yaml_emitter_t, version_directive *yaml_version_directive_t) bool { - if version_directive.major != 1 || version_directive.minor != 1 { - return yaml_emitter_set_emitter_error(emitter, "incompatible %YAML directive") - } - return true -} - -// Check if a %TAG directive is valid. -func yaml_emitter_analyze_tag_directive(emitter *yaml_emitter_t, tag_directive *yaml_tag_directive_t) bool { - handle := tag_directive.handle - prefix := tag_directive.prefix - if len(handle) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag handle must not be empty") - } - if handle[0] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must start with '!'") - } - if handle[len(handle)-1] != '!' { - return yaml_emitter_set_emitter_error(emitter, "tag handle must end with '!'") - } - for i := 1; i < len(handle)-1; i += width(handle[i]) { - if !is_alpha(handle, i) { - return yaml_emitter_set_emitter_error(emitter, "tag handle must contain alphanumerical characters only") - } - } - if len(prefix) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag prefix must not be empty") - } - return true -} - -// Check if an anchor is valid. 
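// Standalone paraphrase (assumed helper name) of the %TAG handle rules checked
// by yaml_emitter_analyze_tag_directive above: non-empty, delimited by '!', and
// alphanumeric in between. This byte-level sketch also accepts '-' and '_' and
// skips the UTF-8-aware iteration the library performs.
package main

import (
	"errors"
	"fmt"
)

func validateTagHandle(handle string) error {
	if len(handle) == 0 {
		return errors.New("tag handle must not be empty")
	}
	if handle[0] != '!' || handle[len(handle)-1] != '!' {
		return errors.New("tag handle must start and end with '!'")
	}
	for i := 1; i < len(handle)-1; i++ {
		c := handle[i]
		if !(c >= '0' && c <= '9' || c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' || c == '-' || c == '_') {
			return errors.New("tag handle must contain alphanumerical characters only")
		}
	}
	return nil
}

func main() {
	fmt.Println(validateTagHandle("!e!"))   // <nil>
	fmt.Println(validateTagHandle("!e f!")) // error: contains a space
}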
-func yaml_emitter_analyze_anchor(emitter *yaml_emitter_t, anchor []byte, alias bool) bool { - if len(anchor) == 0 { - problem := "anchor value must not be empty" - if alias { - problem = "alias value must not be empty" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - for i := 0; i < len(anchor); i += width(anchor[i]) { - if !is_alpha(anchor, i) { - problem := "anchor value must contain alphanumerical characters only" - if alias { - problem = "alias value must contain alphanumerical characters only" - } - return yaml_emitter_set_emitter_error(emitter, problem) - } - } - emitter.anchor_data.anchor = anchor - emitter.anchor_data.alias = alias - return true -} - -// Check if a tag is valid. -func yaml_emitter_analyze_tag(emitter *yaml_emitter_t, tag []byte) bool { - if len(tag) == 0 { - return yaml_emitter_set_emitter_error(emitter, "tag value must not be empty") - } - for i := 0; i < len(emitter.tag_directives); i++ { - tag_directive := &emitter.tag_directives[i] - if bytes.HasPrefix(tag, tag_directive.prefix) { - emitter.tag_data.handle = tag_directive.handle - emitter.tag_data.suffix = tag[len(tag_directive.prefix):] - return true - } - } - emitter.tag_data.suffix = tag - return true -} - -// Check if a scalar is valid. -func yaml_emitter_analyze_scalar(emitter *yaml_emitter_t, value []byte) bool { - var ( - block_indicators = false - flow_indicators = false - line_breaks = false - special_characters = false - - leading_space = false - leading_break = false - trailing_space = false - trailing_break = false - break_space = false - space_break = false - - preceeded_by_whitespace = false - followed_by_whitespace = false - previous_space = false - previous_break = false - ) - - emitter.scalar_data.value = value - - if len(value) == 0 { - emitter.scalar_data.multiline = false - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = false - return true - } - - if len(value) >= 3 && ((value[0] == '-' && value[1] == '-' && value[2] == '-') || (value[0] == '.' && value[1] == '.' 
&& value[2] == '.')) { - block_indicators = true - flow_indicators = true - } - - preceeded_by_whitespace = true - for i, w := 0, 0; i < len(value); i += w { - w = width(value[0]) - followed_by_whitespace = i+w >= len(value) || is_blank(value, i+w) - - if i == 0 { - switch value[i] { - case '#', ',', '[', ']', '{', '}', '&', '*', '!', '|', '>', '\'', '"', '%', '@', '`': - flow_indicators = true - block_indicators = true - case '?', ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '-': - if followed_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } else { - switch value[i] { - case ',', '?', '[', ']', '{', '}': - flow_indicators = true - case ':': - flow_indicators = true - if followed_by_whitespace { - block_indicators = true - } - case '#': - if preceeded_by_whitespace { - flow_indicators = true - block_indicators = true - } - } - } - - if !is_printable(value, i) || !is_ascii(value, i) && !emitter.unicode { - special_characters = true - } - if is_space(value, i) { - if i == 0 { - leading_space = true - } - if i+width(value[i]) == len(value) { - trailing_space = true - } - if previous_break { - break_space = true - } - previous_space = true - previous_break = false - } else if is_break(value, i) { - line_breaks = true - if i == 0 { - leading_break = true - } - if i+width(value[i]) == len(value) { - trailing_break = true - } - if previous_space { - space_break = true - } - previous_space = false - previous_break = true - } else { - previous_space = false - previous_break = false - } - - // [Go]: Why 'z'? Couldn't be the end of the string as that's the loop condition. - preceeded_by_whitespace = is_blankz(value, i) - } - - emitter.scalar_data.multiline = line_breaks - emitter.scalar_data.flow_plain_allowed = true - emitter.scalar_data.block_plain_allowed = true - emitter.scalar_data.single_quoted_allowed = true - emitter.scalar_data.block_allowed = true - - if leading_space || leading_break || trailing_space || trailing_break { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if trailing_space { - emitter.scalar_data.block_allowed = false - } - if break_space { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - } - if space_break || special_characters { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - emitter.scalar_data.single_quoted_allowed = false - emitter.scalar_data.block_allowed = false - } - if line_breaks { - emitter.scalar_data.flow_plain_allowed = false - emitter.scalar_data.block_plain_allowed = false - } - if flow_indicators { - emitter.scalar_data.flow_plain_allowed = false - } - if block_indicators { - emitter.scalar_data.block_plain_allowed = false - } - return true -} - -// Check if the event data is valid. 
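// Hedged, much-simplified restatement of a few of the signals computed by
// yaml_emitter_analyze_scalar above (names are illustrative): leading or
// trailing whitespace and embedded line breaks rule out the plain style.
// Context-dependent rules (flow vs block, simple keys) and the indicator-
// character checks are deliberately omitted here.
package main

import (
	"fmt"
	"strings"
)

func plainStyleAllowed(value string) bool {
	if strings.HasPrefix(value, " ") || strings.HasSuffix(value, " ") {
		return false
	}
	if strings.ContainsAny(value, "\n\r") {
		return false
	}
	return true
}

func main() {
	fmt.Println(plainStyleAllowed("plain value"))    // true
	fmt.Println(plainStyleAllowed(" leading space")) // false
	fmt.Println(plainStyleAllowed("two\nlines"))     // false
}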
-func yaml_emitter_analyze_event(emitter *yaml_emitter_t, event *yaml_event_t) bool { - - emitter.anchor_data.anchor = nil - emitter.tag_data.handle = nil - emitter.tag_data.suffix = nil - emitter.scalar_data.value = nil - - switch event.typ { - case yaml_ALIAS_EVENT: - if !yaml_emitter_analyze_anchor(emitter, event.anchor, true) { - return false - } - - case yaml_SCALAR_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || (!event.implicit && !event.quoted_implicit)) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - if !yaml_emitter_analyze_scalar(emitter, event.value) { - return false - } - - case yaml_SEQUENCE_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - - case yaml_MAPPING_START_EVENT: - if len(event.anchor) > 0 { - if !yaml_emitter_analyze_anchor(emitter, event.anchor, false) { - return false - } - } - if len(event.tag) > 0 && (emitter.canonical || !event.implicit) { - if !yaml_emitter_analyze_tag(emitter, event.tag) { - return false - } - } - } - return true -} - -// Write the BOM character. -func yaml_emitter_write_bom(emitter *yaml_emitter_t) bool { - if !flush(emitter) { - return false - } - pos := emitter.buffer_pos - emitter.buffer[pos+0] = '\xEF' - emitter.buffer[pos+1] = '\xBB' - emitter.buffer[pos+2] = '\xBF' - emitter.buffer_pos += 3 - return true -} - -func yaml_emitter_write_indent(emitter *yaml_emitter_t) bool { - indent := emitter.indent - if indent < 0 { - indent = 0 - } - if !emitter.indention || emitter.column > indent || (emitter.column == indent && !emitter.whitespace) { - if !put_break(emitter) { - return false - } - } - for emitter.column < indent { - if !put(emitter, ' ') { - return false - } - } - emitter.whitespace = true - emitter.indention = true - return true -} - -func yaml_emitter_write_indicator(emitter *yaml_emitter_t, indicator []byte, need_whitespace, is_whitespace, is_indention bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, indicator) { - return false - } - emitter.whitespace = is_whitespace - emitter.indention = (emitter.indention && is_indention) - emitter.open_ended = false - return true -} - -func yaml_emitter_write_anchor(emitter *yaml_emitter_t, value []byte) bool { - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_handle(emitter *yaml_emitter_t, value []byte) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - if !write_all(emitter, value) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_tag_content(emitter *yaml_emitter_t, value []byte, need_whitespace bool) bool { - if need_whitespace && !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - for i := 0; i < len(value); { - var must_write bool - switch value[i] { - case ';', '/', '?', ':', '@', '&', '=', '+', '$', ',', '_', '.', '~', '*', '\'', '(', ')', '[', ']': - must_write = true - default: - must_write = is_alpha(value, i) - } - if must_write { - if !write(emitter, value, &i) { - return false - } - } 
else { - w := width(value[i]) - for k := 0; k < w; k++ { - octet := value[i] - i++ - if !put(emitter, '%') { - return false - } - - c := octet >> 4 - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - - c = octet & 0x0f - if c < 10 { - c += '0' - } else { - c += 'A' - 10 - } - if !put(emitter, c) { - return false - } - } - } - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_plain_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - if !emitter.whitespace { - if !put(emitter, ' ') { - return false - } - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - - emitter.whitespace = false - emitter.indention = false - if emitter.root_context { - emitter.open_ended = true - } - - return true -} - -func yaml_emitter_write_single_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, true, false, false) { - return false - } - - spaces := false - breaks := false - for i := 0; i < len(value); { - if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 && !is_space(value, i+1) { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - spaces = true - } else if is_break(value, i) { - if !breaks && value[i] == '\n' { - if !put_break(emitter) { - return false - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if value[i] == '\'' { - if !put(emitter, '\'') { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - spaces = false - breaks = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'\''}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_double_quoted_scalar(emitter *yaml_emitter_t, value []byte, allow_breaks bool) bool { - spaces := false - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, true, false, false) { - return false - } - - for i := 0; i < len(value); { - if !is_printable(value, i) || (!emitter.unicode && !is_ascii(value, i)) || - is_bom(value, i) || is_break(value, i) || - value[i] == '"' || value[i] == '\\' { - - octet := value[i] - - var w int - var v rune - switch { - case octet&0x80 == 0x00: - w, v = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, v = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, v = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, v = 4, 
rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = value[i+k] - v = (v << 6) + (rune(octet) & 0x3F) - } - i += w - - if !put(emitter, '\\') { - return false - } - - var ok bool - switch v { - case 0x00: - ok = put(emitter, '0') - case 0x07: - ok = put(emitter, 'a') - case 0x08: - ok = put(emitter, 'b') - case 0x09: - ok = put(emitter, 't') - case 0x0A: - ok = put(emitter, 'n') - case 0x0b: - ok = put(emitter, 'v') - case 0x0c: - ok = put(emitter, 'f') - case 0x0d: - ok = put(emitter, 'r') - case 0x1b: - ok = put(emitter, 'e') - case 0x22: - ok = put(emitter, '"') - case 0x5c: - ok = put(emitter, '\\') - case 0x85: - ok = put(emitter, 'N') - case 0xA0: - ok = put(emitter, '_') - case 0x2028: - ok = put(emitter, 'L') - case 0x2029: - ok = put(emitter, 'P') - default: - if v <= 0xFF { - ok = put(emitter, 'x') - w = 2 - } else if v <= 0xFFFF { - ok = put(emitter, 'u') - w = 4 - } else { - ok = put(emitter, 'U') - w = 8 - } - for k := (w - 1) * 4; ok && k >= 0; k -= 4 { - digit := byte((v >> uint(k)) & 0x0F) - if digit < 10 { - ok = put(emitter, digit+'0') - } else { - ok = put(emitter, digit+'A'-10) - } - } - } - if !ok { - return false - } - spaces = false - } else if is_space(value, i) { - if allow_breaks && !spaces && emitter.column > emitter.best_width && i > 0 && i < len(value)-1 { - if !yaml_emitter_write_indent(emitter) { - return false - } - if is_space(value, i+1) { - if !put(emitter, '\\') { - return false - } - } - i += width(value[i]) - } else if !write(emitter, value, &i) { - return false - } - spaces = true - } else { - if !write(emitter, value, &i) { - return false - } - spaces = false - } - } - if !yaml_emitter_write_indicator(emitter, []byte{'"'}, false, false, false) { - return false - } - emitter.whitespace = false - emitter.indention = false - return true -} - -func yaml_emitter_write_block_scalar_hints(emitter *yaml_emitter_t, value []byte) bool { - if is_space(value, 0) || is_break(value, 0) { - indent_hint := []byte{'0' + byte(emitter.best_indent)} - if !yaml_emitter_write_indicator(emitter, indent_hint, false, false, false) { - return false - } - } - - emitter.open_ended = false - - var chomp_hint [1]byte - if len(value) == 0 { - chomp_hint[0] = '-' - } else { - i := len(value) - 1 - for value[i]&0xC0 == 0x80 { - i-- - } - if !is_break(value, i) { - chomp_hint[0] = '-' - } else if i == 0 { - chomp_hint[0] = '+' - emitter.open_ended = true - } else { - i-- - for value[i]&0xC0 == 0x80 { - i-- - } - if is_break(value, i) { - chomp_hint[0] = '+' - emitter.open_ended = true - } - } - } - if chomp_hint[0] != 0 { - if !yaml_emitter_write_indicator(emitter, chomp_hint[:], false, false, false) { - return false - } - } - return true -} - -func yaml_emitter_write_literal_scalar(emitter *yaml_emitter_t, value []byte) bool { - if !yaml_emitter_write_indicator(emitter, []byte{'|'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - breaks := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - } - if !write(emitter, value, &i) { - return false - } - emitter.indention = false - breaks = false - } - } - - return true -} - -func yaml_emitter_write_folded_scalar(emitter *yaml_emitter_t, value []byte) bool { - if 
!yaml_emitter_write_indicator(emitter, []byte{'>'}, true, false, false) { - return false - } - if !yaml_emitter_write_block_scalar_hints(emitter, value) { - return false - } - - if !put_break(emitter) { - return false - } - emitter.indention = true - emitter.whitespace = true - - breaks := true - leading_spaces := true - for i := 0; i < len(value); { - if is_break(value, i) { - if !breaks && !leading_spaces && value[i] == '\n' { - k := 0 - for is_break(value, k) { - k += width(value[k]) - } - if !is_blankz(value, k) { - if !put_break(emitter) { - return false - } - } - } - if !write_break(emitter, value, &i) { - return false - } - emitter.indention = true - breaks = true - } else { - if breaks { - if !yaml_emitter_write_indent(emitter) { - return false - } - leading_spaces = is_blank(value, i) - } - if !breaks && is_space(value, i) && !is_space(value, i+1) && emitter.column > emitter.best_width { - if !yaml_emitter_write_indent(emitter) { - return false - } - i += width(value[i]) - } else { - if !write(emitter, value, &i) { - return false - } - } - emitter.indention = false - breaks = false - } - } - return true -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/encode.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/encode.go deleted file mode 100644 index 0b9048d..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/encode.go +++ /dev/null @@ -1,265 +0,0 @@ -package yaml - -import ( - "reflect" - "regexp" - "sort" - "strconv" - "strings" - "time" -) - -type encoder struct { - emitter yaml_emitter_t - event yaml_event_t - out []byte - flow bool -} - -func newEncoder() (e *encoder) { - e = &encoder{} - e.must(yaml_emitter_initialize(&e.emitter)) - yaml_emitter_set_output_string(&e.emitter, &e.out) - e.must(yaml_stream_start_event_initialize(&e.event, yaml_UTF8_ENCODING)) - e.emit() - e.must(yaml_document_start_event_initialize(&e.event, nil, nil, true)) - e.emit() - return e -} - -func (e *encoder) finish() { - e.must(yaml_document_end_event_initialize(&e.event, true)) - e.emit() - e.emitter.open_ended = false - e.must(yaml_stream_end_event_initialize(&e.event)) - e.emit() -} - -func (e *encoder) destroy() { - yaml_emitter_delete(&e.emitter) -} - -func (e *encoder) emit() { - // This will internally delete the e.event value. 
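// Usage sketch for the encoder being removed here, driven through the public
// yaml.v1 API (import path assumed from the test files). The bare field-tag
// forms follow what the encode tests below exercise: a rename, ",flow" for
// inline sequences, and ",omitempty".
package main

import (
	"fmt"

	"gopkg.in/yaml.v1"
)

type Config struct {
	Name  string "name"
	Ports []int  "ports,flow"
	Debug bool   "debug,omitempty"
}

func main() {
	out, err := yaml.Marshal(&Config{Name: "web", Ports: []int{80, 443}})
	if err != nil {
		panic(err)
	}
	fmt.Print(string(out))
	// Expected shape (Debug is zero, so omitempty drops it):
	// name: web
	// ports: [80, 443]
}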
- if !yaml_emitter_emit(&e.emitter, &e.event) && e.event.typ != yaml_DOCUMENT_END_EVENT && e.event.typ != yaml_STREAM_END_EVENT { - e.must(false) - } -} - -func (e *encoder) must(ok bool) { - if !ok { - msg := e.emitter.problem - if msg == "" { - msg = "Unknown problem generating YAML content" - } - fail(msg) - } -} - -func (e *encoder) marshal(tag string, in reflect.Value) { - if !in.IsValid() { - e.nilv() - return - } - var value interface{} - if getter, ok := in.Interface().(Getter); ok { - tag, value = getter.GetYAML() - tag = longTag(tag) - if value == nil { - e.nilv() - return - } - in = reflect.ValueOf(value) - } - switch in.Kind() { - case reflect.Interface: - if in.IsNil() { - e.nilv() - } else { - e.marshal(tag, in.Elem()) - } - case reflect.Map: - e.mapv(tag, in) - case reflect.Ptr: - if in.IsNil() { - e.nilv() - } else { - e.marshal(tag, in.Elem()) - } - case reflect.Struct: - e.structv(tag, in) - case reflect.Slice: - e.slicev(tag, in) - case reflect.String: - e.stringv(tag, in) - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - if in.Type() == durationType { - e.stringv(tag, reflect.ValueOf(in.Interface().(time.Duration).String())) - } else { - e.intv(tag, in) - } - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - e.uintv(tag, in) - case reflect.Float32, reflect.Float64: - e.floatv(tag, in) - case reflect.Bool: - e.boolv(tag, in) - default: - panic("Can't marshal type: " + in.Type().String()) - } -} - -func (e *encoder) mapv(tag string, in reflect.Value) { - e.mappingv(tag, func() { - keys := keyList(in.MapKeys()) - sort.Sort(keys) - for _, k := range keys { - e.marshal("", k) - e.marshal("", in.MapIndex(k)) - } - }) -} - -func (e *encoder) structv(tag string, in reflect.Value) { - sinfo, err := getStructInfo(in.Type()) - if err != nil { - panic(err) - } - e.mappingv(tag, func() { - for _, info := range sinfo.FieldsList { - var value reflect.Value - if info.Inline == nil { - value = in.Field(info.Num) - } else { - value = in.FieldByIndex(info.Inline) - } - if info.OmitEmpty && isZero(value) { - continue - } - e.marshal("", reflect.ValueOf(info.Key)) - e.flow = info.Flow - e.marshal("", value) - } - }) -} - -func (e *encoder) mappingv(tag string, f func()) { - implicit := tag == "" - style := yaml_BLOCK_MAPPING_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_MAPPING_STYLE - } - e.must(yaml_mapping_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - f() - e.must(yaml_mapping_end_event_initialize(&e.event)) - e.emit() -} - -func (e *encoder) slicev(tag string, in reflect.Value) { - implicit := tag == "" - style := yaml_BLOCK_SEQUENCE_STYLE - if e.flow { - e.flow = false - style = yaml_FLOW_SEQUENCE_STYLE - } - e.must(yaml_sequence_start_event_initialize(&e.event, nil, []byte(tag), implicit, style)) - e.emit() - n := in.Len() - for i := 0; i < n; i++ { - e.marshal("", in.Index(i)) - } - e.must(yaml_sequence_end_event_initialize(&e.event)) - e.emit() -} - -// isBase60 returns whether s is in base 60 notation as defined in YAML 1.1. -// -// The base 60 float notation in YAML 1.1 is a terrible idea and is unsupported -// in YAML 1.2 and by this package, but these should be marshalled quoted for -// the time being for compatibility with other parsers. -func isBase60Float(s string) (result bool) { - // Fast path. 
- if s == "" { - return false - } - c := s[0] - if !(c == '+' || c == '-' || c >= '0' && c <= '9') || strings.IndexByte(s, ':') < 0 { - return false - } - // Do the full match. - return base60float.MatchString(s) -} - -// From http://yaml.org/type/float.html, except the regular expression there -// is bogus. In practice parsers do not enforce the "\.[0-9_]*" suffix. -var base60float = regexp.MustCompile(`^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+(?:\.[0-9_]*)?$`) - -func (e *encoder) stringv(tag string, in reflect.Value) { - var style yaml_scalar_style_t - s := in.String() - rtag, rs := resolve("", s) - if rtag == yaml_BINARY_TAG { - if tag == "" || tag == yaml_STR_TAG { - tag = rtag - s = rs.(string) - } else if tag == yaml_BINARY_TAG { - fail("explicitly tagged !!binary data must be base64-encoded") - } else { - fail("cannot marshal invalid UTF-8 data as " + shortTag(tag)) - } - } - if tag == "" && (rtag != yaml_STR_TAG || isBase60Float(s)) { - style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } else if strings.Contains(s, "\n") { - style = yaml_LITERAL_SCALAR_STYLE - } else { - style = yaml_PLAIN_SCALAR_STYLE - } - e.emitScalar(s, "", tag, style) -} - -func (e *encoder) boolv(tag string, in reflect.Value) { - var s string - if in.Bool() { - s = "true" - } else { - s = "false" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) intv(tag string, in reflect.Value) { - s := strconv.FormatInt(in.Int(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) uintv(tag string, in reflect.Value) { - s := strconv.FormatUint(in.Uint(), 10) - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) floatv(tag string, in reflect.Value) { - // FIXME: Handle 64 bits here. - s := strconv.FormatFloat(float64(in.Float()), 'g', -1, 32) - switch s { - case "+Inf": - s = ".inf" - case "-Inf": - s = "-.inf" - case "NaN": - s = ".nan" - } - e.emitScalar(s, "", tag, yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) nilv() { - e.emitScalar("null", "", "", yaml_PLAIN_SCALAR_STYLE) -} - -func (e *encoder) emitScalar(value, anchor, tag string, style yaml_scalar_style_t) { - implicit := tag == "" - e.must(yaml_scalar_event_initialize(&e.event, []byte(anchor), []byte(tag), []byte(value), implicit, implicit, style)) - e.emit() -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go deleted file mode 100644 index 37d13a0..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go +++ /dev/null @@ -1,433 +0,0 @@ -package yaml_test - -import ( - "fmt" - "math" - "strconv" - "strings" - "time" - - . 
"gopkg.in/check.v1" - "github.com/remind101/deploy/Godeps/_workspace/src/gopkg.in/yaml.v1" -) - -var marshalIntTest = 123 - -var marshalTests = []struct { - value interface{} - data string -}{ - { - nil, - "null\n", - }, { - &struct{}{}, - "{}\n", - }, { - map[string]string{"v": "hi"}, - "v: hi\n", - }, { - map[string]interface{}{"v": "hi"}, - "v: hi\n", - }, { - map[string]string{"v": "true"}, - "v: \"true\"\n", - }, { - map[string]string{"v": "false"}, - "v: \"false\"\n", - }, { - map[string]interface{}{"v": true}, - "v: true\n", - }, { - map[string]interface{}{"v": false}, - "v: false\n", - }, { - map[string]interface{}{"v": 10}, - "v: 10\n", - }, { - map[string]interface{}{"v": -10}, - "v: -10\n", - }, { - map[string]uint{"v": 42}, - "v: 42\n", - }, { - map[string]interface{}{"v": int64(4294967296)}, - "v: 4294967296\n", - }, { - map[string]int64{"v": int64(4294967296)}, - "v: 4294967296\n", - }, { - map[string]uint64{"v": 4294967296}, - "v: 4294967296\n", - }, { - map[string]interface{}{"v": "10"}, - "v: \"10\"\n", - }, { - map[string]interface{}{"v": 0.1}, - "v: 0.1\n", - }, { - map[string]interface{}{"v": float64(0.1)}, - "v: 0.1\n", - }, { - map[string]interface{}{"v": -0.1}, - "v: -0.1\n", - }, { - map[string]interface{}{"v": math.Inf(+1)}, - "v: .inf\n", - }, { - map[string]interface{}{"v": math.Inf(-1)}, - "v: -.inf\n", - }, { - map[string]interface{}{"v": math.NaN()}, - "v: .nan\n", - }, { - map[string]interface{}{"v": nil}, - "v: null\n", - }, { - map[string]interface{}{"v": ""}, - "v: \"\"\n", - }, { - map[string][]string{"v": []string{"A", "B"}}, - "v:\n- A\n- B\n", - }, { - map[string][]string{"v": []string{"A", "B\nC"}}, - "v:\n- A\n- |-\n B\n C\n", - }, { - map[string][]interface{}{"v": []interface{}{"A", 1, map[string][]int{"B": []int{2, 3}}}}, - "v:\n- A\n- 1\n- B:\n - 2\n - 3\n", - }, { - map[string]interface{}{"a": map[interface{}]interface{}{"b": "c"}}, - "a:\n b: c\n", - }, { - map[string]interface{}{"a": "-"}, - "a: '-'\n", - }, - - // Simple values. 
- { - &marshalIntTest, - "123\n", - }, - - // Structures - { - &struct{ Hello string }{"world"}, - "hello: world\n", - }, { - &struct { - A struct { - B string - } - }{struct{ B string }{"c"}}, - "a:\n b: c\n", - }, { - &struct { - A *struct { - B string - } - }{&struct{ B string }{"c"}}, - "a:\n b: c\n", - }, { - &struct { - A *struct { - B string - } - }{}, - "a: null\n", - }, { - &struct{ A int }{1}, - "a: 1\n", - }, { - &struct{ A []int }{[]int{1, 2}}, - "a:\n- 1\n- 2\n", - }, { - &struct { - B int "a" - }{1}, - "a: 1\n", - }, { - &struct{ A bool }{true}, - "a: true\n", - }, - - // Conditional flag - { - &struct { - A int "a,omitempty" - B int "b,omitempty" - }{1, 0}, - "a: 1\n", - }, { - &struct { - A int "a,omitempty" - B int "b,omitempty" - }{0, 0}, - "{}\n", - }, { - &struct { - A *struct{ X int } "a,omitempty" - B int "b,omitempty" - }{nil, 0}, - "{}\n", - }, - - // Flow flag - { - &struct { - A []int "a,flow" - }{[]int{1, 2}}, - "a: [1, 2]\n", - }, { - &struct { - A map[string]string "a,flow" - }{map[string]string{"b": "c", "d": "e"}}, - "a: {b: c, d: e}\n", - }, { - &struct { - A struct { - B, D string - } "a,flow" - }{struct{ B, D string }{"c", "e"}}, - "a: {b: c, d: e}\n", - }, - - // Unexported field - { - &struct { - u int - A int - }{0, 1}, - "a: 1\n", - }, - - // Ignored field - { - &struct { - A int - B int "-" - }{1, 2}, - "a: 1\n", - }, - - // Struct inlining - { - &struct { - A int - C inlineB `yaml:",inline"` - }{1, inlineB{2, inlineC{3}}}, - "a: 1\nb: 2\nc: 3\n", - }, - - // Duration - { - map[string]time.Duration{"a": 3 * time.Second}, - "a: 3s\n", - }, - - // Issue #24: bug in map merging logic. - { - map[string]string{"a": ""}, - "a: \n", - }, - - // Issue #34: marshal unsupported base 60 floats quoted for compatibility - // with old YAML 1.1 parsers. - { - map[string]string{"a": "1:1"}, - "a: \"1:1\"\n", - }, - - // Binary data. - { - map[string]string{"a": "\x00"}, - "a: \"\\0\"\n", - }, { - map[string]string{"a": "\x80\x81\x82"}, - "a: !!binary gIGC\n", - }, { - map[string]string{"a": strings.Repeat("\x90", 54)}, - "a: !!binary |\n " + strings.Repeat("kJCQ", 17) + "kJ\n CQ\n", - }, { - map[string]interface{}{"a": typeWithGetter{"!!str", "\x80\x81\x82"}}, - "a: !!binary gIGC\n", - }, - - // Escaping of tags. - { - map[string]interface{}{"a": typeWithGetter{"foo!bar", 1}}, - "a: ! 
1\n", - }, -} - -func (s *S) TestMarshal(c *C) { - for _, item := range marshalTests { - data, err := yaml.Marshal(item.value) - c.Assert(err, IsNil) - c.Assert(string(data), Equals, item.data) - } -} - -var marshalErrorTests = []struct { - value interface{} - error string - panic string -}{{ - value: &struct { - B int - inlineB ",inline" - }{1, inlineB{2, inlineC{3}}}, - panic: `Duplicated key 'b' in struct struct \{ B int; .*`, -}, { - value: typeWithGetter{"!!binary", "\x80"}, - error: "YAML error: explicitly tagged !!binary data must be base64-encoded", -}, { - value: typeWithGetter{"!!float", "\x80"}, - error: `YAML error: cannot marshal invalid UTF-8 data as !!float`, -}} - -func (s *S) TestMarshalErrors(c *C) { - for _, item := range marshalErrorTests { - if item.panic != "" { - c.Assert(func() { yaml.Marshal(item.value) }, PanicMatches, item.panic) - } else { - _, err := yaml.Marshal(item.value) - c.Assert(err, ErrorMatches, item.error) - } - } -} - -var marshalTaggedIfaceTest interface{} = &struct{ A string }{"B"} - -var getterTests = []struct { - data, tag string - value interface{} -}{ - {"_:\n hi: there\n", "", map[interface{}]interface{}{"hi": "there"}}, - {"_:\n- 1\n- A\n", "", []interface{}{1, "A"}}, - {"_: 10\n", "", 10}, - {"_: null\n", "", nil}, - {"_: !foo BAR!\n", "!foo", "BAR!"}, - {"_: !foo 1\n", "!foo", "1"}, - {"_: !foo '\"1\"'\n", "!foo", "\"1\""}, - {"_: !foo 1.1\n", "!foo", 1.1}, - {"_: !foo 1\n", "!foo", 1}, - {"_: !foo 1\n", "!foo", uint(1)}, - {"_: !foo true\n", "!foo", true}, - {"_: !foo\n- A\n- B\n", "!foo", []string{"A", "B"}}, - {"_: !foo\n A: B\n", "!foo", map[string]string{"A": "B"}}, - {"_: !foo\n a: B\n", "!foo", &marshalTaggedIfaceTest}, -} - -func (s *S) TestMarshalTypeCache(c *C) { - var data []byte - var err error - func() { - type T struct{ A int } - data, err = yaml.Marshal(&T{}) - c.Assert(err, IsNil) - }() - func() { - type T struct{ B int } - data, err = yaml.Marshal(&T{}) - c.Assert(err, IsNil) - }() - c.Assert(string(data), Equals, "b: 0\n") -} - -type typeWithGetter struct { - tag string - value interface{} -} - -func (o typeWithGetter) GetYAML() (tag string, value interface{}) { - return o.tag, o.value -} - -type typeWithGetterField struct { - Field typeWithGetter "_" -} - -func (s *S) TestMashalWithGetter(c *C) { - for _, item := range getterTests { - obj := &typeWithGetterField{} - obj.Field.tag = item.tag - obj.Field.value = item.value - data, err := yaml.Marshal(obj) - c.Assert(err, IsNil) - c.Assert(string(data), Equals, string(item.data)) - } -} - -func (s *S) TestUnmarshalWholeDocumentWithGetter(c *C) { - obj := &typeWithGetter{} - obj.tag = "" - obj.value = map[string]string{"hello": "world!"} - data, err := yaml.Marshal(obj) - c.Assert(err, IsNil) - c.Assert(string(data), Equals, "hello: world!\n") -} - -func (s *S) TestSortedOutput(c *C) { - order := []interface{}{ - false, - true, - 1, - uint(1), - 1.0, - 1.1, - 1.2, - 2, - uint(2), - 2.0, - 2.1, - "", - ".1", - ".2", - ".a", - "1", - "2", - "a!10", - "a/2", - "a/10", - "a~10", - "ab/1", - "b/1", - "b/01", - "b/2", - "b/02", - "b/3", - "b/03", - "b1", - "b01", - "b3", - "c2.10", - "c10.2", - "d1", - "d12", - "d12a", - } - m := make(map[interface{}]int) - for _, k := range order { - m[k] = 1 - } - data, err := yaml.Marshal(m) - c.Assert(err, IsNil) - out := "\n" + string(data) - last := 0 - for i, k := range order { - repr := fmt.Sprint(k) - if s, ok := k.(string); ok { - if _, err = strconv.ParseFloat(repr, 32); s == "" || err == nil { - repr = `"` + repr + `"` - } - } - index 
:= strings.Index(out, "\n"+repr+":") - if index == -1 { - c.Fatalf("%#v is not in the output: %#v", k, out) - } - if index < last { - c.Fatalf("%#v was generated before %#v: %q", k, order[i-1], out) - } - last = index - } -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/parserc.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/parserc.go deleted file mode 100644 index 0a7037a..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/parserc.go +++ /dev/null @@ -1,1096 +0,0 @@ -package yaml - -import ( - "bytes" -) - -// The parser implements the following grammar: -// -// stream ::= STREAM-START implicit_document? explicit_document* STREAM-END -// implicit_document ::= block_node DOCUMENT-END* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// block_node_or_indentless_sequence ::= -// ALIAS -// | properties (block_content | indentless_block_sequence)? -// | block_content -// | indentless_block_sequence -// block_node ::= ALIAS -// | properties block_content? -// | block_content -// flow_node ::= ALIAS -// | properties flow_content? -// | flow_content -// properties ::= TAG ANCHOR? | ANCHOR TAG? -// block_content ::= block_collection | flow_collection | SCALAR -// flow_content ::= flow_collection | SCALAR -// block_collection ::= block_sequence | block_mapping -// flow_collection ::= flow_sequence | flow_mapping -// block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// block_mapping ::= BLOCK-MAPPING_START -// ((KEY block_node_or_indentless_sequence?)? -// (VALUE block_node_or_indentless_sequence?)?)* -// BLOCK-END -// flow_sequence ::= FLOW-SEQUENCE-START -// (flow_sequence_entry FLOW-ENTRY)* -// flow_sequence_entry? -// FLOW-SEQUENCE-END -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// flow_mapping ::= FLOW-MAPPING-START -// (flow_mapping_entry FLOW-ENTRY)* -// flow_mapping_entry? -// FLOW-MAPPING-END -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? - -// Peek the next token in the token queue. -func peek_token(parser *yaml_parser_t) *yaml_token_t { - if parser.token_available || yaml_parser_fetch_more_tokens(parser) { - return &parser.tokens[parser.tokens_head] - } - return nil -} - -// Remove the next token from the queue (must be called after peek_token). -func skip_token(parser *yaml_parser_t) { - parser.token_available = false - parser.tokens_parsed++ - parser.stream_end_produced = parser.tokens[parser.tokens_head].typ == yaml_STREAM_END_TOKEN - parser.tokens_head++ -} - -// Get the next event. -func yaml_parser_parse(parser *yaml_parser_t, event *yaml_event_t) bool { - // Erase the event object. - *event = yaml_event_t{} - - // No events after the end of the stream or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR || parser.state == yaml_PARSE_END_STATE { - return true - } - - // Generate the next event. - return yaml_parser_state_machine(parser, event) -} - -// Set parser error. 
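
The marshal tests above pin down how yaml.v1 maps struct tags to output: field names are lower-cased, `omitempty` drops zero values, `flow` switches a field to inline style, and `-` skips a field entirely. A minimal sketch of that behaviour against the public gopkg.in/yaml.v1 API (the `config` type is illustrative only, and the import is assumed to resolve through the Go 1.5 vendor layout this change introduces):

    package main

    import (
        "fmt"

        "gopkg.in/yaml.v1"
    )

    // config is an illustrative type; its tags mirror the cases the tests cover.
    type config struct {
        Name  string   `yaml:"name"`
        Port  int      `yaml:"port,omitempty"`
        Tags  []string `yaml:"tags,flow"`
        Debug bool     `yaml:"-"`
    }

    func main() {
        out, err := yaml.Marshal(&config{Name: "web", Tags: []string{"a", "b"}})
        if err != nil {
            panic(err)
        }
        fmt.Print(string(out))
        // Expected, per the assertions above:
        // name: web
        // tags: [a, b]
    }
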
-func yaml_parser_set_parser_error(parser *yaml_parser_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -func yaml_parser_set_parser_error_context(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string, problem_mark yaml_mark_t) bool { - parser.error = yaml_PARSER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = problem_mark - return false -} - -// State dispatcher. -func yaml_parser_state_machine(parser *yaml_parser_t, event *yaml_event_t) bool { - //trace("yaml_parser_state_machine", "state:", parser.state.String()) - - switch parser.state { - case yaml_PARSE_STREAM_START_STATE: - return yaml_parser_parse_stream_start(parser, event) - - case yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, true) - - case yaml_PARSE_DOCUMENT_START_STATE: - return yaml_parser_parse_document_start(parser, event, false) - - case yaml_PARSE_DOCUMENT_CONTENT_STATE: - return yaml_parser_parse_document_content(parser, event) - - case yaml_PARSE_DOCUMENT_END_STATE: - return yaml_parser_parse_document_end(parser, event) - - case yaml_PARSE_BLOCK_NODE_STATE: - return yaml_parser_parse_node(parser, event, true, false) - - case yaml_PARSE_BLOCK_NODE_OR_INDENTLESS_SEQUENCE_STATE: - return yaml_parser_parse_node(parser, event, true, true) - - case yaml_PARSE_FLOW_NODE_STATE: - return yaml_parser_parse_node(parser, event, false, false) - - case yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, true) - - case yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_block_sequence_entry(parser, event, false) - - case yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_indentless_sequence_entry(parser, event) - - case yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, true) - - case yaml_PARSE_BLOCK_MAPPING_KEY_STATE: - return yaml_parser_parse_block_mapping_key(parser, event, false) - - case yaml_PARSE_BLOCK_MAPPING_VALUE_STATE: - return yaml_parser_parse_block_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, true) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE: - return yaml_parser_parse_flow_sequence_entry(parser, event, false) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_key(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_value(parser, event) - - case yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE: - return yaml_parser_parse_flow_sequence_entry_mapping_end(parser, event) - - case yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, true) - - case yaml_PARSE_FLOW_MAPPING_KEY_STATE: - return yaml_parser_parse_flow_mapping_key(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, false) - - case yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE: - return yaml_parser_parse_flow_mapping_value(parser, event, true) - - default: - panic("invalid parser state") - } - return false -} - -// Parse the production: -// stream ::= STREAM-START implicit_document? 
explicit_document* STREAM-END -// ************ -func yaml_parser_parse_stream_start(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_STREAM_START_TOKEN { - return yaml_parser_set_parser_error(parser, "did not find expected ", token.start_mark) - } - parser.state = yaml_PARSE_IMPLICIT_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - encoding: token.encoding, - } - skip_token(parser) - return true -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// * -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// ************************* -func yaml_parser_parse_document_start(parser *yaml_parser_t, event *yaml_event_t, implicit bool) bool { - - token := peek_token(parser) - if token == nil { - return false - } - - // Parse extra document end indicators. - if !implicit { - for token.typ == yaml_DOCUMENT_END_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - if implicit && token.typ != yaml_VERSION_DIRECTIVE_TOKEN && - token.typ != yaml_TAG_DIRECTIVE_TOKEN && - token.typ != yaml_DOCUMENT_START_TOKEN && - token.typ != yaml_STREAM_END_TOKEN { - // Parse an implicit document. - if !yaml_parser_process_directives(parser, nil, nil) { - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_BLOCK_NODE_STATE - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - } else if token.typ != yaml_STREAM_END_TOKEN { - // Parse an explicit document. - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - start_mark := token.start_mark - if !yaml_parser_process_directives(parser, &version_directive, &tag_directives) { - return false - } - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_DOCUMENT_START_TOKEN { - yaml_parser_set_parser_error(parser, - "did not find expected ", token.start_mark) - return false - } - parser.states = append(parser.states, yaml_PARSE_DOCUMENT_END_STATE) - parser.state = yaml_PARSE_DOCUMENT_CONTENT_STATE - end_mark := token.end_mark - - *event = yaml_event_t{ - typ: yaml_DOCUMENT_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - version_directive: version_directive, - tag_directives: tag_directives, - implicit: false, - } - skip_token(parser) - - } else { - // Parse the stream end. - parser.state = yaml_PARSE_END_STATE - *event = yaml_event_t{ - typ: yaml_STREAM_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - } - - return true -} - -// Parse the productions: -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? 
DOCUMENT-END* -// *********** -// -func yaml_parser_parse_document_content(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN || - token.typ == yaml_TAG_DIRECTIVE_TOKEN || - token.typ == yaml_DOCUMENT_START_TOKEN || - token.typ == yaml_DOCUMENT_END_TOKEN || - token.typ == yaml_STREAM_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - return yaml_parser_process_empty_scalar(parser, event, - token.start_mark) - } - return yaml_parser_parse_node(parser, event, true, false) -} - -// Parse the productions: -// implicit_document ::= block_node DOCUMENT-END* -// ************* -// explicit_document ::= DIRECTIVE* DOCUMENT-START block_node? DOCUMENT-END* -// -func yaml_parser_parse_document_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - start_mark := token.start_mark - end_mark := token.start_mark - - implicit := true - if token.typ == yaml_DOCUMENT_END_TOKEN { - end_mark = token.end_mark - skip_token(parser) - implicit = false - } - - parser.tag_directives = parser.tag_directives[:0] - - parser.state = yaml_PARSE_DOCUMENT_START_STATE - *event = yaml_event_t{ - typ: yaml_DOCUMENT_END_EVENT, - start_mark: start_mark, - end_mark: end_mark, - implicit: implicit, - } - return true -} - -// Parse the productions: -// block_node_or_indentless_sequence ::= -// ALIAS -// ***** -// | properties (block_content | indentless_block_sequence)? -// ********** * -// | block_content | indentless_block_sequence -// * -// block_node ::= ALIAS -// ***** -// | properties block_content? -// ********** * -// | block_content -// * -// flow_node ::= ALIAS -// ***** -// | properties flow_content? -// ********** * -// | flow_content -// * -// properties ::= TAG ANCHOR? | ANCHOR TAG? 
-// ************************* -// block_content ::= block_collection | flow_collection | SCALAR -// ****** -// flow_content ::= flow_collection | SCALAR -// ****** -func yaml_parser_parse_node(parser *yaml_parser_t, event *yaml_event_t, block, indentless_sequence bool) bool { - //defer trace("yaml_parser_parse_node", "block:", block, "indentless_sequence:", indentless_sequence)() - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_ALIAS_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - *event = yaml_event_t{ - typ: yaml_ALIAS_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - anchor: token.value, - } - skip_token(parser) - return true - } - - start_mark := token.start_mark - end_mark := token.start_mark - - var tag_token bool - var tag_handle, tag_suffix, anchor []byte - var tag_mark yaml_mark_t - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - start_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } else if token.typ == yaml_TAG_TOKEN { - tag_token = true - tag_handle = token.value - tag_suffix = token.suffix - start_mark = token.start_mark - tag_mark = token.start_mark - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_ANCHOR_TOKEN { - anchor = token.value - end_mark = token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - } - - var tag []byte - if tag_token { - if len(tag_handle) == 0 { - tag = tag_suffix - tag_suffix = nil - } else { - for i := range parser.tag_directives { - if bytes.Equal(parser.tag_directives[i].handle, tag_handle) { - tag = append([]byte(nil), parser.tag_directives[i].prefix...) - tag = append(tag, tag_suffix...) 
- break - } - } - if len(tag) == 0 { - yaml_parser_set_parser_error_context(parser, - "while parsing a node", start_mark, - "found undefined tag handle", tag_mark) - return false - } - } - } - - implicit := len(tag) == 0 - if indentless_sequence && token.typ == yaml_BLOCK_ENTRY_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_SCALAR_TOKEN { - var plain_implicit, quoted_implicit bool - end_mark = token.end_mark - if (len(tag) == 0 && token.style == yaml_PLAIN_SCALAR_STYLE) || (len(tag) == 1 && tag[0] == '!') { - plain_implicit = true - } else if len(tag) == 0 { - quoted_implicit = true - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - value: token.value, - implicit: plain_implicit, - quoted_implicit: quoted_implicit, - style: yaml_style_t(token.style), - } - skip_token(parser) - return true - } - if token.typ == yaml_FLOW_SEQUENCE_START_TOKEN { - // [Go] Some of the events below can be merged as they differ only on style. - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_SEQUENCE_STYLE), - } - return true - } - if token.typ == yaml_FLOW_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_FLOW_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_SEQUENCE_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_SEQUENCE_FIRST_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_SEQUENCE_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_SEQUENCE_STYLE), - } - return true - } - if block && token.typ == yaml_BLOCK_MAPPING_START_TOKEN { - end_mark = token.end_mark - parser.state = yaml_PARSE_BLOCK_MAPPING_FIRST_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - style: yaml_style_t(yaml_BLOCK_MAPPING_STYLE), - } - return true - } - if len(anchor) > 0 || len(tag) > 0 { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: start_mark, - end_mark: end_mark, - anchor: anchor, - tag: tag, - implicit: implicit, - quoted_implicit: false, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true - } - - context := "while parsing a flow node" - if block { - context = "while parsing a block node" - } - yaml_parser_set_parser_error_context(parser, context, start_mark, - "did not find expected node content", token.start_mark) - return false -} - -// Parse the productions: -// 
block_sequence ::= BLOCK-SEQUENCE-START (BLOCK-ENTRY block_node?)* BLOCK-END -// ******************** *********** * ********* -// -func yaml_parser_parse_block_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } else { - parser.state = yaml_PARSE_BLOCK_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } - if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block collection", context_mark, - "did not find expected '-' indicator", token.start_mark) -} - -// Parse the productions: -// indentless_sequence ::= (BLOCK-ENTRY block_node?)+ -// *********** * -func yaml_parser_parse_indentless_sequence_entry(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_BLOCK_ENTRY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_BLOCK_ENTRY_TOKEN && - token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, true, false) - } - parser.state = yaml_PARSE_INDENTLESS_SEQUENCE_ENTRY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be token.end_mark? - } - return true -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// ******************* -// ((KEY block_node_or_indentless_sequence?)? 
-// *** * -// (VALUE block_node_or_indentless_sequence?)?)* -// -// BLOCK-END -// ********* -// -func yaml_parser_parse_block_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ == yaml_KEY_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } else { - parser.state = yaml_PARSE_BLOCK_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - } else if token.typ == yaml_BLOCK_END_TOKEN { - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true - } - - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a block mapping", context_mark, - "did not find expected key", token.start_mark) -} - -// Parse the productions: -// block_mapping ::= BLOCK-MAPPING_START -// -// ((KEY block_node_or_indentless_sequence?)? -// -// (VALUE block_node_or_indentless_sequence?)?)* -// ***** * -// BLOCK-END -// -// -func yaml_parser_parse_block_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - mark := token.end_mark - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_KEY_TOKEN && - token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_BLOCK_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_BLOCK_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, true, true) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) - } - parser.state = yaml_PARSE_BLOCK_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence ::= FLOW-SEQUENCE-START -// ******************* -// (flow_sequence_entry FLOW-ENTRY)* -// * ********** -// flow_sequence_entry? -// * -// FLOW-SEQUENCE-END -// ***************** -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow sequence", context_mark, - "did not find expected ',' or ']'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_KEY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_START_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - implicit: true, - style: yaml_style_t(yaml_FLOW_MAPPING_STYLE), - } - skip_token(parser) - return true - } else if token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - - *event = yaml_event_t{ - typ: yaml_SEQUENCE_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - - skip_token(parser) - return true -} - -// -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// *** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_key(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - mark := token.end_mark - skip_token(parser) - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// ***** * -// -func yaml_parser_parse_flow_sequence_entry_mapping_value(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token := peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_SEQUENCE_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_MAPPING_END_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Parse the productions: -// flow_sequence_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? 
-// * -// -func yaml_parser_parse_flow_sequence_entry_mapping_end(parser *yaml_parser_t, event *yaml_event_t) bool { - token := peek_token(parser) - if token == nil { - return false - } - parser.state = yaml_PARSE_FLOW_SEQUENCE_ENTRY_STATE - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.start_mark, // [Go] Shouldn't this be end_mark? - } - return true -} - -// Parse the productions: -// flow_mapping ::= FLOW-MAPPING-START -// ****************** -// (flow_mapping_entry FLOW-ENTRY)* -// * ********** -// flow_mapping_entry? -// ****************** -// FLOW-MAPPING-END -// **************** -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * *** * -// -func yaml_parser_parse_flow_mapping_key(parser *yaml_parser_t, event *yaml_event_t, first bool) bool { - if first { - token := peek_token(parser) - parser.marks = append(parser.marks, token.start_mark) - skip_token(parser) - } - - token := peek_token(parser) - if token == nil { - return false - } - - if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - if !first { - if token.typ == yaml_FLOW_ENTRY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } else { - context_mark := parser.marks[len(parser.marks)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - return yaml_parser_set_parser_error_context(parser, - "while parsing a flow mapping", context_mark, - "did not find expected ',' or '}'", token.start_mark) - } - } - - if token.typ == yaml_KEY_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_VALUE_TOKEN && - token.typ != yaml_FLOW_ENTRY_TOKEN && - token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } else { - parser.state = yaml_PARSE_FLOW_MAPPING_VALUE_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - } else if token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_EMPTY_VALUE_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - - parser.state = parser.states[len(parser.states)-1] - parser.states = parser.states[:len(parser.states)-1] - parser.marks = parser.marks[:len(parser.marks)-1] - *event = yaml_event_t{ - typ: yaml_MAPPING_END_EVENT, - start_mark: token.start_mark, - end_mark: token.end_mark, - } - skip_token(parser) - return true -} - -// Parse the productions: -// flow_mapping_entry ::= flow_node | KEY flow_node? (VALUE flow_node?)? -// * ***** * -// -func yaml_parser_parse_flow_mapping_value(parser *yaml_parser_t, event *yaml_event_t, empty bool) bool { - token := peek_token(parser) - if token == nil { - return false - } - if empty { - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) - } - if token.typ == yaml_VALUE_TOKEN { - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - if token.typ != yaml_FLOW_ENTRY_TOKEN && token.typ != yaml_FLOW_MAPPING_END_TOKEN { - parser.states = append(parser.states, yaml_PARSE_FLOW_MAPPING_KEY_STATE) - return yaml_parser_parse_node(parser, event, false, false) - } - } - parser.state = yaml_PARSE_FLOW_MAPPING_KEY_STATE - return yaml_parser_process_empty_scalar(parser, event, token.start_mark) -} - -// Generate an empty scalar event. 
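
Several of the productions above fall back to an empty-scalar event when a key has no value (yaml_parser_process_empty_scalar, defined next); at the surface API that scalar resolves to nil. A small sketch of that behaviour through the public Unmarshal entry point (import path assumed as above):

    package main

    import (
        "fmt"

        "gopkg.in/yaml.v1"
    )

    func main() {
        // "a:" has a key but no value, so the parser emits an empty scalar,
        // which the decoder resolves to nil.
        var m map[interface{}]interface{}
        if err := yaml.Unmarshal([]byte("a:\nb: 1\n"), &m); err != nil {
            panic(err)
        }
        fmt.Println(m["a"] == nil, m["b"]) // true 1
    }
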
-func yaml_parser_process_empty_scalar(parser *yaml_parser_t, event *yaml_event_t, mark yaml_mark_t) bool { - *event = yaml_event_t{ - typ: yaml_SCALAR_EVENT, - start_mark: mark, - end_mark: mark, - value: nil, // Empty - implicit: true, - style: yaml_style_t(yaml_PLAIN_SCALAR_STYLE), - } - return true -} - -var default_tag_directives = []yaml_tag_directive_t{ - {[]byte("!"), []byte("!")}, - {[]byte("!!"), []byte("tag:yaml.org,2002:")}, -} - -// Parse directives. -func yaml_parser_process_directives(parser *yaml_parser_t, - version_directive_ref **yaml_version_directive_t, - tag_directives_ref *[]yaml_tag_directive_t) bool { - - var version_directive *yaml_version_directive_t - var tag_directives []yaml_tag_directive_t - - token := peek_token(parser) - if token == nil { - return false - } - - for token.typ == yaml_VERSION_DIRECTIVE_TOKEN || token.typ == yaml_TAG_DIRECTIVE_TOKEN { - if token.typ == yaml_VERSION_DIRECTIVE_TOKEN { - if version_directive != nil { - yaml_parser_set_parser_error(parser, - "found duplicate %YAML directive", token.start_mark) - return false - } - if token.major != 1 || token.minor != 1 { - yaml_parser_set_parser_error(parser, - "found incompatible YAML document", token.start_mark) - return false - } - version_directive = &yaml_version_directive_t{ - major: token.major, - minor: token.minor, - } - } else if token.typ == yaml_TAG_DIRECTIVE_TOKEN { - value := yaml_tag_directive_t{ - handle: token.value, - prefix: token.prefix, - } - if !yaml_parser_append_tag_directive(parser, value, false, token.start_mark) { - return false - } - tag_directives = append(tag_directives, value) - } - - skip_token(parser) - token = peek_token(parser) - if token == nil { - return false - } - } - - for i := range default_tag_directives { - if !yaml_parser_append_tag_directive(parser, default_tag_directives[i], true, token.start_mark) { - return false - } - } - - if version_directive_ref != nil { - *version_directive_ref = version_directive - } - if tag_directives_ref != nil { - *tag_directives_ref = tag_directives - } - return true -} - -// Append a tag directive to the directives stack. -func yaml_parser_append_tag_directive(parser *yaml_parser_t, value yaml_tag_directive_t, allow_duplicates bool, mark yaml_mark_t) bool { - for i := range parser.tag_directives { - if bytes.Equal(value.handle, parser.tag_directives[i].handle) { - if allow_duplicates { - return true - } - return yaml_parser_set_parser_error(parser, "found duplicate %TAG directive", mark) - } - } - - // [Go] I suspect the copy is unnecessary. This was likely done - // because there was no way to track ownership of the data. - value_copy := yaml_tag_directive_t{ - handle: make([]byte, len(value.handle)), - prefix: make([]byte, len(value.prefix)), - } - copy(value_copy.handle, value.handle) - copy(value_copy.prefix, value.prefix) - parser.tag_directives = append(parser.tag_directives, value_copy) - return true -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/readerc.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/readerc.go deleted file mode 100644 index d5fb097..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/readerc.go +++ /dev/null @@ -1,391 +0,0 @@ -package yaml - -import ( - "io" -) - -// Set the reader error and return 0. -func yaml_parser_set_reader_error(parser *yaml_parser_t, problem string, offset int, value int) bool { - parser.error = yaml_READER_ERROR - parser.problem = problem - parser.problem_offset = offset - parser.problem_value = value - return false -} - -// Byte order marks. 
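
The reader begins by sniffing these byte-order marks to choose between UTF-8 and UTF-16, defaulting to UTF-8 when no BOM is present. A standalone sketch of that check (detectEncoding is illustrative, not the package's own helper):

    package main

    import (
        "bytes"
        "fmt"
    )

    // detectEncoding mirrors the BOM check in yaml_parser_determine_encoding:
    // inspect the first bytes of the stream and fall back to UTF-8.
    func detectEncoding(in []byte) string {
        switch {
        case bytes.HasPrefix(in, []byte("\xff\xfe")):
            return "UTF-16LE"
        case bytes.HasPrefix(in, []byte("\xfe\xff")):
            return "UTF-16BE"
        case bytes.HasPrefix(in, []byte("\xef\xbb\xbf")):
            return "UTF-8 (with BOM)"
        default:
            return "UTF-8 (assumed)"
        }
    }

    func main() {
        fmt.Println(detectEncoding([]byte("\xef\xbb\xbfa: 1\n"))) // UTF-8 (with BOM)
        fmt.Println(detectEncoding([]byte("a: 1\n")))             // UTF-8 (assumed)
    }
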
-const ( - bom_UTF8 = "\xef\xbb\xbf" - bom_UTF16LE = "\xff\xfe" - bom_UTF16BE = "\xfe\xff" -) - -// Determine the input stream encoding by checking the BOM symbol. If no BOM is -// found, the UTF-8 encoding is assumed. Return 1 on success, 0 on failure. -func yaml_parser_determine_encoding(parser *yaml_parser_t) bool { - // Ensure that we had enough bytes in the raw buffer. - for !parser.eof && len(parser.raw_buffer)-parser.raw_buffer_pos < 3 { - if !yaml_parser_update_raw_buffer(parser) { - return false - } - } - - // Determine the encoding. - buf := parser.raw_buffer - pos := parser.raw_buffer_pos - avail := len(buf) - pos - if avail >= 2 && buf[pos] == bom_UTF16LE[0] && buf[pos+1] == bom_UTF16LE[1] { - parser.encoding = yaml_UTF16LE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 2 && buf[pos] == bom_UTF16BE[0] && buf[pos+1] == bom_UTF16BE[1] { - parser.encoding = yaml_UTF16BE_ENCODING - parser.raw_buffer_pos += 2 - parser.offset += 2 - } else if avail >= 3 && buf[pos] == bom_UTF8[0] && buf[pos+1] == bom_UTF8[1] && buf[pos+2] == bom_UTF8[2] { - parser.encoding = yaml_UTF8_ENCODING - parser.raw_buffer_pos += 3 - parser.offset += 3 - } else { - parser.encoding = yaml_UTF8_ENCODING - } - return true -} - -// Update the raw buffer. -func yaml_parser_update_raw_buffer(parser *yaml_parser_t) bool { - size_read := 0 - - // Return if the raw buffer is full. - if parser.raw_buffer_pos == 0 && len(parser.raw_buffer) == cap(parser.raw_buffer) { - return true - } - - // Return on EOF. - if parser.eof { - return true - } - - // Move the remaining bytes in the raw buffer to the beginning. - if parser.raw_buffer_pos > 0 && parser.raw_buffer_pos < len(parser.raw_buffer) { - copy(parser.raw_buffer, parser.raw_buffer[parser.raw_buffer_pos:]) - } - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)-parser.raw_buffer_pos] - parser.raw_buffer_pos = 0 - - // Call the read handler to fill the buffer. - size_read, err := parser.read_handler(parser, parser.raw_buffer[len(parser.raw_buffer):cap(parser.raw_buffer)]) - parser.raw_buffer = parser.raw_buffer[:len(parser.raw_buffer)+size_read] - if err == io.EOF { - parser.eof = true - } else if err != nil { - return yaml_parser_set_reader_error(parser, "input error: "+err.Error(), parser.offset, -1) - } - return true -} - -// Ensure that the buffer contains at least `length` characters. -// Return true on success, false on failure. -// -// The length is supposed to be significantly less that the buffer size. -func yaml_parser_update_buffer(parser *yaml_parser_t, length int) bool { - if parser.read_handler == nil { - panic("read handler must be set") - } - - // If the EOF flag is set and the raw buffer is empty, do nothing. - if parser.eof && parser.raw_buffer_pos == len(parser.raw_buffer) { - return true - } - - // Return if the buffer contains enough characters. - if parser.unread >= length { - return true - } - - // Determine the input encoding if it is not known yet. - if parser.encoding == yaml_ANY_ENCODING { - if !yaml_parser_determine_encoding(parser) { - return false - } - } - - // Move the unread characters to the beginning of the buffer. - buffer_len := len(parser.buffer) - if parser.buffer_pos > 0 && parser.buffer_pos < buffer_len { - copy(parser.buffer, parser.buffer[parser.buffer_pos:]) - buffer_len -= parser.buffer_pos - parser.buffer_pos = 0 - } else if parser.buffer_pos == buffer_len { - buffer_len = 0 - parser.buffer_pos = 0 - } - - // Open the whole buffer for writing, and cut it before returning. 
- parser.buffer = parser.buffer[:cap(parser.buffer)] - - // Fill the buffer until it has enough characters. - first := true - for parser.unread < length { - - // Fill the raw buffer if necessary. - if !first || parser.raw_buffer_pos == len(parser.raw_buffer) { - if !yaml_parser_update_raw_buffer(parser) { - parser.buffer = parser.buffer[:buffer_len] - return false - } - } - first = false - - // Decode the raw buffer. - inner: - for parser.raw_buffer_pos != len(parser.raw_buffer) { - var value rune - var width int - - raw_unread := len(parser.raw_buffer) - parser.raw_buffer_pos - - // Decode the next character. - switch parser.encoding { - case yaml_UTF8_ENCODING: - // Decode a UTF-8 character. Check RFC 3629 - // (http://www.ietf.org/rfc/rfc3629.txt) for more details. - // - // The following table (taken from the RFC) is used for - // decoding. - // - // Char. number range | UTF-8 octet sequence - // (hexadecimal) | (binary) - // --------------------+------------------------------------ - // 0000 0000-0000 007F | 0xxxxxxx - // 0000 0080-0000 07FF | 110xxxxx 10xxxxxx - // 0000 0800-0000 FFFF | 1110xxxx 10xxxxxx 10xxxxxx - // 0001 0000-0010 FFFF | 11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - // - // Additionally, the characters in the range 0xD800-0xDFFF - // are prohibited as they are reserved for use with UTF-16 - // surrogate pairs. - - // Determine the length of the UTF-8 sequence. - octet := parser.raw_buffer[parser.raw_buffer_pos] - switch { - case octet&0x80 == 0x00: - width = 1 - case octet&0xE0 == 0xC0: - width = 2 - case octet&0xF0 == 0xE0: - width = 3 - case octet&0xF8 == 0xF0: - width = 4 - default: - // The leading octet is invalid. - return yaml_parser_set_reader_error(parser, - "invalid leading UTF-8 octet", - parser.offset, int(octet)) - } - - // Check if the raw buffer contains an incomplete character. - if width > raw_unread { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-8 octet sequence", - parser.offset, -1) - } - break inner - } - - // Decode the leading octet. - switch { - case octet&0x80 == 0x00: - value = rune(octet & 0x7F) - case octet&0xE0 == 0xC0: - value = rune(octet & 0x1F) - case octet&0xF0 == 0xE0: - value = rune(octet & 0x0F) - case octet&0xF8 == 0xF0: - value = rune(octet & 0x07) - default: - value = 0 - } - - // Check and decode the trailing octets. - for k := 1; k < width; k++ { - octet = parser.raw_buffer[parser.raw_buffer_pos+k] - - // Check if the octet is valid. - if (octet & 0xC0) != 0x80 { - return yaml_parser_set_reader_error(parser, - "invalid trailing UTF-8 octet", - parser.offset+k, int(octet)) - } - - // Decode the octet. - value = (value << 6) + rune(octet&0x3F) - } - - // Check the length of the sequence against the value. - switch { - case width == 1: - case width == 2 && value >= 0x80: - case width == 3 && value >= 0x800: - case width == 4 && value >= 0x10000: - default: - return yaml_parser_set_reader_error(parser, - "invalid length of a UTF-8 sequence", - parser.offset, -1) - } - - // Check the range of the value. - if value >= 0xD800 && value <= 0xDFFF || value > 0x10FFFF { - return yaml_parser_set_reader_error(parser, - "invalid Unicode character", - parser.offset, int(value)) - } - - case yaml_UTF16LE_ENCODING, yaml_UTF16BE_ENCODING: - var low, high int - if parser.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - high, low = 1, 0 - } - - // The UTF-16 encoding is not as simple as one might - // naively think. Check RFC 2781 - // (http://www.ietf.org/rfc/rfc2781.txt). 
- // - // Normally, two subsequent bytes describe a Unicode - // character. However a special technique (called a - // surrogate pair) is used for specifying character - // values larger than 0xFFFF. - // - // A surrogate pair consists of two pseudo-characters: - // high surrogate area (0xD800-0xDBFF) - // low surrogate area (0xDC00-0xDFFF) - // - // The following formulas are used for decoding - // and encoding characters using surrogate pairs: - // - // U = U' + 0x10000 (0x01 00 00 <= U <= 0x10 FF FF) - // U' = yyyyyyyyyyxxxxxxxxxx (0 <= U' <= 0x0F FF FF) - // W1 = 110110yyyyyyyyyy - // W2 = 110111xxxxxxxxxx - // - // where U is the character value, W1 is the high surrogate - // area, W2 is the low surrogate area. - - // Check for incomplete UTF-16 character. - if raw_unread < 2 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 character", - parser.offset, -1) - } - break inner - } - - // Get the character. - value = rune(parser.raw_buffer[parser.raw_buffer_pos+low]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high]) << 8) - - // Check for unexpected low surrogate area. - if value&0xFC00 == 0xDC00 { - return yaml_parser_set_reader_error(parser, - "unexpected low surrogate area", - parser.offset, int(value)) - } - - // Check for a high surrogate area. - if value&0xFC00 == 0xD800 { - width = 4 - - // Check for incomplete surrogate pair. - if raw_unread < 4 { - if parser.eof { - return yaml_parser_set_reader_error(parser, - "incomplete UTF-16 surrogate pair", - parser.offset, -1) - } - break inner - } - - // Get the next character. - value2 := rune(parser.raw_buffer[parser.raw_buffer_pos+low+2]) + - (rune(parser.raw_buffer[parser.raw_buffer_pos+high+2]) << 8) - - // Check for a low surrogate area. - if value2&0xFC00 != 0xDC00 { - return yaml_parser_set_reader_error(parser, - "expected low surrogate area", - parser.offset+2, int(value2)) - } - - // Generate the value of the surrogate pair. - value = 0x10000 + ((value & 0x3FF) << 10) + (value2 & 0x3FF) - } else { - width = 2 - } - - default: - panic("impossible") - } - - // Check if the character is in the allowed range: - // #x9 | #xA | #xD | [#x20-#x7E] (8 bit) - // | #x85 | [#xA0-#xD7FF] | [#xE000-#xFFFD] (16 bit) - // | [#x10000-#x10FFFF] (32 bit) - switch { - case value == 0x09: - case value == 0x0A: - case value == 0x0D: - case value >= 0x20 && value <= 0x7E: - case value == 0x85: - case value >= 0xA0 && value <= 0xD7FF: - case value >= 0xE000 && value <= 0xFFFD: - case value >= 0x10000 && value <= 0x10FFFF: - default: - return yaml_parser_set_reader_error(parser, - "control characters are not allowed", - parser.offset, int(value)) - } - - // Move the raw pointers. - parser.raw_buffer_pos += width - parser.offset += width - - // Finally put the character into the buffer. - if value <= 0x7F { - // 0000 0000-0000 007F . 0xxxxxxx - parser.buffer[buffer_len+0] = byte(value) - } else if value <= 0x7FF { - // 0000 0080-0000 07FF . 110xxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xC0 + (value >> 6)) - parser.buffer[buffer_len+1] = byte(0x80 + (value & 0x3F)) - } else if value <= 0xFFFF { - // 0000 0800-0000 FFFF . 1110xxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xE0 + (value >> 12)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + (value & 0x3F)) - } else { - // 0001 0000-0010 FFFF . 
11110xxx 10xxxxxx 10xxxxxx 10xxxxxx - parser.buffer[buffer_len+0] = byte(0xF0 + (value >> 18)) - parser.buffer[buffer_len+1] = byte(0x80 + ((value >> 12) & 0x3F)) - parser.buffer[buffer_len+2] = byte(0x80 + ((value >> 6) & 0x3F)) - parser.buffer[buffer_len+3] = byte(0x80 + (value & 0x3F)) - } - buffer_len += width - - parser.unread++ - } - - // On EOF, put NUL into the buffer and return. - if parser.eof { - parser.buffer[buffer_len] = 0 - buffer_len++ - parser.unread++ - break - } - } - parser.buffer = parser.buffer[:buffer_len] - return true -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/resolve.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/resolve.go deleted file mode 100644 index 06c698a..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/resolve.go +++ /dev/null @@ -1,190 +0,0 @@ -package yaml - -import ( - "encoding/base64" - "fmt" - "math" - "strconv" - "strings" - "unicode/utf8" -) - -// TODO: merge, timestamps, base 60 floats, omap. - -type resolveMapItem struct { - value interface{} - tag string -} - -var resolveTable = make([]byte, 256) -var resolveMap = make(map[string]resolveMapItem) - -func init() { - t := resolveTable - t[int('+')] = 'S' // Sign - t[int('-')] = 'S' - for _, c := range "0123456789" { - t[int(c)] = 'D' // Digit - } - for _, c := range "yYnNtTfFoO~" { - t[int(c)] = 'M' // In map - } - t[int('.')] = '.' // Float (potentially in map) - - var resolveMapList = []struct { - v interface{} - tag string - l []string - }{ - {true, yaml_BOOL_TAG, []string{"y", "Y", "yes", "Yes", "YES"}}, - {true, yaml_BOOL_TAG, []string{"true", "True", "TRUE"}}, - {true, yaml_BOOL_TAG, []string{"on", "On", "ON"}}, - {false, yaml_BOOL_TAG, []string{"n", "N", "no", "No", "NO"}}, - {false, yaml_BOOL_TAG, []string{"false", "False", "FALSE"}}, - {false, yaml_BOOL_TAG, []string{"off", "Off", "OFF"}}, - {nil, yaml_NULL_TAG, []string{"", "~", "null", "Null", "NULL"}}, - {math.NaN(), yaml_FLOAT_TAG, []string{".nan", ".NaN", ".NAN"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{".inf", ".Inf", ".INF"}}, - {math.Inf(+1), yaml_FLOAT_TAG, []string{"+.inf", "+.Inf", "+.INF"}}, - {math.Inf(-1), yaml_FLOAT_TAG, []string{"-.inf", "-.Inf", "-.INF"}}, - {"<<", yaml_MERGE_TAG, []string{"<<"}}, - } - - m := resolveMap - for _, item := range resolveMapList { - for _, s := range item.l { - m[s] = resolveMapItem{item.v, item.tag} - } - } -} - -const longTagPrefix = "tag:yaml.org,2002:" - -func shortTag(tag string) string { - // TODO This can easily be made faster and produce less garbage. - if strings.HasPrefix(tag, longTagPrefix) { - return "!!" + tag[len(longTagPrefix):] - } - return tag -} - -func longTag(tag string) string { - if strings.HasPrefix(tag, "!!") { - return longTagPrefix + tag[2:] - } - return tag -} - -func resolvableTag(tag string) bool { - switch tag { - case "", yaml_STR_TAG, yaml_BOOL_TAG, yaml_INT_TAG, yaml_FLOAT_TAG, yaml_NULL_TAG: - return true - } - return false -} - -func resolve(tag string, in string) (rtag string, out interface{}) { - if !resolvableTag(tag) { - return tag, in - } - - defer func() { - switch tag { - case "", rtag, yaml_STR_TAG, yaml_BINARY_TAG: - return - } - fail(fmt.Sprintf("cannot decode %s `%s` as a %s", shortTag(rtag), in, shortTag(tag))) - }() - - // Any data is accepted as a !!str or !!binary. - // Otherwise, the prefix is enough of a hint about what it might be. - hint := byte('N') - if in != "" { - hint = resolveTable[in[0]] - } - if hint != 0 && tag != yaml_STR_TAG && tag != yaml_BINARY_TAG { - // Handle things we can lookup in a map. 
- if item, ok := resolveMap[in]; ok { - return item.tag, item.value - } - - // Base 60 floats are a bad idea, were dropped in YAML 1.2, and - // are purposefully unsupported here. They're still quoted on - // the way out for compatibility with other parser, though. - - switch hint { - case 'M': - // We've already checked the map above. - - case '.': - // Not in the map, so maybe a normal float. - floatv, err := strconv.ParseFloat(in, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - - case 'D', 'S': - // Int, float, or timestamp. - plain := strings.Replace(in, "_", "", -1) - intv, err := strconv.ParseInt(plain, 0, 64) - if err == nil { - if intv == int64(int(intv)) { - return yaml_INT_TAG, int(intv) - } else { - return yaml_INT_TAG, intv - } - } - floatv, err := strconv.ParseFloat(plain, 64) - if err == nil { - return yaml_FLOAT_TAG, floatv - } - if strings.HasPrefix(plain, "0b") { - intv, err := strconv.ParseInt(plain[2:], 2, 64) - if err == nil { - return yaml_INT_TAG, int(intv) - } - } else if strings.HasPrefix(plain, "-0b") { - intv, err := strconv.ParseInt(plain[3:], 2, 64) - if err == nil { - return yaml_INT_TAG, -int(intv) - } - } - // XXX Handle timestamps here. - - default: - panic("resolveTable item not yet handled: " + string(rune(hint)) + " (with " + in + ")") - } - } - if tag == yaml_BINARY_TAG { - return yaml_BINARY_TAG, in - } - if utf8.ValidString(in) { - return yaml_STR_TAG, in - } - return yaml_BINARY_TAG, encodeBase64(in) -} - -// encodeBase64 encodes s as base64 that is broken up into multiple lines -// as appropriate for the resulting length. -func encodeBase64(s string) string { - const lineLen = 70 - encLen := base64.StdEncoding.EncodedLen(len(s)) - lines := encLen/lineLen + 1 - buf := make([]byte, encLen*2+lines) - in := buf[0:encLen] - out := buf[encLen:] - base64.StdEncoding.Encode(in, []byte(s)) - k := 0 - for i := 0; i < len(in); i += lineLen { - j := i + lineLen - if j > len(in) { - j = len(in) - } - k += copy(out[k:], in[i:j]) - if lines > 1 { - out[k] = '\n' - k++ - } - } - return string(out[:k]) -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/scannerc.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/scannerc.go deleted file mode 100644 index fe93b19..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/scannerc.go +++ /dev/null @@ -1,2710 +0,0 @@ -package yaml - -import ( - "bytes" - "fmt" -) - -// Introduction -// ************ -// -// The following notes assume that you are familiar with the YAML specification -// (http://yaml.org/spec/cvs/current.html). We mostly follow it, although in -// some cases we are less restrictive that it requires. -// -// The process of transforming a YAML stream into a sequence of events is -// divided on two steps: Scanning and Parsing. -// -// The Scanner transforms the input stream into a sequence of tokens, while the -// parser transform the sequence of tokens produced by the Scanner into a -// sequence of parsing events. -// -// The Scanner is rather clever and complicated. The Parser, on the contrary, -// is a straightforward implementation of a recursive-descendant parser (or, -// LL(1) parser, as it is usually called). -// -// Actually there are two issues of Scanning that might be called "clever", the -// rest is quite straightforward. The issues are "block collection start" and -// "simple keys". Both issues are explained below in details. -// -// Here the Scanning step is explained and implemented. We start with the list -// of all the tokens produced by the Scanner together with short descriptions. 
-// -// Now, tokens: -// -// STREAM-START(encoding) # The stream start. -// STREAM-END # The stream end. -// VERSION-DIRECTIVE(major,minor) # The '%YAML' directive. -// TAG-DIRECTIVE(handle,prefix) # The '%TAG' directive. -// DOCUMENT-START # '---' -// DOCUMENT-END # '...' -// BLOCK-SEQUENCE-START # Indentation increase denoting a block -// BLOCK-MAPPING-START # sequence or a block mapping. -// BLOCK-END # Indentation decrease. -// FLOW-SEQUENCE-START # '[' -// FLOW-SEQUENCE-END # ']' -// BLOCK-SEQUENCE-START # '{' -// BLOCK-SEQUENCE-END # '}' -// BLOCK-ENTRY # '-' -// FLOW-ENTRY # ',' -// KEY # '?' or nothing (simple keys). -// VALUE # ':' -// ALIAS(anchor) # '*anchor' -// ANCHOR(anchor) # '&anchor' -// TAG(handle,suffix) # '!handle!suffix' -// SCALAR(value,style) # A scalar. -// -// The following two tokens are "virtual" tokens denoting the beginning and the -// end of the stream: -// -// STREAM-START(encoding) -// STREAM-END -// -// We pass the information about the input stream encoding with the -// STREAM-START token. -// -// The next two tokens are responsible for tags: -// -// VERSION-DIRECTIVE(major,minor) -// TAG-DIRECTIVE(handle,prefix) -// -// Example: -// -// %YAML 1.1 -// %TAG ! !foo -// %TAG !yaml! tag:yaml.org,2002: -// --- -// -// The correspoding sequence of tokens: -// -// STREAM-START(utf-8) -// VERSION-DIRECTIVE(1,1) -// TAG-DIRECTIVE("!","!foo") -// TAG-DIRECTIVE("!yaml","tag:yaml.org,2002:") -// DOCUMENT-START -// STREAM-END -// -// Note that the VERSION-DIRECTIVE and TAG-DIRECTIVE tokens occupy a whole -// line. -// -// The document start and end indicators are represented by: -// -// DOCUMENT-START -// DOCUMENT-END -// -// Note that if a YAML stream contains an implicit document (without '---' -// and '...' indicators), no DOCUMENT-START and DOCUMENT-END tokens will be -// produced. -// -// In the following examples, we present whole documents together with the -// produced tokens. -// -// 1. An implicit document: -// -// 'a scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// STREAM-END -// -// 2. An explicit document: -// -// --- -// 'a scalar' -// ... -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// SCALAR("a scalar",single-quoted) -// DOCUMENT-END -// STREAM-END -// -// 3. Several documents in a stream: -// -// 'a scalar' -// --- -// 'another scalar' -// --- -// 'yet another scalar' -// -// Tokens: -// -// STREAM-START(utf-8) -// SCALAR("a scalar",single-quoted) -// DOCUMENT-START -// SCALAR("another scalar",single-quoted) -// DOCUMENT-START -// SCALAR("yet another scalar",single-quoted) -// STREAM-END -// -// We have already introduced the SCALAR token above. The following tokens are -// used to describe aliases, anchors, tag, and scalars: -// -// ALIAS(anchor) -// ANCHOR(anchor) -// TAG(handle,suffix) -// SCALAR(value,style) -// -// The following series of examples illustrate the usage of these tokens: -// -// 1. A recursive sequence: -// -// &A [ *A ] -// -// Tokens: -// -// STREAM-START(utf-8) -// ANCHOR("A") -// FLOW-SEQUENCE-START -// ALIAS("A") -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A tagged scalar: -// -// !!float "3.14" # A good approximation. -// -// Tokens: -// -// STREAM-START(utf-8) -// TAG("!!","float") -// SCALAR("3.14",double-quoted) -// STREAM-END -// -// 3. Various scalar styles: -// -// --- # Implicit empty plain scalars do not produce tokens. 
-// --- a plain scalar -// --- 'a single-quoted scalar' -// --- "a double-quoted scalar" -// --- |- -// a literal scalar -// --- >- -// a folded -// scalar -// -// Tokens: -// -// STREAM-START(utf-8) -// DOCUMENT-START -// DOCUMENT-START -// SCALAR("a plain scalar",plain) -// DOCUMENT-START -// SCALAR("a single-quoted scalar",single-quoted) -// DOCUMENT-START -// SCALAR("a double-quoted scalar",double-quoted) -// DOCUMENT-START -// SCALAR("a literal scalar",literal) -// DOCUMENT-START -// SCALAR("a folded scalar",folded) -// STREAM-END -// -// Now it's time to review collection-related tokens. We will start with -// flow collections: -// -// FLOW-SEQUENCE-START -// FLOW-SEQUENCE-END -// FLOW-MAPPING-START -// FLOW-MAPPING-END -// FLOW-ENTRY -// KEY -// VALUE -// -// The tokens FLOW-SEQUENCE-START, FLOW-SEQUENCE-END, FLOW-MAPPING-START, and -// FLOW-MAPPING-END represent the indicators '[', ']', '{', and '}' -// correspondingly. FLOW-ENTRY represent the ',' indicator. Finally the -// indicators '?' and ':', which are used for denoting mapping keys and values, -// are represented by the KEY and VALUE tokens. -// -// The following examples show flow collections: -// -// 1. A flow sequence: -// -// [item 1, item 2, item 3] -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-SEQUENCE-START -// SCALAR("item 1",plain) -// FLOW-ENTRY -// SCALAR("item 2",plain) -// FLOW-ENTRY -// SCALAR("item 3",plain) -// FLOW-SEQUENCE-END -// STREAM-END -// -// 2. A flow mapping: -// -// { -// a simple key: a value, # Note that the KEY token is produced. -// ? a complex key: another value, -// } -// -// Tokens: -// -// STREAM-START(utf-8) -// FLOW-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// FLOW-ENTRY -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// FLOW-ENTRY -// FLOW-MAPPING-END -// STREAM-END -// -// A simple key is a key which is not denoted by the '?' indicator. Note that -// the Scanner still produce the KEY token whenever it encounters a simple key. -// -// For scanning block collections, the following tokens are used (note that we -// repeat KEY and VALUE here): -// -// BLOCK-SEQUENCE-START -// BLOCK-MAPPING-START -// BLOCK-END -// BLOCK-ENTRY -// KEY -// VALUE -// -// The tokens BLOCK-SEQUENCE-START and BLOCK-MAPPING-START denote indentation -// increase that precedes a block collection (cf. the INDENT token in Python). -// The token BLOCK-END denote indentation decrease that ends a block collection -// (cf. the DEDENT token in Python). However YAML has some syntax pecularities -// that makes detections of these tokens more complex. -// -// The tokens BLOCK-ENTRY, KEY, and VALUE are used to represent the indicators -// '-', '?', and ':' correspondingly. -// -// The following examples show how the tokens BLOCK-SEQUENCE-START, -// BLOCK-MAPPING-START, and BLOCK-END are emitted by the Scanner: -// -// 1. 
Block sequences: -// -// - item 1 -// - item 2 -// - -// - item 3.1 -// - item 3.2 -// - -// key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 3.1",plain) -// BLOCK-ENTRY -// SCALAR("item 3.2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Block mappings: -// -// a simple key: a value # The KEY token is produced here. -// ? a complex key -// : another value -// a mapping: -// key 1: value 1 -// key 2: value 2 -// a sequence: -// - item 1 -// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a simple key",plain) -// VALUE -// SCALAR("a value",plain) -// KEY -// SCALAR("a complex key",plain) -// VALUE -// SCALAR("another value",plain) -// KEY -// SCALAR("a mapping",plain) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML does not always require to start a new block collection from a new -// line. If the current line contains only '-', '?', and ':' indicators, a new -// block collection may start at the current line. The following examples -// illustrate this case: -// -// 1. Collections in a sequence: -// -// - - item 1 -// - item 2 -// - key 1: value 1 -// key 2: value 2 -// - ? complex key -// : complex value -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-ENTRY -// BLOCK-MAPPING-START -// KEY -// SCALAR("complex key") -// VALUE -// SCALAR("complex value") -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// 2. Collections in a mapping: -// -// ? a sequence -// : - item 1 -// - item 2 -// ? a mapping -// : key 1: value 1 -// key 2: value 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("a sequence",plain) -// VALUE -// BLOCK-SEQUENCE-START -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// KEY -// SCALAR("a mapping",plain) -// VALUE -// BLOCK-MAPPING-START -// KEY -// SCALAR("key 1",plain) -// VALUE -// SCALAR("value 1",plain) -// KEY -// SCALAR("key 2",plain) -// VALUE -// SCALAR("value 2",plain) -// BLOCK-END -// BLOCK-END -// STREAM-END -// -// YAML also permits non-indented sequences if they are included into a block -// mapping. In this case, the token BLOCK-SEQUENCE-START is not produced: -// -// key: -// - item 1 # BLOCK-SEQUENCE-START is NOT produced here. 
-// - item 2 -// -// Tokens: -// -// STREAM-START(utf-8) -// BLOCK-MAPPING-START -// KEY -// SCALAR("key",plain) -// VALUE -// BLOCK-ENTRY -// SCALAR("item 1",plain) -// BLOCK-ENTRY -// SCALAR("item 2",plain) -// BLOCK-END -// - -// Ensure that the buffer contains the required number of characters. -// Return true on success, false on failure (reader error or memory error). -func cache(parser *yaml_parser_t, length int) bool { - // [Go] This was inlined: !cache(A, B) -> unread < B && !update(A, B) - return parser.unread >= length || yaml_parser_update_buffer(parser, length) -} - -// Advance the buffer pointer. -func skip(parser *yaml_parser_t) { - parser.mark.index++ - parser.mark.column++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) -} - -func skip_line(parser *yaml_parser_t) { - if is_crlf(parser.buffer, parser.buffer_pos) { - parser.mark.index += 2 - parser.mark.column = 0 - parser.mark.line++ - parser.unread -= 2 - parser.buffer_pos += 2 - } else if is_break(parser.buffer, parser.buffer_pos) { - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - parser.buffer_pos += width(parser.buffer[parser.buffer_pos]) - } -} - -// Copy a character to a string buffer and advance pointers. -func read(parser *yaml_parser_t, s []byte) []byte { - w := width(parser.buffer[parser.buffer_pos]) - if w == 0 { - panic("invalid character sequence") - } - if len(s) == 0 { - s = make([]byte, 0, 32) - } - if w == 1 && len(s)+w <= cap(s) { - s = s[:len(s)+1] - s[len(s)-1] = parser.buffer[parser.buffer_pos] - parser.buffer_pos++ - } else { - s = append(s, parser.buffer[parser.buffer_pos:parser.buffer_pos+w]...) - parser.buffer_pos += w - } - parser.mark.index++ - parser.mark.column++ - parser.unread-- - return s -} - -// Copy a line break character to a string buffer and advance pointers. -func read_line(parser *yaml_parser_t, s []byte) []byte { - buf := parser.buffer - pos := parser.buffer_pos - switch { - case buf[pos] == '\r' && buf[pos+1] == '\n': - // CR LF . LF - s = append(s, '\n') - parser.buffer_pos += 2 - parser.mark.index++ - parser.unread-- - case buf[pos] == '\r' || buf[pos] == '\n': - // CR|LF . LF - s = append(s, '\n') - parser.buffer_pos += 1 - case buf[pos] == '\xC2' && buf[pos+1] == '\x85': - // NEL . LF - s = append(s, '\n') - parser.buffer_pos += 2 - case buf[pos] == '\xE2' && buf[pos+1] == '\x80' && (buf[pos+2] == '\xA8' || buf[pos+2] == '\xA9'): - // LS|PS . LS|PS - s = append(s, buf[parser.buffer_pos:pos+3]...) - parser.buffer_pos += 3 - default: - return s - } - parser.mark.index++ - parser.mark.column = 0 - parser.mark.line++ - parser.unread-- - return s -} - -// Get the next token. -func yaml_parser_scan(parser *yaml_parser_t, token *yaml_token_t) bool { - // Erase the token object. - *token = yaml_token_t{} // [Go] Is this necessary? - - // No tokens after STREAM-END or error. - if parser.stream_end_produced || parser.error != yaml_NO_ERROR { - return true - } - - // Ensure that the tokens queue contains enough tokens. - if !parser.token_available { - if !yaml_parser_fetch_more_tokens(parser) { - return false - } - } - - // Fetch the next token from the queue. - *token = parser.tokens[parser.tokens_head] - parser.tokens_head++ - parser.tokens_parsed++ - parser.token_available = false - - if token.typ == yaml_STREAM_END_TOKEN { - parser.stream_end_produced = true - } - return true -} - -// Set the scanner error and return false. 
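To make the token inventory documented above concrete, here is a minimal sketch of driving the scanner. It is illustrative only: it assumes it is compiled inside this yaml package (the scanner API is unexported) and that the package's yaml_parser_initialize and yaml_parser_set_input_string helpers are available, as they are in upstream go-yaml; dumpTokens itself is a hypothetical name.

// dumpTokens prints the type of every token yaml_parser_scan produces for the
// given input, stopping at STREAM-END. Illustrative helper, not part of the
// vendored file.
func dumpTokens(input []byte) {
	var parser yaml_parser_t
	if !yaml_parser_initialize(&parser) { // assumed package helper
		panic("failed to initialize the YAML parser")
	}
	yaml_parser_set_input_string(&parser, input) // assumed package helper
	for {
		var token yaml_token_t
		if !yaml_parser_scan(&parser, &token) {
			panic(parser.problem) // message filled in by yaml_parser_set_scanner_error
		}
		fmt.Println(token.typ) // a yaml_token_type_t value, e.g. yaml_KEY_TOKEN
		if token.typ == yaml_STREAM_END_TOKEN {
			break // STREAM-END: nothing follows
		}
	}
}

Fed []byte("a: 1\n"), this should walk STREAM-START, BLOCK-MAPPING-START, KEY, SCALAR("a"), VALUE, SCALAR("1"), BLOCK-END, STREAM-END, matching the block-mapping example in the comment above.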
-func yaml_parser_set_scanner_error(parser *yaml_parser_t, context string, context_mark yaml_mark_t, problem string) bool { - parser.error = yaml_SCANNER_ERROR - parser.context = context - parser.context_mark = context_mark - parser.problem = problem - parser.problem_mark = parser.mark - return false -} - -func yaml_parser_set_scanner_tag_error(parser *yaml_parser_t, directive bool, context_mark yaml_mark_t, problem string) bool { - context := "while parsing a tag" - if directive { - context = "while parsing a %TAG directive" - } - return yaml_parser_set_scanner_error(parser, context, context_mark, "did not find URI escaped octet") -} - -func trace(args ...interface{}) func() { - pargs := append([]interface{}{"+++"}, args...) - fmt.Println(pargs...) - pargs = append([]interface{}{"---"}, args...) - return func() { fmt.Println(pargs...) } -} - -// Ensure that the tokens queue contains at least one token which can be -// returned to the Parser. -func yaml_parser_fetch_more_tokens(parser *yaml_parser_t) bool { - // While we need more tokens to fetch, do it. - for { - // Check if we really need to fetch more tokens. - need_more_tokens := false - - if parser.tokens_head == len(parser.tokens) { - // Queue is empty. - need_more_tokens = true - } else { - // Check if any potential simple key may occupy the head position. - if !yaml_parser_stale_simple_keys(parser) { - return false - } - - for i := range parser.simple_keys { - simple_key := &parser.simple_keys[i] - if simple_key.possible && simple_key.token_number == parser.tokens_parsed { - need_more_tokens = true - break - } - } - } - - // We are finished. - if !need_more_tokens { - break - } - // Fetch the next token. - if !yaml_parser_fetch_next_token(parser) { - return false - } - } - - parser.token_available = true - return true -} - -// The dispatcher for token fetchers. -func yaml_parser_fetch_next_token(parser *yaml_parser_t) bool { - // Ensure that the buffer is initialized. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check if we just started scanning. Fetch STREAM-START then. - if !parser.stream_start_produced { - return yaml_parser_fetch_stream_start(parser) - } - - // Eat whitespaces and comments until we reach the next token. - if !yaml_parser_scan_to_next_token(parser) { - return false - } - - // Remove obsolete potential simple keys. - if !yaml_parser_stale_simple_keys(parser) { - return false - } - - // Check the indentation level against the current column. - if !yaml_parser_unroll_indent(parser, parser.mark.column) { - return false - } - - // Ensure that the buffer contains at least 4 characters. 4 is the length - // of the longest indicators ('--- ' and '... '). - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - // Is it the end of the stream? - if is_z(parser.buffer, parser.buffer_pos) { - return yaml_parser_fetch_stream_end(parser) - } - - // Is it a directive? - if parser.mark.column == 0 && parser.buffer[parser.buffer_pos] == '%' { - return yaml_parser_fetch_directive(parser) - } - - buf := parser.buffer - pos := parser.buffer_pos - - // Is it the document start indicator? - if parser.mark.column == 0 && buf[pos] == '-' && buf[pos+1] == '-' && buf[pos+2] == '-' && is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_START_TOKEN) - } - - // Is it the document end indicator? - if parser.mark.column == 0 && buf[pos] == '.' && buf[pos+1] == '.' && buf[pos+2] == '.' 
&& is_blankz(buf, pos+3) { - return yaml_parser_fetch_document_indicator(parser, yaml_DOCUMENT_END_TOKEN) - } - - // Is it the flow sequence start indicator? - if buf[pos] == '[' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_SEQUENCE_START_TOKEN) - } - - // Is it the flow mapping start indicator? - if parser.buffer[parser.buffer_pos] == '{' { - return yaml_parser_fetch_flow_collection_start(parser, yaml_FLOW_MAPPING_START_TOKEN) - } - - // Is it the flow sequence end indicator? - if parser.buffer[parser.buffer_pos] == ']' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_SEQUENCE_END_TOKEN) - } - - // Is it the flow mapping end indicator? - if parser.buffer[parser.buffer_pos] == '}' { - return yaml_parser_fetch_flow_collection_end(parser, - yaml_FLOW_MAPPING_END_TOKEN) - } - - // Is it the flow entry indicator? - if parser.buffer[parser.buffer_pos] == ',' { - return yaml_parser_fetch_flow_entry(parser) - } - - // Is it the block entry indicator? - if parser.buffer[parser.buffer_pos] == '-' && is_blankz(parser.buffer, parser.buffer_pos+1) { - return yaml_parser_fetch_block_entry(parser) - } - - // Is it the key indicator? - if parser.buffer[parser.buffer_pos] == '?' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_key(parser) - } - - // Is it the value indicator? - if parser.buffer[parser.buffer_pos] == ':' && (parser.flow_level > 0 || is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_value(parser) - } - - // Is it an alias? - if parser.buffer[parser.buffer_pos] == '*' { - return yaml_parser_fetch_anchor(parser, yaml_ALIAS_TOKEN) - } - - // Is it an anchor? - if parser.buffer[parser.buffer_pos] == '&' { - return yaml_parser_fetch_anchor(parser, yaml_ANCHOR_TOKEN) - } - - // Is it a tag? - if parser.buffer[parser.buffer_pos] == '!' { - return yaml_parser_fetch_tag(parser) - } - - // Is it a literal scalar? - if parser.buffer[parser.buffer_pos] == '|' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, true) - } - - // Is it a folded scalar? - if parser.buffer[parser.buffer_pos] == '>' && parser.flow_level == 0 { - return yaml_parser_fetch_block_scalar(parser, false) - } - - // Is it a single-quoted scalar? - if parser.buffer[parser.buffer_pos] == '\'' { - return yaml_parser_fetch_flow_scalar(parser, true) - } - - // Is it a double-quoted scalar? - if parser.buffer[parser.buffer_pos] == '"' { - return yaml_parser_fetch_flow_scalar(parser, false) - } - - // Is it a plain scalar? - // - // A plain scalar may start with any non-blank characters except - // - // '-', '?', ':', ',', '[', ']', '{', '}', - // '#', '&', '*', '!', '|', '>', '\'', '\"', - // '%', '@', '`'. - // - // In the block context (and, for the '-' indicator, in the flow context - // too), it may also start with the characters - // - // '-', '?', ':' - // - // if it is followed by a non-space character. - // - // The last rule is more restrictive than the specification requires. - // [Go] Make this logic more reasonable. - //switch parser.buffer[parser.buffer_pos] { - //case '-', '?', ':', ',', '?', '-', ',', ':', ']', '[', '}', '{', '&', '#', '!', '*', '>', '|', '"', '\'', '@', '%', '-', '`': - //} - if !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '-' || - parser.buffer[parser.buffer_pos] == '?' 
|| parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}' || parser.buffer[parser.buffer_pos] == '#' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '*' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '|' || - parser.buffer[parser.buffer_pos] == '>' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '"' || parser.buffer[parser.buffer_pos] == '%' || - parser.buffer[parser.buffer_pos] == '@' || parser.buffer[parser.buffer_pos] == '`') || - (parser.buffer[parser.buffer_pos] == '-' && !is_blank(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level == 0 && - (parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == ':') && - !is_blankz(parser.buffer, parser.buffer_pos+1)) { - return yaml_parser_fetch_plain_scalar(parser) - } - - // If we don't determine the token type so far, it is an error. - return yaml_parser_set_scanner_error(parser, - "while scanning for the next token", parser.mark, - "found character that cannot start any token") -} - -// Check the list of potential simple keys and remove the positions that -// cannot contain simple keys anymore. -func yaml_parser_stale_simple_keys(parser *yaml_parser_t) bool { - // Check for a potential simple key for each flow level. - for i := range parser.simple_keys { - simple_key := &parser.simple_keys[i] - - // The specification requires that a simple key - // - // - is limited to a single line, - // - is shorter than 1024 characters. - if simple_key.possible && (simple_key.mark.line < parser.mark.line || simple_key.mark.index+1024 < parser.mark.index) { - - // Check if the potential simple key to be removed is required. - if simple_key.required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", simple_key.mark, - "could not find expected ':'") - } - simple_key.possible = false - } - } - return true -} - -// Check if a simple key may start at the current position and add it if -// needed. -func yaml_parser_save_simple_key(parser *yaml_parser_t) bool { - // A simple key is required at the current position if the scanner is in - // the block context and the current column coincides with the indentation - // level. - - required := parser.flow_level == 0 && parser.indent == parser.mark.column - - // A simple key is required only when it is the first token in the current - // line. Therefore it is always allowed. But we add a check anyway. - if required && !parser.simple_key_allowed { - panic("should not happen") - } - - // - // If the current position may start a simple key, save it. - // - if parser.simple_key_allowed { - simple_key := yaml_simple_key_t{ - possible: true, - required: required, - token_number: parser.tokens_parsed + (len(parser.tokens) - parser.tokens_head), - } - simple_key.mark = parser.mark - - if !yaml_parser_remove_simple_key(parser) { - return false - } - parser.simple_keys[len(parser.simple_keys)-1] = simple_key - } - return true -} - -// Remove a potential simple key at the current flow level. -func yaml_parser_remove_simple_key(parser *yaml_parser_t) bool { - i := len(parser.simple_keys) - 1 - if parser.simple_keys[i].possible { - // If the key is required, it is an error. 
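// The single-line / 1024-character rule enforced by yaml_parser_stale_simple_keys
// above can be stated on its own. An illustrative predicate (hypothetical, not
// part of the vendored scanner) over the yaml_mark_t positions it compares:
func simpleKeyIsStale(keyMark, cur yaml_mark_t) bool {
	// A simple key must stay on one line and span fewer than 1024 characters;
	// once either limit is crossed the candidate can no longer become a key.
	return keyMark.line < cur.line || keyMark.index+1024 < cur.index
}
// When this turns true for a candidate that was required (block context, first
// token on its line), the scanner reports "could not find expected ':'".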
- if parser.simple_keys[i].required { - return yaml_parser_set_scanner_error(parser, - "while scanning a simple key", parser.simple_keys[i].mark, - "could not find expected ':'") - } - } - // Remove the key from the stack. - parser.simple_keys[i].possible = false - return true -} - -// Increase the flow level and resize the simple key list if needed. -func yaml_parser_increase_flow_level(parser *yaml_parser_t) bool { - // Reset the simple key on the next level. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - // Increase the flow level. - parser.flow_level++ - return true -} - -// Decrease the flow level. -func yaml_parser_decrease_flow_level(parser *yaml_parser_t) bool { - if parser.flow_level > 0 { - parser.flow_level-- - parser.simple_keys = parser.simple_keys[:len(parser.simple_keys)-1] - } - return true -} - -// Push the current indentation level to the stack and set the new level -// the current column is greater than the indentation level. In this case, -// append or insert the specified token into the token queue. -func yaml_parser_roll_indent(parser *yaml_parser_t, column, number int, typ yaml_token_type_t, mark yaml_mark_t) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - if parser.indent < column { - // Push the current indentation level to the stack and set the new - // indentation level. - parser.indents = append(parser.indents, parser.indent) - parser.indent = column - - // Create a token and insert it into the queue. - token := yaml_token_t{ - typ: typ, - start_mark: mark, - end_mark: mark, - } - if number > -1 { - number -= parser.tokens_parsed - } - yaml_insert_token(parser, number, &token) - } - return true -} - -// Pop indentation levels from the indents stack until the current level -// becomes less or equal to the column. For each intendation level, append -// the BLOCK-END token. -func yaml_parser_unroll_indent(parser *yaml_parser_t, column int) bool { - // In the flow context, do nothing. - if parser.flow_level > 0 { - return true - } - - // Loop through the intendation levels in the stack. - for parser.indent > column { - // Create a token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - - // Pop the indentation level. - parser.indent = parser.indents[len(parser.indents)-1] - parser.indents = parser.indents[:len(parser.indents)-1] - } - return true -} - -// Initialize the scanner and produce the STREAM-START token. -func yaml_parser_fetch_stream_start(parser *yaml_parser_t) bool { - - // Set the initial indentation. - parser.indent = -1 - - // Initialize the simple key stack. - parser.simple_keys = append(parser.simple_keys, yaml_simple_key_t{}) - - // A simple key is allowed at the beginning of the stream. - parser.simple_key_allowed = true - - // We have started. - parser.stream_start_produced = true - - // Create the STREAM-START token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_START_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - encoding: parser.encoding, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the STREAM-END token and shut down the scanner. -func yaml_parser_fetch_stream_end(parser *yaml_parser_t) bool { - - // Force new line. - if parser.mark.column != 0 { - parser.mark.column = 0 - parser.mark.line++ - } - - // Reset the indentation level. 
- if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the STREAM-END token and append it to the queue. - token := yaml_token_t{ - typ: yaml_STREAM_END_TOKEN, - start_mark: parser.mark, - end_mark: parser.mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce a VERSION-DIRECTIVE or TAG-DIRECTIVE token. -func yaml_parser_fetch_directive(parser *yaml_parser_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Create the YAML-DIRECTIVE or TAG-DIRECTIVE token. - token := yaml_token_t{} - if !yaml_parser_scan_directive(parser, &token) { - return false - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the DOCUMENT-START or DOCUMENT-END token. -func yaml_parser_fetch_document_indicator(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset the indentation level. - if !yaml_parser_unroll_indent(parser, -1) { - return false - } - - // Reset simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - parser.simple_key_allowed = false - - // Consume the token. - start_mark := parser.mark - - skip(parser) - skip(parser) - skip(parser) - - end_mark := parser.mark - - // Create the DOCUMENT-START or DOCUMENT-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-START or FLOW-MAPPING-START token. -func yaml_parser_fetch_flow_collection_start(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // The indicators '[' and '{' may start a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // Increase the flow level. - if !yaml_parser_increase_flow_level(parser) { - return false - } - - // A simple key may follow the indicators '[' and '{'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-START of FLOW-MAPPING-START token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-SEQUENCE-END or FLOW-MAPPING-END token. -func yaml_parser_fetch_flow_collection_end(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // Reset any potential simple key on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Decrease the flow level. - if !yaml_parser_decrease_flow_level(parser) { - return false - } - - // No simple keys after the indicators ']' and '}'. - parser.simple_key_allowed = false - - // Consume the token. - - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-SEQUENCE-END of FLOW-MAPPING-END token. - token := yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - } - // Append the token to the queue. - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the FLOW-ENTRY token. 
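The indentation bookkeeping used by the fetchers above boils down to the stack maintained by yaml_parser_roll_indent and yaml_parser_unroll_indent. A standalone sketch of that idea, with hypothetical names and simplified behaviour (the real scanner only rolls when a '-', '?' or ':' actually opens a block collection):

// indentEvents takes the column of the first token on each line and returns
// the events the indent stack would generate: "START" when the indentation
// grows, one "END" per level closed when it shrinks, and one "END" per level
// still open at the end of the stream (cf. INDENT/DEDENT in Python).
func indentEvents(columns []int) []string {
	var events []string
	indents := []int{-1} // the scanner starts with parser.indent == -1
	for _, col := range columns {
		for col < indents[len(indents)-1] {
			indents = indents[:len(indents)-1] // unroll: pop and close a block
			events = append(events, "END")
		}
		if col > indents[len(indents)-1] {
			indents = append(indents, col) // roll: push and open a block
			events = append(events, "START")
		}
	}
	for len(indents) > 1 { // the stream end unrolls everything still open
		indents = indents[:len(indents)-1]
		events = append(events, "END")
	}
	return events
}

For the columns 0, 2, 0 (a top-level mapping containing one nested mapping) this yields START, START, END, END, which is the BLOCK-MAPPING-START / BLOCK-END shape shown in the block-mapping examples earlier in this file's comment.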
-func yaml_parser_fetch_flow_entry(parser *yaml_parser_t) bool { - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after ','. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the FLOW-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_FLOW_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the BLOCK-ENTRY token. -func yaml_parser_fetch_block_entry(parser *yaml_parser_t) bool { - // Check if the scanner is in the block context. - if parser.flow_level == 0 { - // Check if we are allowed to start a new entry. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "block sequence entries are not allowed in this context") - } - // Add the BLOCK-SEQUENCE-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_SEQUENCE_START_TOKEN, parser.mark) { - return false - } - } else { - // It is an error for the '-' indicator to occur in the flow context, - // but we let the Parser detect and report about it because the Parser - // is able to point to the context. - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '-'. - parser.simple_key_allowed = true - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the BLOCK-ENTRY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_BLOCK_ENTRY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the KEY token. -func yaml_parser_fetch_key(parser *yaml_parser_t) bool { - - // In the block context, additional checks are required. - if parser.flow_level == 0 { - // Check if we are allowed to start a new key (not nessesary simple). - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping keys are not allowed in this context") - } - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Reset any potential simple keys on the current flow level. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // Simple keys are allowed after '?' in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the KEY token and append it to the queue. - token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the VALUE token. -func yaml_parser_fetch_value(parser *yaml_parser_t) bool { - - simple_key := &parser.simple_keys[len(parser.simple_keys)-1] - - // Have we found a simple key? - if simple_key.possible { - // Create the KEY token and insert it into the queue. 
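// Worked example of the retroactive insertion happening here, for the input
// "a: 1": when 'a' was scanned, yaml_parser_save_simple_key recorded the queue
// position the upcoming SCALAR("a") token would occupy (token_number), and the
// scalar was then emitted as an ordinary plain scalar. Only now, on seeing ':',
// does the scanner learn that it was a key, so it inserts a KEY token at
// offset token_number - tokens_parsed (i.e. in front of SCALAR("a")) and, via
// yaml_parser_roll_indent below, a BLOCK-MAPPING-START in front of that KEY.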
- token := yaml_token_t{ - typ: yaml_KEY_TOKEN, - start_mark: simple_key.mark, - end_mark: simple_key.mark, - } - yaml_insert_token(parser, simple_key.token_number-parser.tokens_parsed, &token) - - // In the block context, we may need to add the BLOCK-MAPPING-START token. - if !yaml_parser_roll_indent(parser, simple_key.mark.column, - simple_key.token_number, - yaml_BLOCK_MAPPING_START_TOKEN, simple_key.mark) { - return false - } - - // Remove the simple key. - simple_key.possible = false - - // A simple key cannot follow another simple key. - parser.simple_key_allowed = false - - } else { - // The ':' indicator follows a complex key. - - // In the block context, extra checks are required. - if parser.flow_level == 0 { - - // Check if we are allowed to start a complex value. - if !parser.simple_key_allowed { - return yaml_parser_set_scanner_error(parser, "", parser.mark, - "mapping values are not allowed in this context") - } - - // Add the BLOCK-MAPPING-START token if needed. - if !yaml_parser_roll_indent(parser, parser.mark.column, -1, yaml_BLOCK_MAPPING_START_TOKEN, parser.mark) { - return false - } - } - - // Simple keys after ':' are allowed in the block context. - parser.simple_key_allowed = parser.flow_level == 0 - } - - // Consume the token. - start_mark := parser.mark - skip(parser) - end_mark := parser.mark - - // Create the VALUE token and append it to the queue. - token := yaml_token_t{ - typ: yaml_VALUE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the ALIAS or ANCHOR token. -func yaml_parser_fetch_anchor(parser *yaml_parser_t, typ yaml_token_type_t) bool { - // An anchor or an alias could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow an anchor or an alias. - parser.simple_key_allowed = false - - // Create the ALIAS or ANCHOR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_anchor(parser, &token, typ) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the TAG token. -func yaml_parser_fetch_tag(parser *yaml_parser_t) bool { - // A tag could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a tag. - parser.simple_key_allowed = false - - // Create the TAG token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_tag(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,literal) or SCALAR(...,folded) tokens. -func yaml_parser_fetch_block_scalar(parser *yaml_parser_t, literal bool) bool { - // Remove any potential simple keys. - if !yaml_parser_remove_simple_key(parser) { - return false - } - - // A simple key may follow a block scalar. - parser.simple_key_allowed = true - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_block_scalar(parser, &token, literal) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,single-quoted) or SCALAR(...,double-quoted) tokens. -func yaml_parser_fetch_flow_scalar(parser *yaml_parser_t, single bool) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. 
- var token yaml_token_t - if !yaml_parser_scan_flow_scalar(parser, &token, single) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Produce the SCALAR(...,plain) token. -func yaml_parser_fetch_plain_scalar(parser *yaml_parser_t) bool { - // A plain scalar could be a simple key. - if !yaml_parser_save_simple_key(parser) { - return false - } - - // A simple key cannot follow a flow scalar. - parser.simple_key_allowed = false - - // Create the SCALAR token and append it to the queue. - var token yaml_token_t - if !yaml_parser_scan_plain_scalar(parser, &token) { - return false - } - yaml_insert_token(parser, -1, &token) - return true -} - -// Eat whitespaces and comments until the next token is found. -func yaml_parser_scan_to_next_token(parser *yaml_parser_t) bool { - - // Until the next token is not found. - for { - // Allow the BOM mark to start a line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.mark.column == 0 && is_bom(parser.buffer, parser.buffer_pos) { - skip(parser) - } - - // Eat whitespaces. - // Tabs are allowed: - // - in the flow context - // - in the block context, but not at the beginning of the line or - // after '-', '?', or ':' (complex value). - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for parser.buffer[parser.buffer_pos] == ' ' || ((parser.flow_level > 0 || !parser.simple_key_allowed) && parser.buffer[parser.buffer_pos] == '\t') { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Eat a comment until a line break. - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // If it is a line break, eat it. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - - // In the block context, a new line may start a simple key. - if parser.flow_level == 0 { - parser.simple_key_allowed = true - } - } else { - break // We have found a token. - } - } - - return true -} - -// Scan a YAML-DIRECTIVE or TAG-DIRECTIVE token. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_directive(parser *yaml_parser_t, token *yaml_token_t) bool { - // Eat '%'. - start_mark := parser.mark - skip(parser) - - // Scan the directive name. - var name []byte - if !yaml_parser_scan_directive_name(parser, start_mark, &name) { - return false - } - - // Is it a YAML directive? - if bytes.Equal(name, []byte("YAML")) { - // Scan the VERSION directive value. - var major, minor int8 - if !yaml_parser_scan_version_directive_value(parser, start_mark, &major, &minor) { - return false - } - end_mark := parser.mark - - // Create a VERSION-DIRECTIVE token. - *token = yaml_token_t{ - typ: yaml_VERSION_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - major: major, - minor: minor, - } - - // Is it a TAG directive? - } else if bytes.Equal(name, []byte("TAG")) { - // Scan the TAG directive value. - var handle, prefix []byte - if !yaml_parser_scan_tag_directive_value(parser, start_mark, &handle, &prefix) { - return false - } - end_mark := parser.mark - - // Create a TAG-DIRECTIVE token. 
- *token = yaml_token_t{ - typ: yaml_TAG_DIRECTIVE_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - prefix: prefix, - } - - // Unknown directive. - } else { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found uknown directive name") - return false - } - - // Eat the rest of the line including any comments. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. - if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - return true -} - -// Scan the directive name. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^ -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^ -// -func yaml_parser_scan_directive_name(parser *yaml_parser_t, start_mark yaml_mark_t, name *[]byte) bool { - // Consume the directive name. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - var s []byte - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the name is empty. - if len(s) == 0 { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "could not find expected directive name") - return false - } - - // Check for an blank character after the name. - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a directive", - start_mark, "found unexpected non-alphabetical character") - return false - } - *name = s - return true -} - -// Scan the value of VERSION-DIRECTIVE. -// -// Scope: -// %YAML 1.1 # a comment \n -// ^^^^^^ -func yaml_parser_scan_version_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, major, minor *int8) bool { - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the major version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, major) { - return false - } - - // Eat '.'. - if parser.buffer[parser.buffer_pos] != '.' { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected digit or '.' character") - } - - skip(parser) - - // Consume the minor version number. - if !yaml_parser_scan_version_directive_number(parser, start_mark, minor) { - return false - } - return true -} - -const max_number_length = 2 - -// Scan the version number of VERSION-DIRECTIVE. 
-// -// Scope: -// %YAML 1.1 # a comment \n -// ^ -// %YAML 1.1 # a comment \n -// ^ -func yaml_parser_scan_version_directive_number(parser *yaml_parser_t, start_mark yaml_mark_t, number *int8) bool { - - // Repeat while the next character is digit. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var value, length int8 - for is_digit(parser.buffer, parser.buffer_pos) { - // Check if the number is too long. - length++ - if length > max_number_length { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "found extremely long version number") - } - value = value*10 + int8(as_digit(parser.buffer, parser.buffer_pos)) - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the number was present. - if length == 0 { - return yaml_parser_set_scanner_error(parser, "while scanning a %YAML directive", - start_mark, "did not find expected version number") - } - *number = value - return true -} - -// Scan the value of a TAG-DIRECTIVE token. -// -// Scope: -// %TAG !yaml! tag:yaml.org,2002: \n -// ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ -// -func yaml_parser_scan_tag_directive_value(parser *yaml_parser_t, start_mark yaml_mark_t, handle, prefix *[]byte) bool { - var handle_value, prefix_value []byte - - // Eat whitespaces. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a handle. - if !yaml_parser_scan_tag_handle(parser, true, start_mark, &handle_value) { - return false - } - - // Expect a whitespace. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blank(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace") - return false - } - - // Eat whitespaces. - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Scan a prefix. - if !yaml_parser_scan_tag_uri(parser, true, nil, start_mark, &prefix_value) { - return false - } - - // Expect a whitespace or line break. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a %TAG directive", - start_mark, "did not find expected whitespace or line break") - return false - } - - *handle = handle_value - *prefix = prefix_value - return true -} - -func yaml_parser_scan_anchor(parser *yaml_parser_t, token *yaml_token_t, typ yaml_token_type_t) bool { - var s []byte - - // Eat the indicator character. - start_mark := parser.mark - skip(parser) - - // Consume the value. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - end_mark := parser.mark - - /* - * Check if length of the anchor is greater than 0 and it is followed by - * a whitespace character or one of the indicators: - * - * '?', ':', ',', ']', '}', '%', '@', '`'. - */ - - if len(s) == 0 || - !(is_blankz(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == '?' 
|| - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == ',' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '}' || - parser.buffer[parser.buffer_pos] == '%' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '`') { - context := "while scanning an alias" - if typ == yaml_ANCHOR_TOKEN { - context = "while scanning an anchor" - } - yaml_parser_set_scanner_error(parser, context, start_mark, - "did not find expected alphabetic or numeric character") - return false - } - - // Create a token. - *token = yaml_token_t{ - typ: typ, - start_mark: start_mark, - end_mark: end_mark, - value: s, - } - - return true -} - -/* - * Scan a TAG token. - */ - -func yaml_parser_scan_tag(parser *yaml_parser_t, token *yaml_token_t) bool { - var handle, suffix []byte - - start_mark := parser.mark - - // Check if the tag is in the canonical form. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - if parser.buffer[parser.buffer_pos+1] == '<' { - // Keep the handle as '' - - // Eat '!<' - skip(parser) - skip(parser) - - // Consume the tag value. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - - // Check for '>' and eat it. - if parser.buffer[parser.buffer_pos] != '>' { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find the expected '>'") - return false - } - - skip(parser) - } else { - // The tag has either the '!suffix' or the '!handle!suffix' form. - - // First, try to scan a handle. - if !yaml_parser_scan_tag_handle(parser, false, start_mark, &handle) { - return false - } - - // Check if it is, indeed, handle. - if handle[0] == '!' && len(handle) > 1 && handle[len(handle)-1] == '!' { - // Scan the suffix now. - if !yaml_parser_scan_tag_uri(parser, false, nil, start_mark, &suffix) { - return false - } - } else { - // It wasn't a handle after all. Scan the rest of the tag. - if !yaml_parser_scan_tag_uri(parser, false, handle, start_mark, &suffix) { - return false - } - - // Set the handle to '!'. - handle = []byte{'!'} - - // A special case: the '!' tag. Set the handle to '' and the - // suffix to '!'. - if len(suffix) == 0 { - handle, suffix = suffix, handle - } - } - } - - // Check the character which ends the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if !is_blankz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a tag", - start_mark, "did not find expected whitespace or line break") - return false - } - - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_TAG_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: handle, - suffix: suffix, - } - return true -} - -// Scan a tag handle. -func yaml_parser_scan_tag_handle(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, handle *[]byte) bool { - // Check the initial '!' character. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] != '!' { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - - var s []byte - - // Copy the '!' character. - s = read(parser, s) - - // Copy all subsequent alphabetical and numerical characters. 
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_alpha(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the trailing character is '!' and copy it. - if parser.buffer[parser.buffer_pos] == '!' { - s = read(parser, s) - } else { - // It's either the '!' tag or not really a tag handle. If it's a %TAG - // directive, it's an error. If it's a tag token, it must be a part of URI. - if directive && !(s[0] == '!' && s[1] == 0) { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected '!'") - return false - } - } - - *handle = s - return true -} - -// Scan a tag. -func yaml_parser_scan_tag_uri(parser *yaml_parser_t, directive bool, head []byte, start_mark yaml_mark_t, uri *[]byte) bool { - //size_t length = head ? strlen((char *)head) : 0 - var s []byte - - // Copy the head if needed. - // - // Note that we don't copy the leading '!' character. - if len(head) > 1 { - s = append(s, head[1:]...) - } - - // Scan the tag. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // The set of characters that may appear in URI is as follows: - // - // '0'-'9', 'A'-'Z', 'a'-'z', '_', '-', ';', '/', '?', ':', '@', '&', - // '=', '+', '$', ',', '.', '!', '~', '*', '\'', '(', ')', '[', ']', - // '%'. - // [Go] Convert this into more reasonable logic. - for is_alpha(parser.buffer, parser.buffer_pos) || parser.buffer[parser.buffer_pos] == ';' || - parser.buffer[parser.buffer_pos] == '/' || parser.buffer[parser.buffer_pos] == '?' || - parser.buffer[parser.buffer_pos] == ':' || parser.buffer[parser.buffer_pos] == '@' || - parser.buffer[parser.buffer_pos] == '&' || parser.buffer[parser.buffer_pos] == '=' || - parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '$' || - parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == '.' || - parser.buffer[parser.buffer_pos] == '!' || parser.buffer[parser.buffer_pos] == '~' || - parser.buffer[parser.buffer_pos] == '*' || parser.buffer[parser.buffer_pos] == '\'' || - parser.buffer[parser.buffer_pos] == '(' || parser.buffer[parser.buffer_pos] == ')' || - parser.buffer[parser.buffer_pos] == '[' || parser.buffer[parser.buffer_pos] == ']' || - parser.buffer[parser.buffer_pos] == '%' { - // Check if it is a URI-escape sequence. - if parser.buffer[parser.buffer_pos] == '%' { - if !yaml_parser_scan_uri_escapes(parser, directive, start_mark, &s) { - return false - } - } else { - s = read(parser, s) - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check if the tag is non-empty. - if len(s) == 0 { - yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find expected tag URI") - return false - } - *uri = s - return true -} - -// Decode an URI-escape sequence corresponding to a single UTF-8 character. -func yaml_parser_scan_uri_escapes(parser *yaml_parser_t, directive bool, start_mark yaml_mark_t, s *[]byte) bool { - - // Decode the required number of characters. - w := 1024 - for w > 0 { - // Check for a URI-escaped octet. 
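// Worked example for this loop: for the escaped input "%C3%A9" the first
// iteration decodes the octet 0xC3; since w is still 1024 this is treated as
// the leading octet, width(0xC3) == 2, so w becomes 2 and one more escaped
// octet is expected. The second iteration decodes 0xA9, checks that it looks
// like a UTF-8 continuation byte (0xA9 & 0xC0 == 0x80), and appends it; w then
// reaches 0, and the two bytes together are the UTF-8 encoding of U+00E9 ('é').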
- if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - - if !(parser.buffer[parser.buffer_pos] == '%' && - is_hex(parser.buffer, parser.buffer_pos+1) && - is_hex(parser.buffer, parser.buffer_pos+2)) { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "did not find URI escaped octet") - } - - // Get the octet. - octet := byte((as_hex(parser.buffer, parser.buffer_pos+1) << 4) + as_hex(parser.buffer, parser.buffer_pos+2)) - - // If it is the leading octet, determine the length of the UTF-8 sequence. - if w == 1024 { - w = width(octet) - if w == 0 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect leading UTF-8 octet") - } - } else { - // Check if the trailing octet is correct. - if octet&0xC0 != 0x80 { - return yaml_parser_set_scanner_tag_error(parser, directive, - start_mark, "found an incorrect trailing UTF-8 octet") - } - } - - // Copy the octet and move the pointers. - *s = append(*s, octet) - skip(parser) - skip(parser) - skip(parser) - w-- - } - return true -} - -// Scan a block scalar. -func yaml_parser_scan_block_scalar(parser *yaml_parser_t, token *yaml_token_t, literal bool) bool { - // Eat the indicator '|' or '>'. - start_mark := parser.mark - skip(parser) - - // Scan the additional block scalar indicators. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - // Check for a chomping indicator. - var chomping, increment int - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - // Set the chomping method and eat the indicator. - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - - // Check for an indentation indicator. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if is_digit(parser.buffer, parser.buffer_pos) { - // Check that the intendation is greater than 0. - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an intendation indicator equal to 0") - return false - } - - // Get the intendation level and eat the indicator. - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - } - - } else if is_digit(parser.buffer, parser.buffer_pos) { - // Do the same as above, but in the opposite order. - - if parser.buffer[parser.buffer_pos] == '0' { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found an intendation indicator equal to 0") - return false - } - increment = as_digit(parser.buffer, parser.buffer_pos) - skip(parser) - - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - if parser.buffer[parser.buffer_pos] == '+' || parser.buffer[parser.buffer_pos] == '-' { - if parser.buffer[parser.buffer_pos] == '+' { - chomping = +1 - } else { - chomping = -1 - } - skip(parser) - } - } - - // Eat whitespaces and comments to the end of the line. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for is_blank(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.buffer[parser.buffer_pos] == '#' { - for !is_breakz(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - } - - // Check if we are at the end of the line. 
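The header handled above is the '|' or '>' indicator plus an optional chomping indicator ('-' strip, '+' keep, default clip) and an optional explicit indentation digit. A quick way to see the chomping variants end to end is the package's public API; the following is a small, separate illustrative program, assuming the usual gopkg.in/yaml.v1 import path for this vendored copy:

package main

import (
	"fmt"

	"gopkg.in/yaml.v1"
)

func main() {
	for _, doc := range []string{
		"s: |\n  text\n\n",  // clip (default): decodes as "text\n"
		"s: |-\n  text\n\n", // strip:          decodes as "text"
		"s: |+\n  text\n\n", // keep:           decodes as "text\n\n"
	} {
		var out struct{ S string }
		if err := yaml.Unmarshal([]byte(doc), &out); err != nil {
			panic(err)
		}
		fmt.Printf("%q\n", out.S)
	}
}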
- if !is_breakz(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "did not find expected comment or line break") - return false - } - - // Eat a line break. - if is_break(parser.buffer, parser.buffer_pos) { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - skip_line(parser) - } - - end_mark := parser.mark - - // Set the intendation level if it was specified. - var indent int - if increment > 0 { - if parser.indent >= 0 { - indent = parser.indent + increment - } else { - indent = increment - } - } - - // Scan the leading line breaks and determine the indentation level if needed. - var s, leading_break, trailing_breaks []byte - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - - // Scan the block scalar content. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - var leading_blank, trailing_blank bool - for parser.mark.column == indent && !is_z(parser.buffer, parser.buffer_pos) { - // We are at the beginning of a non-empty line. - - // Is it a trailing whitespace? - trailing_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Check if we need to fold the leading line break. - if !literal && !leading_blank && !trailing_blank && len(leading_break) > 0 && leading_break[0] == '\n' { - // Do we need to join the lines by space? - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } - } else { - s = append(s, leading_break...) - } - leading_break = leading_break[:0] - - // Append the remaining line breaks. - s = append(s, trailing_breaks...) - trailing_breaks = trailing_breaks[:0] - - // Is it a leading whitespace? - leading_blank = is_blank(parser.buffer, parser.buffer_pos) - - // Consume the current line. - for !is_breakz(parser.buffer, parser.buffer_pos) { - s = read(parser, s) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - leading_break = read_line(parser, leading_break) - - // Eat the following intendation spaces and line breaks. - if !yaml_parser_scan_block_scalar_breaks(parser, &indent, &trailing_breaks, start_mark, &end_mark) { - return false - } - } - - // Chomp the tail. - if chomping != -1 { - s = append(s, leading_break...) - } - if chomping == 1 { - s = append(s, trailing_breaks...) - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_LITERAL_SCALAR_STYLE, - } - if !literal { - token.style = yaml_FOLDED_SCALAR_STYLE - } - return true -} - -// Scan intendation spaces and line breaks for a block scalar. Determine the -// intendation level if needed. -func yaml_parser_scan_block_scalar_breaks(parser *yaml_parser_t, indent *int, breaks *[]byte, start_mark yaml_mark_t, end_mark *yaml_mark_t) bool { - *end_mark = parser.mark - - // Eat the intendation spaces and line breaks. - max_indent := 0 - for { - // Eat the intendation spaces. 
- if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - for (*indent == 0 || parser.mark.column < *indent) && is_space(parser.buffer, parser.buffer_pos) { - skip(parser) - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - if parser.mark.column > max_indent { - max_indent = parser.mark.column - } - - // Check for a tab character messing the intendation. - if (*indent == 0 || parser.mark.column < *indent) && is_tab(parser.buffer, parser.buffer_pos) { - return yaml_parser_set_scanner_error(parser, "while scanning a block scalar", - start_mark, "found a tab character where an intendation space is expected") - } - - // Have we found a non-empty line? - if !is_break(parser.buffer, parser.buffer_pos) { - break - } - - // Consume the line break. - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - // [Go] Should really be returning breaks instead. - *breaks = read_line(parser, *breaks) - *end_mark = parser.mark - } - - // Determine the indentation level if needed. - if *indent == 0 { - *indent = max_indent - if *indent < parser.indent+1 { - *indent = parser.indent + 1 - } - if *indent < 1 { - *indent = 1 - } - } - return true -} - -// Scan a quoted scalar. -func yaml_parser_scan_flow_scalar(parser *yaml_parser_t, token *yaml_token_t, single bool) bool { - // Eat the left quote. - start_mark := parser.mark - skip(parser) - - // Consume the content of the quoted scalar. - var s, leading_break, trailing_breaks, whitespaces []byte - for { - // Check that there are no document indicators at the beginning of the line. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected document indicator") - return false - } - - // Check for EOF. - if is_z(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a quoted scalar", - start_mark, "found unexpected end of stream") - return false - } - - // Consume non-blank characters. - leading_blanks := false - for !is_blankz(parser.buffer, parser.buffer_pos) { - if single && parser.buffer[parser.buffer_pos] == '\'' && parser.buffer[parser.buffer_pos+1] == '\'' { - // Is is an escaped single quote. - s = append(s, '\'') - skip(parser) - skip(parser) - - } else if single && parser.buffer[parser.buffer_pos] == '\'' { - // It is a right single quote. - break - } else if !single && parser.buffer[parser.buffer_pos] == '"' { - // It is a right double quote. - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' && is_break(parser.buffer, parser.buffer_pos+1) { - // It is an escaped line break. - if parser.unread < 3 && !yaml_parser_update_buffer(parser, 3) { - return false - } - skip(parser) - skip_line(parser) - leading_blanks = true - break - - } else if !single && parser.buffer[parser.buffer_pos] == '\\' { - // It is an escape sequence. - code_length := 0 - - // Check the escape character. 
- switch parser.buffer[parser.buffer_pos+1] { - case '0': - s = append(s, 0) - case 'a': - s = append(s, '\x07') - case 'b': - s = append(s, '\x08') - case 't', '\t': - s = append(s, '\x09') - case 'n': - s = append(s, '\x0A') - case 'v': - s = append(s, '\x0B') - case 'f': - s = append(s, '\x0C') - case 'r': - s = append(s, '\x0D') - case 'e': - s = append(s, '\x1B') - case ' ': - s = append(s, '\x20') - case '"': - s = append(s, '"') - case '\'': - s = append(s, '\'') - case '\\': - s = append(s, '\\') - case 'N': // NEL (#x85) - s = append(s, '\xC2') - s = append(s, '\x85') - case '_': // #xA0 - s = append(s, '\xC2') - s = append(s, '\xA0') - case 'L': // LS (#x2028) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA8') - case 'P': // PS (#x2029) - s = append(s, '\xE2') - s = append(s, '\x80') - s = append(s, '\xA9') - case 'x': - code_length = 2 - case 'u': - code_length = 4 - case 'U': - code_length = 8 - default: - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found unknown escape character") - return false - } - - skip(parser) - skip(parser) - - // Consume an arbitrary escape code. - if code_length > 0 { - var value int - - // Scan the character value. - if parser.unread < code_length && !yaml_parser_update_buffer(parser, code_length) { - return false - } - for k := 0; k < code_length; k++ { - if !is_hex(parser.buffer, parser.buffer_pos+k) { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "did not find expected hexdecimal number") - return false - } - value = (value << 4) + as_hex(parser.buffer, parser.buffer_pos+k) - } - - // Check the value and write the character. - if (value >= 0xD800 && value <= 0xDFFF) || value > 0x10FFFF { - yaml_parser_set_scanner_error(parser, "while parsing a quoted scalar", - start_mark, "found invalid Unicode character escape code") - return false - } - if value <= 0x7F { - s = append(s, byte(value)) - } else if value <= 0x7FF { - s = append(s, byte(0xC0+(value>>6))) - s = append(s, byte(0x80+(value&0x3F))) - } else if value <= 0xFFFF { - s = append(s, byte(0xE0+(value>>12))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } else { - s = append(s, byte(0xF0+(value>>18))) - s = append(s, byte(0x80+((value>>12)&0x3F))) - s = append(s, byte(0x80+((value>>6)&0x3F))) - s = append(s, byte(0x80+(value&0x3F))) - } - - // Advance the pointer. - for k := 0; k < code_length; k++ { - skip(parser) - } - } - } else { - // It is a non-escaped non-blank character. - s = read(parser, s) - } - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Check if we are at the end of the scalar. - if single { - if parser.buffer[parser.buffer_pos] == '\'' { - break - } - } else { - if parser.buffer[parser.buffer_pos] == '"' { - break - } - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. 
- if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Join the whitespaces or fold line breaks. - if leading_blanks { - // Do we need to fold line breaks? - if len(leading_break) > 0 && leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - } else { - s = append(s, whitespaces...) - whitespaces = whitespaces[:0] - } - } - - // Eat the right quote. - skip(parser) - end_mark := parser.mark - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_SINGLE_QUOTED_SCALAR_STYLE, - } - if !single { - token.style = yaml_DOUBLE_QUOTED_SCALAR_STYLE - } - return true -} - -// Scan a plain scalar. -func yaml_parser_scan_plain_scalar(parser *yaml_parser_t, token *yaml_token_t) bool { - - var s, leading_break, trailing_breaks, whitespaces []byte - var leading_blanks bool - var indent = parser.indent + 1 - - start_mark := parser.mark - end_mark := parser.mark - - // Consume the content of the plain scalar. - for { - // Check for a document indicator. - if parser.unread < 4 && !yaml_parser_update_buffer(parser, 4) { - return false - } - if parser.mark.column == 0 && - ((parser.buffer[parser.buffer_pos+0] == '-' && - parser.buffer[parser.buffer_pos+1] == '-' && - parser.buffer[parser.buffer_pos+2] == '-') || - (parser.buffer[parser.buffer_pos+0] == '.' && - parser.buffer[parser.buffer_pos+1] == '.' && - parser.buffer[parser.buffer_pos+2] == '.')) && - is_blankz(parser.buffer, parser.buffer_pos+3) { - break - } - - // Check for a comment. - if parser.buffer[parser.buffer_pos] == '#' { - break - } - - // Consume non-blank characters. - for !is_blankz(parser.buffer, parser.buffer_pos) { - - // Check for 'x:x' in the flow context. TODO: Fix the test "spec-08-13". - if parser.flow_level > 0 && - parser.buffer[parser.buffer_pos] == ':' && - !is_blankz(parser.buffer, parser.buffer_pos+1) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found unexpected ':'") - return false - } - - // Check for indicators that may end a plain scalar. - if (parser.buffer[parser.buffer_pos] == ':' && is_blankz(parser.buffer, parser.buffer_pos+1)) || - (parser.flow_level > 0 && - (parser.buffer[parser.buffer_pos] == ',' || parser.buffer[parser.buffer_pos] == ':' || - parser.buffer[parser.buffer_pos] == '?' || parser.buffer[parser.buffer_pos] == '[' || - parser.buffer[parser.buffer_pos] == ']' || parser.buffer[parser.buffer_pos] == '{' || - parser.buffer[parser.buffer_pos] == '}')) { - break - } - - // Check if we need to join whitespaces and breaks. - if leading_blanks || len(whitespaces) > 0 { - if leading_blanks { - // Do we need to fold line breaks? - if leading_break[0] == '\n' { - if len(trailing_breaks) == 0 { - s = append(s, ' ') - } else { - s = append(s, trailing_breaks...) - } - } else { - s = append(s, leading_break...) - s = append(s, trailing_breaks...) - } - trailing_breaks = trailing_breaks[:0] - leading_break = leading_break[:0] - leading_blanks = false - } else { - s = append(s, whitespaces...) 
- whitespaces = whitespaces[:0] - } - } - - // Copy the character. - s = read(parser, s) - - end_mark = parser.mark - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - } - - // Is it the end? - if !(is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos)) { - break - } - - // Consume blank characters. - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - - for is_blank(parser.buffer, parser.buffer_pos) || is_break(parser.buffer, parser.buffer_pos) { - if is_blank(parser.buffer, parser.buffer_pos) { - - // Check for tab character that abuse intendation. - if leading_blanks && parser.mark.column < indent && is_tab(parser.buffer, parser.buffer_pos) { - yaml_parser_set_scanner_error(parser, "while scanning a plain scalar", - start_mark, "found a tab character that violate intendation") - return false - } - - // Consume a space or a tab character. - if !leading_blanks { - whitespaces = read(parser, whitespaces) - } else { - skip(parser) - } - } else { - if parser.unread < 2 && !yaml_parser_update_buffer(parser, 2) { - return false - } - - // Check if it is a first line break. - if !leading_blanks { - whitespaces = whitespaces[:0] - leading_break = read_line(parser, leading_break) - leading_blanks = true - } else { - trailing_breaks = read_line(parser, trailing_breaks) - } - } - if parser.unread < 1 && !yaml_parser_update_buffer(parser, 1) { - return false - } - } - - // Check intendation level. - if parser.flow_level == 0 && parser.mark.column < indent { - break - } - } - - // Create a token. - *token = yaml_token_t{ - typ: yaml_SCALAR_TOKEN, - start_mark: start_mark, - end_mark: end_mark, - value: s, - style: yaml_PLAIN_SCALAR_STYLE, - } - - // Note that we change the 'simple_key_allowed' flag. 
- if leading_blanks { - parser.simple_key_allowed = true - } - return true -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/sorter.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/sorter.go deleted file mode 100644 index 5958822..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/sorter.go +++ /dev/null @@ -1,104 +0,0 @@ -package yaml - -import ( - "reflect" - "unicode" -) - -type keyList []reflect.Value - -func (l keyList) Len() int { return len(l) } -func (l keyList) Swap(i, j int) { l[i], l[j] = l[j], l[i] } -func (l keyList) Less(i, j int) bool { - a := l[i] - b := l[j] - ak := a.Kind() - bk := b.Kind() - for (ak == reflect.Interface || ak == reflect.Ptr) && !a.IsNil() { - a = a.Elem() - ak = a.Kind() - } - for (bk == reflect.Interface || bk == reflect.Ptr) && !b.IsNil() { - b = b.Elem() - bk = b.Kind() - } - af, aok := keyFloat(a) - bf, bok := keyFloat(b) - if aok && bok { - if af != bf { - return af < bf - } - if ak != bk { - return ak < bk - } - return numLess(a, b) - } - if ak != reflect.String || bk != reflect.String { - return ak < bk - } - ar, br := []rune(a.String()), []rune(b.String()) - for i := 0; i < len(ar) && i < len(br); i++ { - if ar[i] == br[i] { - continue - } - al := unicode.IsLetter(ar[i]) - bl := unicode.IsLetter(br[i]) - if al && bl { - return ar[i] < br[i] - } - if al || bl { - return bl - } - var ai, bi int - var an, bn int64 - for ai = i; ai < len(ar) && unicode.IsDigit(ar[ai]); ai++ { - an = an*10 + int64(ar[ai]-'0') - } - for bi = i; bi < len(br) && unicode.IsDigit(br[bi]); bi++ { - bn = bn*10 + int64(br[bi]-'0') - } - if an != bn { - return an < bn - } - if ai != bi { - return ai < bi - } - return ar[i] < br[i] - } - return len(ar) < len(br) -} - -// keyFloat returns a float value for v if it is a number/bool -// and whether it is a number/bool or not. -func keyFloat(v reflect.Value) (f float64, ok bool) { - switch v.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return float64(v.Int()), true - case reflect.Float32, reflect.Float64: - return v.Float(), true - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return float64(v.Uint()), true - case reflect.Bool: - if v.Bool() { - return 1, true - } - return 0, true - } - return 0, false -} - -// numLess returns whether a < b. -// a and b must necessarily have the same kind. -func numLess(a, b reflect.Value) bool { - switch a.Kind() { - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return a.Int() < b.Int() - case reflect.Float32, reflect.Float64: - return a.Float() < b.Float() - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return a.Uint() < b.Uint() - case reflect.Bool: - return !a.Bool() && b.Bool() - } - panic("not a number") -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/suite_test.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/suite_test.go deleted file mode 100644 index c5cf1ed..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/suite_test.go +++ /dev/null @@ -1,12 +0,0 @@ -package yaml_test - -import ( - . 
"gopkg.in/check.v1" - "testing" -) - -func Test(t *testing.T) { TestingT(t) } - -type S struct{} - -var _ = Suite(&S{}) diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/writerc.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/writerc.go deleted file mode 100644 index 190362f..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/writerc.go +++ /dev/null @@ -1,89 +0,0 @@ -package yaml - -// Set the writer error and return false. -func yaml_emitter_set_writer_error(emitter *yaml_emitter_t, problem string) bool { - emitter.error = yaml_WRITER_ERROR - emitter.problem = problem - return false -} - -// Flush the output buffer. -func yaml_emitter_flush(emitter *yaml_emitter_t) bool { - if emitter.write_handler == nil { - panic("write handler not set") - } - - // Check if the buffer is empty. - if emitter.buffer_pos == 0 { - return true - } - - // If the output encoding is UTF-8, we don't need to recode the buffer. - if emitter.encoding == yaml_UTF8_ENCODING { - if err := emitter.write_handler(emitter, emitter.buffer[:emitter.buffer_pos]); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - return true - } - - // Recode the buffer into the raw buffer. - var low, high int - if emitter.encoding == yaml_UTF16LE_ENCODING { - low, high = 0, 1 - } else { - high, low = 1, 0 - } - - pos := 0 - for pos < emitter.buffer_pos { - // See the "reader.c" code for more details on UTF-8 encoding. Note - // that we assume that the buffer contains a valid UTF-8 sequence. - - // Read the next UTF-8 character. - octet := emitter.buffer[pos] - - var w int - var value rune - switch { - case octet&0x80 == 0x00: - w, value = 1, rune(octet&0x7F) - case octet&0xE0 == 0xC0: - w, value = 2, rune(octet&0x1F) - case octet&0xF0 == 0xE0: - w, value = 3, rune(octet&0x0F) - case octet&0xF8 == 0xF0: - w, value = 4, rune(octet&0x07) - } - for k := 1; k < w; k++ { - octet = emitter.buffer[pos+k] - value = (value << 6) + (rune(octet) & 0x3F) - } - pos += w - - // Write the character. - if value < 0x10000 { - var b [2]byte - b[high] = byte(value >> 8) - b[low] = byte(value & 0xFF) - emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1]) - } else { - // Write the character using a surrogate pair (check "reader.c"). - var b [4]byte - value -= 0x10000 - b[high] = byte(0xD8 + (value >> 18)) - b[low] = byte((value >> 10) & 0xFF) - b[high+2] = byte(0xDC + ((value >> 8) & 0xFF)) - b[low+2] = byte(value & 0xFF) - emitter.raw_buffer = append(emitter.raw_buffer, b[0], b[1], b[2], b[3]) - } - } - - // Write the raw buffer. - if err := emitter.write_handler(emitter, emitter.raw_buffer); err != nil { - return yaml_emitter_set_writer_error(emitter, "write error: "+err.Error()) - } - emitter.buffer_pos = 0 - emitter.raw_buffer = emitter.raw_buffer[:0] - return true -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/yaml.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/yaml.go deleted file mode 100644 index f1c390e..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/yaml.go +++ /dev/null @@ -1,301 +0,0 @@ -// Package yaml implements YAML support for the Go language. 
-// -// Source code and other details for the project are available at GitHub: -// -// https://github.com/go-yaml/yaml -// -package yaml - -import ( - "errors" - "fmt" - "reflect" - "strings" - "sync" -) - -type yamlError string - -func fail(msg string) { - panic(yamlError(msg)) -} - -func handleErr(err *error) { - if r := recover(); r != nil { - if e, ok := r.(yamlError); ok { - *err = errors.New("YAML error: " + string(e)) - } else { - panic(r) - } - } -} - -// The Setter interface may be implemented by types to do their own custom -// unmarshalling of YAML values, rather than being implicitly assigned by -// the yaml package machinery. If setting the value works, the method should -// return true. If it returns false, the value is considered unsupported -// and is omitted from maps and slices. -type Setter interface { - SetYAML(tag string, value interface{}) bool -} - -// The Getter interface is implemented by types to do their own custom -// marshalling into a YAML tag and value. -type Getter interface { - GetYAML() (tag string, value interface{}) -} - -// Unmarshal decodes the first document found within the in byte slice -// and assigns decoded values into the out value. -// -// Maps and pointers (to a struct, string, int, etc) are accepted as out -// values. If an internal pointer within a struct is not initialized, -// the yaml package will initialize it if necessary for unmarshalling -// the provided data. The out parameter must not be nil. -// -// The type of the decoded values and the type of out will be considered, -// and Unmarshal will do the best possible job to unmarshal values -// appropriately. It is NOT considered an error, though, to skip values -// because they are not available in the decoded YAML, or if they are not -// compatible with the out value. To ensure something was properly -// unmarshaled use a map or compare against the previous value for the -// field (usually the zero value). -// -// Struct fields are only unmarshalled if they are exported (have an -// upper case first letter), and are unmarshalled using the field name -// lowercased as the default key. Custom keys may be defined via the -// "yaml" name in the field tag: the content preceding the first comma -// is used as the key, and the following comma-separated options are -// used to tweak the marshalling process (see Marshal). -// Conflicting names result in a runtime error. -// -// For example: -// -// type T struct { -// F int `yaml:"a,omitempty"` -// B int -// } -// var t T -// yaml.Unmarshal([]byte("a: 1\nb: 2"), &t) -// -// See the documentation of Marshal for the format of tags and a list of -// supported tag options. -// -func Unmarshal(in []byte, out interface{}) (err error) { - defer handleErr(&err) - d := newDecoder() - p := newParser(in) - defer p.destroy() - node := p.parse() - if node != nil { - v := reflect.ValueOf(out) - if v.Kind() == reflect.Ptr && !v.IsNil() { - v = v.Elem() - } - d.unmarshal(node, v) - } - return nil -} - -// Marshal serializes the value provided into a YAML document. The structure -// of the generated document will reflect the structure of the value itself. -// Maps and pointers (to struct, string, int, etc) are accepted as the in value. -// -// Struct fields are only unmarshalled if they are exported (have an upper case -// first letter), and are unmarshalled using the field name lowercased as the -// default key. 
Custom keys may be defined via the "yaml" name in the field -// tag: the content preceding the first comma is used as the key, and the -// following comma-separated options are used to tweak the marshalling process. -// Conflicting names result in a runtime error. -// -// The field tag format accepted is: -// -// `(...) yaml:"[][,[,]]" (...)` -// -// The following flags are currently supported: -// -// omitempty Only include the field if it's not set to the zero -// value for the type or to empty slices or maps. -// Does not apply to zero valued structs. -// -// flow Marshal using a flow style (useful for structs, -// sequences and maps. -// -// inline Inline the struct it's applied to, so its fields -// are processed as if they were part of the outer -// struct. -// -// In addition, if the key is "-", the field is ignored. -// -// For example: -// -// type T struct { -// F int "a,omitempty" -// B int -// } -// yaml.Marshal(&T{B: 2}) // Returns "b: 2\n" -// yaml.Marshal(&T{F: 1}} // Returns "a: 1\nb: 0\n" -// -func Marshal(in interface{}) (out []byte, err error) { - defer handleErr(&err) - e := newEncoder() - defer e.destroy() - e.marshal("", reflect.ValueOf(in)) - e.finish() - out = e.out - return -} - -// -------------------------------------------------------------------------- -// Maintain a mapping of keys to structure field indexes - -// The code in this section was copied from mgo/bson. - -// structInfo holds details for the serialization of fields of -// a given struct. -type structInfo struct { - FieldsMap map[string]fieldInfo - FieldsList []fieldInfo - - // InlineMap is the number of the field in the struct that - // contains an ,inline map, or -1 if there's none. - InlineMap int -} - -type fieldInfo struct { - Key string - Num int - OmitEmpty bool - Flow bool - - // Inline holds the field index if the field is part of an inlined struct. - Inline []int -} - -var structMap = make(map[reflect.Type]*structInfo) -var fieldMapMutex sync.RWMutex - -func getStructInfo(st reflect.Type) (*structInfo, error) { - fieldMapMutex.RLock() - sinfo, found := structMap[st] - fieldMapMutex.RUnlock() - if found { - return sinfo, nil - } - - n := st.NumField() - fieldsMap := make(map[string]fieldInfo) - fieldsList := make([]fieldInfo, 0, n) - inlineMap := -1 - for i := 0; i != n; i++ { - field := st.Field(i) - if field.PkgPath != "" { - continue // Private field - } - - info := fieldInfo{Num: i} - - tag := field.Tag.Get("yaml") - if tag == "" && strings.Index(string(field.Tag), ":") < 0 { - tag = string(field.Tag) - } - if tag == "-" { - continue - } - - inline := false - fields := strings.Split(tag, ",") - if len(fields) > 1 { - for _, flag := range fields[1:] { - switch flag { - case "omitempty": - info.OmitEmpty = true - case "flow": - info.Flow = true - case "inline": - inline = true - default: - return nil, errors.New(fmt.Sprintf("Unsupported flag %q in tag %q of type %s", flag, tag, st)) - } - } - tag = fields[0] - } - - if inline { - switch field.Type.Kind() { - // TODO: Implement support for inline maps. 
- //case reflect.Map: - // if inlineMap >= 0 { - // return nil, errors.New("Multiple ,inline maps in struct " + st.String()) - // } - // if field.Type.Key() != reflect.TypeOf("") { - // return nil, errors.New("Option ,inline needs a map with string keys in struct " + st.String()) - // } - // inlineMap = info.Num - case reflect.Struct: - sinfo, err := getStructInfo(field.Type) - if err != nil { - return nil, err - } - for _, finfo := range sinfo.FieldsList { - if _, found := fieldsMap[finfo.Key]; found { - msg := "Duplicated key '" + finfo.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - if finfo.Inline == nil { - finfo.Inline = []int{i, finfo.Num} - } else { - finfo.Inline = append([]int{i}, finfo.Inline...) - } - fieldsMap[finfo.Key] = finfo - fieldsList = append(fieldsList, finfo) - } - default: - //return nil, errors.New("Option ,inline needs a struct value or map field") - return nil, errors.New("Option ,inline needs a struct value field") - } - continue - } - - if tag != "" { - info.Key = tag - } else { - info.Key = strings.ToLower(field.Name) - } - - if _, found = fieldsMap[info.Key]; found { - msg := "Duplicated key '" + info.Key + "' in struct " + st.String() - return nil, errors.New(msg) - } - - fieldsList = append(fieldsList, info) - fieldsMap[info.Key] = info - } - - sinfo = &structInfo{fieldsMap, fieldsList, inlineMap} - - fieldMapMutex.Lock() - structMap[st] = sinfo - fieldMapMutex.Unlock() - return sinfo, nil -} - -func isZero(v reflect.Value) bool { - switch v.Kind() { - case reflect.String: - return len(v.String()) == 0 - case reflect.Interface, reflect.Ptr: - return v.IsNil() - case reflect.Slice: - return v.Len() == 0 - case reflect.Map: - return v.Len() == 0 - case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: - return v.Int() == 0 - case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: - return v.Uint() == 0 - case reflect.Bool: - return !v.Bool() - } - return false -} diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlprivateh.go b/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlprivateh.go deleted file mode 100644 index 8110ce3..0000000 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlprivateh.go +++ /dev/null @@ -1,173 +0,0 @@ -package yaml - -const ( - // The size of the input raw buffer. - input_raw_buffer_size = 512 - - // The size of the input buffer. - // It should be possible to decode the whole raw buffer. - input_buffer_size = input_raw_buffer_size * 3 - - // The size of the output buffer. - output_buffer_size = 128 - - // The size of the output raw buffer. - // It should be possible to encode the whole output buffer. - output_raw_buffer_size = (output_buffer_size*2 + 2) - - // The size of other stacks and queues. - initial_stack_size = 16 - initial_queue_size = 16 - initial_string_size = 16 -) - -// Check if the character at the specified position is an alphabetical -// character, a digit, '_', or '-'. -func is_alpha(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'Z' || b[i] >= 'a' && b[i] <= 'z' || b[i] == '_' || b[i] == '-' -} - -// Check if the character at the specified position is a digit. -func is_digit(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' -} - -// Get the value of a digit. -func as_digit(b []byte, i int) int { - return int(b[i]) - '0' -} - -// Check if the character at the specified position is a hex-digit. 
-func is_hex(b []byte, i int) bool { - return b[i] >= '0' && b[i] <= '9' || b[i] >= 'A' && b[i] <= 'F' || b[i] >= 'a' && b[i] <= 'f' -} - -// Get the value of a hex-digit. -func as_hex(b []byte, i int) int { - bi := b[i] - if bi >= 'A' && bi <= 'F' { - return int(bi) - 'A' + 10 - } - if bi >= 'a' && bi <= 'f' { - return int(bi) - 'a' + 10 - } - return int(bi) - '0' -} - -// Check if the character is ASCII. -func is_ascii(b []byte, i int) bool { - return b[i] <= 0x7F -} - -// Check if the character at the start of the buffer can be printed unescaped. -func is_printable(b []byte, i int) bool { - return ((b[i] == 0x0A) || // . == #x0A - (b[i] >= 0x20 && b[i] <= 0x7E) || // #x20 <= . <= #x7E - (b[i] == 0xC2 && b[i+1] >= 0xA0) || // #0xA0 <= . <= #xD7FF - (b[i] > 0xC2 && b[i] < 0xED) || - (b[i] == 0xED && b[i+1] < 0xA0) || - (b[i] == 0xEE) || - (b[i] == 0xEF && // #xE000 <= . <= #xFFFD - !(b[i+1] == 0xBB && b[i+2] == 0xBF) && // && . != #xFEFF - !(b[i+1] == 0xBF && (b[i+2] == 0xBE || b[i+2] == 0xBF)))) -} - -// Check if the character at the specified position is NUL. -func is_z(b []byte, i int) bool { - return b[i] == 0x00 -} - -// Check if the beginning of the buffer is a BOM. -func is_bom(b []byte, i int) bool { - return b[0] == 0xEF && b[1] == 0xBB && b[2] == 0xBF -} - -// Check if the character at the specified position is space. -func is_space(b []byte, i int) bool { - return b[i] == ' ' -} - -// Check if the character at the specified position is tab. -func is_tab(b []byte, i int) bool { - return b[i] == '\t' -} - -// Check if the character at the specified position is blank (space or tab). -func is_blank(b []byte, i int) bool { - //return is_space(b, i) || is_tab(b, i) - return b[i] == ' ' || b[i] == '\t' -} - -// Check if the character at the specified position is a line break. -func is_break(b []byte, i int) bool { - return (b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9) // PS (#x2029) -} - -func is_crlf(b []byte, i int) bool { - return b[i] == '\r' && b[i+1] == '\n' -} - -// Check if the character is a line break or NUL. -func is_breakz(b []byte, i int) bool { - //return is_break(b, i) || is_z(b, i) - return ( // is_break: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - // is_z: - b[i] == 0) -} - -// Check if the character is a line break, space, or NUL. -func is_spacez(b []byte, i int) bool { - //return is_space(b, i) || is_breakz(b, i) - return ( // is_space: - b[i] == ' ' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Check if the character is a line break, space, tab, or NUL. 
-func is_blankz(b []byte, i int) bool { - //return is_blank(b, i) || is_breakz(b, i) - return ( // is_blank: - b[i] == ' ' || b[i] == '\t' || - // is_breakz: - b[i] == '\r' || // CR (#xD) - b[i] == '\n' || // LF (#xA) - b[i] == 0xC2 && b[i+1] == 0x85 || // NEL (#x85) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA8 || // LS (#x2028) - b[i] == 0xE2 && b[i+1] == 0x80 && b[i+2] == 0xA9 || // PS (#x2029) - b[i] == 0) -} - -// Determine the width of the character. -func width(b byte) int { - // Don't replace these by a switch without first - // confirming that it is being inlined. - if b&0x80 == 0x00 { - return 1 - } - if b&0xE0 == 0xC0 { - return 2 - } - if b&0xF0 == 0xE0 { - return 3 - } - if b&0xF8 == 0xF0 { - return 4 - } - return 0 - -} diff --git a/deploy.go b/deploy.go index 4b71b14..cd91f37 100644 --- a/deploy.go +++ b/deploy.go @@ -12,9 +12,9 @@ import ( "time" "github.com/github/hub/git" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/codegangsta/cli" - hub "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/github" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/google/go-github/github" + "github.com/codegangsta/cli" + hub "github.com/github/hub/github" + "github.com/google/go-github/github" ) const ( diff --git a/deploy_test.go b/deploy_test.go index 683a5a0..d895480 100644 --- a/deploy_test.go +++ b/deploy_test.go @@ -5,7 +5,7 @@ import ( "net/url" "testing" - hub "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/github" + hub "github.com/github/hub/github" ) var remotes = map[string]*url.URL{ diff --git a/github.go b/github.go index f71ca93..1c01020 100644 --- a/github.go +++ b/github.go @@ -3,8 +3,8 @@ package deploy import ( "net/http" - hub "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/github" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/google/go-github/github" + hub "github.com/github/hub/github" + "github.com/google/go-github/github" ) // newGitHubClient returns a new github.Client configured for the GitHub Host. 
diff --git a/updater.go b/updater.go index 5ed92ab..187e40c 100644 --- a/updater.go +++ b/updater.go @@ -5,8 +5,8 @@ import ( "runtime" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/inconshreveable/go-update" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit" + "github.com/inconshreveable/go-update" + "github.com/octokit/go-octokit/octokit" ) const GitHubHost = "github.com" diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/LICENSE b/vendor/bitbucket.org/kardianos/osext/LICENSE similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/LICENSE rename to vendor/bitbucket.org/kardianos/osext/LICENSE diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext.go b/vendor/bitbucket.org/kardianos/osext/osext.go similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext.go rename to vendor/bitbucket.org/kardianos/osext/osext.go diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_plan9.go b/vendor/bitbucket.org/kardianos/osext/osext_plan9.go similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_plan9.go rename to vendor/bitbucket.org/kardianos/osext/osext_plan9.go diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_procfs.go b/vendor/bitbucket.org/kardianos/osext/osext_procfs.go similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_procfs.go rename to vendor/bitbucket.org/kardianos/osext/osext_procfs.go diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_sysctl.go b/vendor/bitbucket.org/kardianos/osext/osext_sysctl.go similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_sysctl.go rename to vendor/bitbucket.org/kardianos/osext/osext_sysctl.go diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_test.go b/vendor/bitbucket.org/kardianos/osext/osext_test.go similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_test.go rename to vendor/bitbucket.org/kardianos/osext/osext_test.go diff --git a/Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_windows.go b/vendor/bitbucket.org/kardianos/osext/osext_windows.go similarity index 100% rename from Godeps/_workspace/src/bitbucket.org/kardianos/osext/osext_windows.go rename to vendor/bitbucket.org/kardianos/osext/osext_windows.go diff --git a/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/example.netrc b/vendor/code.google.com/p/go-netrc/netrc/example.netrc similarity index 100% rename from Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/example.netrc rename to vendor/code.google.com/p/go-netrc/netrc/example.netrc diff --git a/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc.go b/vendor/code.google.com/p/go-netrc/netrc/netrc.go similarity index 100% rename from Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc.go rename to vendor/code.google.com/p/go-netrc/netrc/netrc.go diff --git a/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc_test.go b/vendor/code.google.com/p/go-netrc/netrc/netrc_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc_test.go rename to vendor/code.google.com/p/go-netrc/netrc/netrc_test.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE b/vendor/github.com/BurntSushi/toml/COMPATIBLE similarity index 100% rename from 
Godeps/_workspace/src/github.com/BurntSushi/toml/COMPATIBLE rename to vendor/github.com/BurntSushi/toml/COMPATIBLE diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING b/vendor/github.com/BurntSushi/toml/COPYING similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/COPYING rename to vendor/github.com/BurntSushi/toml/COPYING diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile b/vendor/github.com/BurntSushi/toml/Makefile similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/Makefile rename to vendor/github.com/BurntSushi/toml/Makefile diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/README.md b/vendor/github.com/BurntSushi/toml/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/README.md rename to vendor/github.com/BurntSushi/toml/README.md diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go b/vendor/github.com/BurntSushi/toml/decode.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/decode.go rename to vendor/github.com/BurntSushi/toml/decode.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go b/vendor/github.com/BurntSushi/toml/decode_meta.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/decode_meta.go rename to vendor/github.com/BurntSushi/toml/decode_meta.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go b/vendor/github.com/BurntSushi/toml/decode_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/decode_test.go rename to vendor/github.com/BurntSushi/toml/decode_test.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go b/vendor/github.com/BurntSushi/toml/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/doc.go rename to vendor/github.com/BurntSushi/toml/doc.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go b/vendor/github.com/BurntSushi/toml/encode.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/encode.go rename to vendor/github.com/BurntSushi/toml/encode.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go b/vendor/github.com/BurntSushi/toml/encode_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/encode_test.go rename to vendor/github.com/BurntSushi/toml/encode_test.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go b/vendor/github.com/BurntSushi/toml/encoding_types.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types.go rename to vendor/github.com/BurntSushi/toml/encoding_types.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go b/vendor/github.com/BurntSushi/toml/encoding_types_1.1.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/encoding_types_1.1.go rename to vendor/github.com/BurntSushi/toml/encoding_types_1.1.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go b/vendor/github.com/BurntSushi/toml/lex.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/lex.go rename to vendor/github.com/BurntSushi/toml/lex.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go b/vendor/github.com/BurntSushi/toml/parse.go similarity index 100% rename from 
Godeps/_workspace/src/github.com/BurntSushi/toml/parse.go rename to vendor/github.com/BurntSushi/toml/parse.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim b/vendor/github.com/BurntSushi/toml/session.vim similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/session.vim rename to vendor/github.com/BurntSushi/toml/session.vim diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go b/vendor/github.com/BurntSushi/toml/type_check.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/type_check.go rename to vendor/github.com/BurntSushi/toml/type_check.go diff --git a/Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go b/vendor/github.com/BurntSushi/toml/type_fields.go similarity index 100% rename from Godeps/_workspace/src/github.com/BurntSushi/toml/type_fields.go rename to vendor/github.com/BurntSushi/toml/type_fields.go diff --git a/Godeps/_workspace/src/github.com/bmizerany/assert/README.md b/vendor/github.com/bmizerany/assert/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/bmizerany/assert/README.md rename to vendor/github.com/bmizerany/assert/README.md diff --git a/Godeps/_workspace/src/github.com/bmizerany/assert/assert.go b/vendor/github.com/bmizerany/assert/assert.go similarity index 95% rename from Godeps/_workspace/src/github.com/bmizerany/assert/assert.go rename to vendor/github.com/bmizerany/assert/assert.go index 2d3c325..8770940 100644 --- a/Godeps/_workspace/src/github.com/bmizerany/assert/assert.go +++ b/vendor/github.com/bmizerany/assert/assert.go @@ -3,7 +3,7 @@ package assert // Testing helpers for doozer. import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kr/pretty" + "github.com/kr/pretty" "reflect" "testing" "runtime" diff --git a/Godeps/_workspace/src/github.com/bmizerany/assert/assert_test.go b/vendor/github.com/bmizerany/assert/assert_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/bmizerany/assert/assert_test.go rename to vendor/github.com/bmizerany/assert/assert_test.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/LICENSE b/vendor/github.com/codegangsta/cli/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/LICENSE rename to vendor/github.com/codegangsta/cli/LICENSE diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/README.md b/vendor/github.com/codegangsta/cli/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/README.md rename to vendor/github.com/codegangsta/cli/README.md diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/app.go b/vendor/github.com/codegangsta/cli/app.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/app.go rename to vendor/github.com/codegangsta/cli/app.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/app_test.go b/vendor/github.com/codegangsta/cli/app_test.go similarity index 99% rename from Godeps/_workspace/src/github.com/codegangsta/cli/app_test.go rename to vendor/github.com/codegangsta/cli/app_test.go index 1ccf646..4a40b89 100644 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/app_test.go +++ b/vendor/github.com/codegangsta/cli/app_test.go @@ -6,7 +6,7 @@ import ( "os" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/codegangsta/cli" + "github.com/codegangsta/cli" ) func ExampleApp() { diff --git 
a/Godeps/_workspace/src/github.com/codegangsta/cli/cli.go b/vendor/github.com/codegangsta/cli/cli.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/cli.go rename to vendor/github.com/codegangsta/cli/cli.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/cli_test.go b/vendor/github.com/codegangsta/cli/cli_test.go similarity index 96% rename from Godeps/_workspace/src/github.com/codegangsta/cli/cli_test.go rename to vendor/github.com/codegangsta/cli/cli_test.go index 6d9adf8..8a8df97 100644 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/cli_test.go +++ b/vendor/github.com/codegangsta/cli/cli_test.go @@ -3,7 +3,7 @@ package cli_test import ( "os" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/codegangsta/cli" + "github.com/codegangsta/cli" ) func Example() { diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/command.go b/vendor/github.com/codegangsta/cli/command.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/command.go rename to vendor/github.com/codegangsta/cli/command.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/command_test.go b/vendor/github.com/codegangsta/cli/command_test.go similarity index 92% rename from Godeps/_workspace/src/github.com/codegangsta/cli/command_test.go rename to vendor/github.com/codegangsta/cli/command_test.go index 7c0d388..4125b0c 100644 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/command_test.go +++ b/vendor/github.com/codegangsta/cli/command_test.go @@ -4,7 +4,7 @@ import ( "flag" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/codegangsta/cli" + "github.com/codegangsta/cli" ) func TestCommandDoNotIgnoreFlags(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/context.go b/vendor/github.com/codegangsta/cli/context.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/context.go rename to vendor/github.com/codegangsta/cli/context.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/context_test.go b/vendor/github.com/codegangsta/cli/context_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/codegangsta/cli/context_test.go rename to vendor/github.com/codegangsta/cli/context_test.go index 7e90c64..d4a1877 100644 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/context_test.go +++ b/vendor/github.com/codegangsta/cli/context_test.go @@ -5,7 +5,7 @@ import ( "testing" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/codegangsta/cli" + "github.com/codegangsta/cli" ) func TestNewContext(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/flag.go b/vendor/github.com/codegangsta/cli/flag.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/flag.go rename to vendor/github.com/codegangsta/cli/flag.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/flag_test.go b/vendor/github.com/codegangsta/cli/flag_test.go similarity index 99% rename from Godeps/_workspace/src/github.com/codegangsta/cli/flag_test.go rename to vendor/github.com/codegangsta/cli/flag_test.go index c134622..f0f096a 100644 --- a/Godeps/_workspace/src/github.com/codegangsta/cli/flag_test.go +++ b/vendor/github.com/codegangsta/cli/flag_test.go @@ -7,7 +7,7 @@ import ( "strings" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/codegangsta/cli" + "github.com/codegangsta/cli" ) var boolFlagTests = []struct { diff --git 
a/Godeps/_workspace/src/github.com/codegangsta/cli/help.go b/vendor/github.com/codegangsta/cli/help.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/help.go rename to vendor/github.com/codegangsta/cli/help.go diff --git a/Godeps/_workspace/src/github.com/codegangsta/cli/helpers_test.go b/vendor/github.com/codegangsta/cli/helpers_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/codegangsta/cli/helpers_test.go rename to vendor/github.com/codegangsta/cli/helpers_test.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/example.netrc b/vendor/github.com/fhs/go-netrc/netrc/example.netrc similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/example.netrc rename to vendor/github.com/fhs/go-netrc/netrc/example.netrc diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc.go b/vendor/github.com/fhs/go-netrc/netrc/netrc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc.go rename to vendor/github.com/fhs/go-netrc/netrc/netrc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc_test.go b/vendor/github.com/fhs/go-netrc/netrc/netrc_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc/netrc_test.go rename to vendor/github.com/fhs/go-netrc/netrc/netrc_test.go diff --git a/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/README.md b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/README.md new file mode 100644 index 0000000..8b6b6fc --- /dev/null +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/README.md @@ -0,0 +1,45 @@ +# Assert (c) Blake Mizerany and Keith Rarick -- MIT LICENCE + +## Assertions for Go tests + +## Install + + $ go get github.com/bmizerany/assert + +## Use + +**point.go** + + package point + + type Point struct { + x, y int + } + +**point_test.go** + + + package point + + import ( + "testing" + "github.com/bmizerany/assert" + ) + + func TestAsserts(t *testing.T) { + p1 := Point{1, 1} + p2 := Point{2, 1} + + assert.Equal(t, p1, p2) + } + +**output** + $ go test + --- FAIL: TestAsserts (0.00 seconds) + assert.go:15: /Users/flavio.barbosa/dev/stewie/src/point_test.go:12 + assert.go:24: ! X: 1 != 2 + FAIL + +## Docs + + http://github.com/bmizerany/assert diff --git a/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert.go new file mode 100644 index 0000000..a3cff2a --- /dev/null +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert.go @@ -0,0 +1,77 @@ +package assert + +// Testing helpers for doozer. 
+ +import ( + "github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty" + "reflect" + "testing" + "runtime" + "fmt" +) + +func assert(t *testing.T, result bool, f func(), cd int) { + if !result { + _, file, line, _ := runtime.Caller(cd + 1) + t.Errorf("%s:%d", file, line) + f() + t.FailNow() + } +} + +func equal(t *testing.T, exp, got interface{}, cd int, args ...interface{}) { + fn := func() { + for _, desc := range pretty.Diff(exp, got) { + t.Error("!", desc) + } + if len(args) > 0 { + t.Error("!", " -", fmt.Sprint(args...)) + } + } + result := reflect.DeepEqual(exp, got) + assert(t, result, fn, cd+1) +} + +func tt(t *testing.T, result bool, cd int, args ...interface{}) { + fn := func() { + t.Errorf("! Failure") + if len(args) > 0 { + t.Error("!", " -", fmt.Sprint(args...)) + } + } + assert(t, result, fn, cd+1) +} + +func T(t *testing.T, result bool, args ...interface{}) { + tt(t, result, 1, args...) +} + +func Tf(t *testing.T, result bool, format string, args ...interface{}) { + tt(t, result, 1, fmt.Sprintf(format, args...)) +} + +func Equal(t *testing.T, exp, got interface{}, args ...interface{}) { + equal(t, exp, got, 1, args...) +} + +func Equalf(t *testing.T, exp, got interface{}, format string, args ...interface{}) { + equal(t, exp, got, 1, fmt.Sprintf(format, args...)) +} + +func NotEqual(t *testing.T, exp, got interface{}, args ...interface{}) { + fn := func() { + t.Errorf("! Unexpected: <%#v>", exp) + if len(args) > 0 { + t.Error("!", " -", fmt.Sprint(args...)) + } + } + result := !reflect.DeepEqual(exp, got) + assert(t, result, fn, 1) +} + +func Panic(t *testing.T, err interface{}, fn func()) { + defer func() { + equal(t, err, recover(), 3) + }() + fn() +} diff --git a/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert_test.go new file mode 100644 index 0000000..162a590 --- /dev/null +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert/assert_test.go @@ -0,0 +1,15 @@ +package assert + +import ( + "testing" +) + +func TestLineNumbers(t *testing.T) { + Equal(t, "foo", "foo", "msg!") + //Equal(t, "foo", "bar", "this should blow up") +} + +func TestNotEqual(t *testing.T) { + NotEqual(t, "foo", "bar", "msg!") + //NotEqual(t, "foo", "foo", "this should blow up") +} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/LICENSE diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/README b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/README similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/README rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/README diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go similarity index 100% rename 
from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/both_test.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/doc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/quote_test.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote/unquote_test.go diff --git a/Godeps/_workspace/src/github.com/kr/pretty/License b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/License similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/License rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/License diff --git a/Godeps/_workspace/src/github.com/kr/pretty/Readme b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/Readme similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/Readme rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/Readme diff --git a/Godeps/_workspace/src/github.com/kr/pretty/diff.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/diff.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/diff.go rename to 
vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/diff.go diff --git a/Godeps/_workspace/src/github.com/kr/pretty/diff_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/diff_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/diff_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/diff_test.go diff --git a/Godeps/_workspace/src/github.com/kr/pretty/example_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/example_test.go similarity index 79% rename from Godeps/_workspace/src/github.com/kr/pretty/example_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/example_test.go index 5de0fb3..f5b6e8b 100644 --- a/Godeps/_workspace/src/github.com/kr/pretty/example_test.go +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/example_test.go @@ -2,7 +2,7 @@ package pretty_test import ( "fmt" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kr/pretty" + "github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty" ) func Example() { diff --git a/Godeps/_workspace/src/github.com/kr/pretty/formatter.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/formatter.go similarity index 99% rename from Godeps/_workspace/src/github.com/kr/pretty/formatter.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/formatter.go index c2a210b..632317f 100644 --- a/Godeps/_workspace/src/github.com/kr/pretty/formatter.go +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/formatter.go @@ -7,7 +7,7 @@ import ( "strconv" "text/tabwriter" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kr/text" + "github.com/github/hub/Godeps/_workspace/src/github.com/kr/text" ) const ( diff --git a/Godeps/_workspace/src/github.com/kr/pretty/formatter_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/formatter_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/formatter_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/formatter_test.go diff --git a/Godeps/_workspace/src/github.com/kr/pretty/pretty.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/pretty.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/pretty.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/pretty.go diff --git a/Godeps/_workspace/src/github.com/kr/pretty/zero.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/zero.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/pretty/zero.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty/zero.go diff --git a/Godeps/_workspace/src/github.com/kr/text/License b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/License similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/License rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/License diff --git a/Godeps/_workspace/src/github.com/kr/text/Readme b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/Readme similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/Readme rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/Readme diff --git 
a/Godeps/_workspace/src/github.com/kr/text/doc.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/doc.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/doc.go diff --git a/Godeps/_workspace/src/github.com/kr/text/indent.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/indent.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/indent.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/indent.go diff --git a/Godeps/_workspace/src/github.com/kr/text/indent_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/indent_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/indent_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/indent_test.go diff --git a/Godeps/_workspace/src/github.com/kr/text/wrap.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/wrap.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/wrap.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/wrap.go diff --git a/Godeps/_workspace/src/github.com/kr/text/wrap_test.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/wrap_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/text/wrap_test.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/kr/text/wrap_test.go diff --git a/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/README.md b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/README.md new file mode 100644 index 0000000..c69da4a --- /dev/null +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/README.md @@ -0,0 +1,42 @@ +# go-colorable + +Colorable writer for windows. + +For example, most of logger packages doesn't show colors on windows. (I know we can do it with ansicon. But I don't want.) +This package is possible to handle escape sequence for ansi color on windows. + +## Too Bad! + +![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/bad.png) + + +## So Good! + +![](https://raw.githubusercontent.com/mattn/go-colorable/gh-pages/good.png) + +## Usage + +```go +logrus.SetOutput(colorable.NewColorableStdout()) + +logrus.Info("succeeded") +logrus.Warn("not correct") +logrus.Error("something error") +logrus.Fatal("panic") +``` + +You can compile above code on non-windows OSs. 
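The logrus snippet above is the vendored README's own example. As a minimal self-contained sketch of the same idea (assuming only the upstream import path github.com/mattn/go-colorable and the NewColorableStdout constructor vendored in this patch), plain ANSI escapes written through the colorable writer render as colors on Windows as well:

```go
package main

import (
	"fmt"

	"github.com/mattn/go-colorable"
)

func main() {
	// NewColorableStdout returns os.Stdout unchanged on non-Windows systems and an
	// escape-sequence-translating Writer on Windows (see colorable_windows.go below).
	out := colorable.NewColorableStdout()
	fmt.Fprintln(out, "\x1b[32msucceeded\x1b[0m") // green "succeeded", then reset
}
```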
+ +## Installation + +``` +$ go get github.com/mattn/go-colorable +``` + +# License + +MIT + +# Author + +Yasuhiro Matsumoto (a.k.a mattn) diff --git a/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_others.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_others.go new file mode 100644 index 0000000..219f02f --- /dev/null +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_others.go @@ -0,0 +1,16 @@ +// +build !windows + +package colorable + +import ( + "io" + "os" +) + +func NewColorableStdout() io.Writer { + return os.Stdout +} + +func NewColorableStderr() io.Writer { + return os.Stderr +} diff --git a/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_windows.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_windows.go new file mode 100644 index 0000000..6b5f8cc --- /dev/null +++ b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable/colorable_windows.go @@ -0,0 +1,594 @@ +package colorable + +import ( + "bytes" + "fmt" + "io" + "os" + "strconv" + "strings" + "syscall" + "unsafe" + + "github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty" +) + +const ( + foregroundBlue = 0x1 + foregroundGreen = 0x2 + foregroundRed = 0x4 + foregroundIntensity = 0x8 + foregroundMask = (foregroundRed | foregroundBlue | foregroundGreen | foregroundIntensity) + backgroundBlue = 0x10 + backgroundGreen = 0x20 + backgroundRed = 0x40 + backgroundIntensity = 0x80 + backgroundMask = (backgroundRed | backgroundBlue | backgroundGreen | backgroundIntensity) +) + +type wchar uint16 +type short int16 +type dword uint32 +type word uint16 + +type coord struct { + x short + y short +} + +type smallRect struct { + left short + top short + right short + bottom short +} + +type consoleScreenBufferInfo struct { + size coord + cursorPosition coord + attributes word + window smallRect + maximumWindowSize coord +} + +var ( + kernel32 = syscall.NewLazyDLL("kernel32.dll") + procGetConsoleScreenBufferInfo = kernel32.NewProc("GetConsoleScreenBufferInfo") + procSetConsoleTextAttribute = kernel32.NewProc("SetConsoleTextAttribute") +) + +type Writer struct { + out io.Writer + handle syscall.Handle + lastbuf bytes.Buffer + oldattr word +} + +func NewColorableStdout() io.Writer { + var csbi consoleScreenBufferInfo + out := os.Stdout + if !isatty.IsTerminal(out.Fd()) { + return out + } + handle := syscall.Handle(out.Fd()) + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + return &Writer{out: out, handle: handle, oldattr: csbi.attributes} +} + +func NewColorableStderr() io.Writer { + var csbi consoleScreenBufferInfo + out := os.Stderr + if !isatty.IsTerminal(out.Fd()) { + return out + } + handle := syscall.Handle(out.Fd()) + procGetConsoleScreenBufferInfo.Call(uintptr(handle), uintptr(unsafe.Pointer(&csbi))) + return &Writer{out: out, handle: handle, oldattr: csbi.attributes} +} + +var color256 = map[int]int{ + 0: 0x000000, + 1: 0x800000, + 2: 0x008000, + 3: 0x808000, + 4: 0x000080, + 5: 0x800080, + 6: 0x008080, + 7: 0xc0c0c0, + 8: 0x808080, + 9: 0xff0000, + 10: 0x00ff00, + 11: 0xffff00, + 12: 0x0000ff, + 13: 0xff00ff, + 14: 0x00ffff, + 15: 0xffffff, + 16: 0x000000, + 17: 0x00005f, + 18: 0x000087, + 19: 0x0000af, + 20: 0x0000d7, + 21: 0x0000ff, + 22: 0x005f00, + 23: 0x005f5f, + 24: 0x005f87, + 25: 0x005faf, + 26: 0x005fd7, + 27: 0x005fff, + 
28: 0x008700, + 29: 0x00875f, + 30: 0x008787, + 31: 0x0087af, + 32: 0x0087d7, + 33: 0x0087ff, + 34: 0x00af00, + 35: 0x00af5f, + 36: 0x00af87, + 37: 0x00afaf, + 38: 0x00afd7, + 39: 0x00afff, + 40: 0x00d700, + 41: 0x00d75f, + 42: 0x00d787, + 43: 0x00d7af, + 44: 0x00d7d7, + 45: 0x00d7ff, + 46: 0x00ff00, + 47: 0x00ff5f, + 48: 0x00ff87, + 49: 0x00ffaf, + 50: 0x00ffd7, + 51: 0x00ffff, + 52: 0x5f0000, + 53: 0x5f005f, + 54: 0x5f0087, + 55: 0x5f00af, + 56: 0x5f00d7, + 57: 0x5f00ff, + 58: 0x5f5f00, + 59: 0x5f5f5f, + 60: 0x5f5f87, + 61: 0x5f5faf, + 62: 0x5f5fd7, + 63: 0x5f5fff, + 64: 0x5f8700, + 65: 0x5f875f, + 66: 0x5f8787, + 67: 0x5f87af, + 68: 0x5f87d7, + 69: 0x5f87ff, + 70: 0x5faf00, + 71: 0x5faf5f, + 72: 0x5faf87, + 73: 0x5fafaf, + 74: 0x5fafd7, + 75: 0x5fafff, + 76: 0x5fd700, + 77: 0x5fd75f, + 78: 0x5fd787, + 79: 0x5fd7af, + 80: 0x5fd7d7, + 81: 0x5fd7ff, + 82: 0x5fff00, + 83: 0x5fff5f, + 84: 0x5fff87, + 85: 0x5fffaf, + 86: 0x5fffd7, + 87: 0x5fffff, + 88: 0x870000, + 89: 0x87005f, + 90: 0x870087, + 91: 0x8700af, + 92: 0x8700d7, + 93: 0x8700ff, + 94: 0x875f00, + 95: 0x875f5f, + 96: 0x875f87, + 97: 0x875faf, + 98: 0x875fd7, + 99: 0x875fff, + 100: 0x878700, + 101: 0x87875f, + 102: 0x878787, + 103: 0x8787af, + 104: 0x8787d7, + 105: 0x8787ff, + 106: 0x87af00, + 107: 0x87af5f, + 108: 0x87af87, + 109: 0x87afaf, + 110: 0x87afd7, + 111: 0x87afff, + 112: 0x87d700, + 113: 0x87d75f, + 114: 0x87d787, + 115: 0x87d7af, + 116: 0x87d7d7, + 117: 0x87d7ff, + 118: 0x87ff00, + 119: 0x87ff5f, + 120: 0x87ff87, + 121: 0x87ffaf, + 122: 0x87ffd7, + 123: 0x87ffff, + 124: 0xaf0000, + 125: 0xaf005f, + 126: 0xaf0087, + 127: 0xaf00af, + 128: 0xaf00d7, + 129: 0xaf00ff, + 130: 0xaf5f00, + 131: 0xaf5f5f, + 132: 0xaf5f87, + 133: 0xaf5faf, + 134: 0xaf5fd7, + 135: 0xaf5fff, + 136: 0xaf8700, + 137: 0xaf875f, + 138: 0xaf8787, + 139: 0xaf87af, + 140: 0xaf87d7, + 141: 0xaf87ff, + 142: 0xafaf00, + 143: 0xafaf5f, + 144: 0xafaf87, + 145: 0xafafaf, + 146: 0xafafd7, + 147: 0xafafff, + 148: 0xafd700, + 149: 0xafd75f, + 150: 0xafd787, + 151: 0xafd7af, + 152: 0xafd7d7, + 153: 0xafd7ff, + 154: 0xafff00, + 155: 0xafff5f, + 156: 0xafff87, + 157: 0xafffaf, + 158: 0xafffd7, + 159: 0xafffff, + 160: 0xd70000, + 161: 0xd7005f, + 162: 0xd70087, + 163: 0xd700af, + 164: 0xd700d7, + 165: 0xd700ff, + 166: 0xd75f00, + 167: 0xd75f5f, + 168: 0xd75f87, + 169: 0xd75faf, + 170: 0xd75fd7, + 171: 0xd75fff, + 172: 0xd78700, + 173: 0xd7875f, + 174: 0xd78787, + 175: 0xd787af, + 176: 0xd787d7, + 177: 0xd787ff, + 178: 0xd7af00, + 179: 0xd7af5f, + 180: 0xd7af87, + 181: 0xd7afaf, + 182: 0xd7afd7, + 183: 0xd7afff, + 184: 0xd7d700, + 185: 0xd7d75f, + 186: 0xd7d787, + 187: 0xd7d7af, + 188: 0xd7d7d7, + 189: 0xd7d7ff, + 190: 0xd7ff00, + 191: 0xd7ff5f, + 192: 0xd7ff87, + 193: 0xd7ffaf, + 194: 0xd7ffd7, + 195: 0xd7ffff, + 196: 0xff0000, + 197: 0xff005f, + 198: 0xff0087, + 199: 0xff00af, + 200: 0xff00d7, + 201: 0xff00ff, + 202: 0xff5f00, + 203: 0xff5f5f, + 204: 0xff5f87, + 205: 0xff5faf, + 206: 0xff5fd7, + 207: 0xff5fff, + 208: 0xff8700, + 209: 0xff875f, + 210: 0xff8787, + 211: 0xff87af, + 212: 0xff87d7, + 213: 0xff87ff, + 214: 0xffaf00, + 215: 0xffaf5f, + 216: 0xffaf87, + 217: 0xffafaf, + 218: 0xffafd7, + 219: 0xffafff, + 220: 0xffd700, + 221: 0xffd75f, + 222: 0xffd787, + 223: 0xffd7af, + 224: 0xffd7d7, + 225: 0xffd7ff, + 226: 0xffff00, + 227: 0xffff5f, + 228: 0xffff87, + 229: 0xffffaf, + 230: 0xffffd7, + 231: 0xffffff, + 232: 0x080808, + 233: 0x121212, + 234: 0x1c1c1c, + 235: 0x262626, + 236: 0x303030, + 237: 0x3a3a3a, + 238: 0x444444, + 239: 0x4e4e4e, + 240: 0x585858, + 241: 
0x626262, + 242: 0x6c6c6c, + 243: 0x767676, + 244: 0x808080, + 245: 0x8a8a8a, + 246: 0x949494, + 247: 0x9e9e9e, + 248: 0xa8a8a8, + 249: 0xb2b2b2, + 250: 0xbcbcbc, + 251: 0xc6c6c6, + 252: 0xd0d0d0, + 253: 0xdadada, + 254: 0xe4e4e4, + 255: 0xeeeeee, +} + +func (w *Writer) Write(data []byte) (n int, err error) { + var csbi consoleScreenBufferInfo + procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi))) + + er := bytes.NewBuffer(data) +loop: + for { + r1, _, err := procGetConsoleScreenBufferInfo.Call(uintptr(w.handle), uintptr(unsafe.Pointer(&csbi))) + if r1 == 0 { + break loop + } + + c1, _, err := er.ReadRune() + if err != nil { + break loop + } + if c1 != 0x1b { + fmt.Fprint(w.out, string(c1)) + continue + } + c2, _, err := er.ReadRune() + if err != nil { + w.lastbuf.WriteRune(c1) + break loop + } + if c2 != 0x5b { + w.lastbuf.WriteRune(c1) + w.lastbuf.WriteRune(c2) + continue + } + + var buf bytes.Buffer + var m rune + for { + c, _, err := er.ReadRune() + if err != nil { + w.lastbuf.WriteRune(c1) + w.lastbuf.WriteRune(c2) + w.lastbuf.Write(buf.Bytes()) + break loop + } + if ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || c == '@' { + m = c + break + } + buf.Write([]byte(string(c))) + } + + switch m { + case 'm': + attr := csbi.attributes + cs := buf.String() + if cs == "" { + procSetConsoleTextAttribute.Call(uintptr(w.handle), uintptr(w.oldattr)) + continue + } + token := strings.Split(cs, ";") + for i, ns := range token { + if n, err = strconv.Atoi(ns); err == nil { + switch { + case n == 0 || n == 100: + attr = w.oldattr + case 1 <= n && n <= 5: + attr |= foregroundIntensity + case n == 7: + attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4) + case 22 == n || n == 25 || n == 25: + attr |= foregroundIntensity + case n == 27: + attr = ((attr & foregroundMask) << 4) | ((attr & backgroundMask) >> 4) + case 30 <= n && n <= 37: + attr = (attr & backgroundMask) + if (n-30)&1 != 0 { + attr |= foregroundRed + } + if (n-30)&2 != 0 { + attr |= foregroundGreen + } + if (n-30)&4 != 0 { + attr |= foregroundBlue + } + case n == 38: // set foreground color. + if i < len(token)-2 && token[i+1] == "5" { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256foreAttr == nil { + n256setup() + } + attr &= backgroundMask + attr |= n256foreAttr[n256] + i += 2 + } + } else { + attr = attr & (w.oldattr & backgroundMask) + } + case n == 39: // reset foreground color. + attr &= backgroundMask + attr |= w.oldattr & foregroundMask + case 40 <= n && n <= 47: + attr = (attr & foregroundMask) + if (n-40)&1 != 0 { + attr |= backgroundRed + } + if (n-40)&2 != 0 { + attr |= backgroundGreen + } + if (n-40)&4 != 0 { + attr |= backgroundBlue + } + case n == 48: // set background color. + if i < len(token)-2 && token[i+1] == "5" { + if n256, err := strconv.Atoi(token[i+2]); err == nil { + if n256backAttr == nil { + n256setup() + } + attr &= foregroundMask + attr |= n256backAttr[n256] + i += 2 + } + } else { + attr = attr & (w.oldattr & foregroundMask) + } + case n == 49: // reset foreground color. 
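// NOTE: despite the comment above, ANSI code 49 restores the default *background* color:
// the next two lines keep the current foreground bits and re-apply w.oldattr's background bits.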
+ attr &= foregroundMask + attr |= w.oldattr & backgroundMask + } + procSetConsoleTextAttribute.Call(uintptr(w.handle), uintptr(attr)) + } + } + } + } + return len(data) - w.lastbuf.Len(), nil +} + +type consoleColor struct { + red bool + green bool + blue bool + intensity bool +} + +func minmax3(a, b, c int) (min, max int) { + if a < b { + if b < c { + return a, c + } else if a < c { + return a, b + } else { + return c, b + } + } else { + if a < c { + return b, c + } else if b < c { + return b, a + } else { + return c, a + } + } +} + +func toConsoleColor(rgb int) (c consoleColor) { + r, g, b := (rgb&0xFF0000)>>16, (rgb&0x00FF00)>>8, rgb&0x0000FF + min, max := minmax3(r, g, b) + a := (min + max) / 2 + if r < 128 && g < 128 && b < 128 { + if r >= a { + c.red = true + } + if g >= a { + c.green = true + } + if b >= a { + c.blue = true + } + // non-intensed white is lighter than intensed black, so swap those. + if c.red && c.green && c.blue { + c.red, c.green, c.blue = false, false, false + c.intensity = true + } + } else { + if min < 128 { + min = 128 + a = (min + max) / 2 + } + if r >= a { + c.red = true + } + if g >= a { + c.green = true + } + if b >= a { + c.blue = true + } + c.intensity = true + // intensed black is darker than non-intensed white, so swap those. + if !c.red && !c.green && !c.blue { + c.red, c.green, c.blue = true, true, true + c.intensity = false + } + } + return c +} + +func (c consoleColor) foregroundAttr() (attr word) { + if c.red { + attr |= foregroundRed + } + if c.green { + attr |= foregroundGreen + } + if c.blue { + attr |= foregroundBlue + } + if c.intensity { + attr |= foregroundIntensity + } + return +} + +func (c consoleColor) backgroundAttr() (attr word) { + if c.red { + attr |= backgroundRed + } + if c.green { + attr |= backgroundGreen + } + if c.blue { + attr |= backgroundBlue + } + if c.intensity { + attr |= backgroundIntensity + } + return +} + +var n256foreAttr []word +var n256backAttr []word + +func n256setup() { + n256foreAttr = make([]word, 256) + n256backAttr = make([]word, 256) + for i, rgb := range color256 { + c := toConsoleColor(rgb) + n256foreAttr[i] = c.foregroundAttr() + n256backAttr[i] = c.backgroundAttr() + } +} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/README.md b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/README.md rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/README.md diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go diff --git 
a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go rename to vendor/github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go diff --git a/Godeps/_workspace/src/github.com/github/hub/cmd/cmd.go b/vendor/github.com/github/hub/cmd/cmd.go similarity index 79% rename from Godeps/_workspace/src/github.com/github/hub/cmd/cmd.go rename to vendor/github.com/github/hub/cmd/cmd.go index 31b24f1..9490ca8 100644 --- a/Godeps/_workspace/src/github.com/github/hub/cmd/cmd.go +++ b/vendor/github.com/github/hub/cmd/cmd.go @@ -8,8 +8,9 @@ import ( "strings" "syscall" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kballard/go-shellquote" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/utils" + "github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote" + "github.com/github/hub/ui" + "github.com/github/hub/utils" ) type Cmd struct { @@ -36,6 +37,7 @@ func (cmd *Cmd) WithArgs(args ...string) *Cmd { } func (cmd *Cmd) CombinedOutput() (string, error) { + verboseLog(cmd) output, err := exec.Command(cmd.Name, cmd.Args...).CombinedOutput() return string(output), err @@ -53,6 +55,7 @@ func (cmd *Cmd) Run() error { // Spawn runs command with spawn(3) func (cmd *Cmd) Spawn() error { + verboseLog(cmd) c := exec.Command(cmd.Name, cmd.Args...) c.Stdin = os.Stdin c.Stdout = os.Stdout @@ -72,6 +75,7 @@ func (cmd *Cmd) Exec() error { args := []string{binary} args = append(args, cmd.Args...) 
+ verboseLog(cmd) return syscall.Exec(binary, args, os.Environ()) } @@ -90,3 +94,13 @@ func New(cmd string) *Cmd { func NewWithArray(cmd []string) *Cmd { return &Cmd{Name: cmd[0], Args: cmd[1:]} } + +func verboseLog(cmd *Cmd) { + if os.Getenv("HUB_VERBOSE") != "" { + msg := fmt.Sprintf("$ %s %s", cmd.Name, strings.Join(cmd.Args, " ")) + if ui.IsTerminal(os.Stderr) { + msg = fmt.Sprintf("\033[35m%s\033[0m", msg) + } + ui.Errorln(msg) + } +} diff --git a/Godeps/_workspace/src/github.com/github/hub/cmd/cmd_test.go b/vendor/github.com/github/hub/cmd/cmd_test.go similarity index 84% rename from Godeps/_workspace/src/github.com/github/hub/cmd/cmd_test.go rename to vendor/github.com/github/hub/cmd/cmd_test.go index 28c532b..c735bb4 100644 --- a/Godeps/_workspace/src/github.com/github/hub/cmd/cmd_test.go +++ b/vendor/github.com/github/hub/cmd/cmd_test.go @@ -3,7 +3,7 @@ package cmd import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert" ) func TestNew(t *testing.T) { diff --git a/vendor/github.com/github/hub/fixtures/fixtures.go b/vendor/github.com/github/hub/fixtures/fixtures.go new file mode 100644 index 0000000..7eb2037 --- /dev/null +++ b/vendor/github.com/github/hub/fixtures/fixtures.go @@ -0,0 +1,14 @@ +package fixtures + +import ( + "os" + "path/filepath" +) + +func Path(segment ...string) string { + pwd, _ := os.Getwd() + p := []string{pwd, "..", "fixtures"} + p = append(p, segment...) + + return filepath.Join(p...) +} diff --git a/vendor/github.com/github/hub/fixtures/gh.zip b/vendor/github.com/github/hub/fixtures/gh.zip new file mode 100644 index 0000000000000000000000000000000000000000..16b8de85c7c397323dde84e3bae6df7670242177 GIT binary patch literal 246 zcmWIWW@h1H00Tco7YGAFGBXG;q-W@dM({A43^@{X9*Dm(iZBG=)W8bSke&h30#c69 z!pJ1bjLQ}tn57PH9YIX6og8pG1H5r-W@SKVUeag?(G0Sm18#qSH!B;+R3;#-0n#ZT G4g&ySKPJ5Z literal 0 HcmV?d00001 diff --git a/vendor/github.com/github/hub/fixtures/test_configs.go b/vendor/github.com/github/hub/fixtures/test_configs.go new file mode 100644 index 0000000..8f96f73 --- /dev/null +++ b/vendor/github.com/github/hub/fixtures/test_configs.go @@ -0,0 +1,43 @@ +package fixtures + +import ( + "io/ioutil" + "os" +) + +type TestConfigs struct { + Path string +} + +func (c *TestConfigs) TearDown() { + os.Setenv("HUB_CONFIG", "") + os.RemoveAll(c.Path) +} + +func SetupTomlTestConfig() *TestConfigs { + file, _ := ioutil.TempFile("", "test-gh-config-") + + content := `[[hosts]] + host = "github.com" + user = "jingweno" + access_token = "123" + protocol = "http"` + ioutil.WriteFile(file.Name(), []byte(content), os.ModePerm) + os.Setenv("HUB_CONFIG", file.Name()) + + return &TestConfigs{file.Name()} +} + +func SetupTestConfigs() *TestConfigs { + file, _ := ioutil.TempFile("", "test-gh-config-") + + content := `--- +github.com: +- user: jingweno + oauth_token: 123 + protocol: http` + ioutil.WriteFile(file.Name(), []byte(content), os.ModePerm) + os.Setenv("HUB_CONFIG", file.Name()) + + return &TestConfigs{file.Name()} +} diff --git a/vendor/github.com/github/hub/fixtures/test_repo.go b/vendor/github.com/github/hub/fixtures/test_repo.go new file mode 100644 index 0000000..4482ff8 --- /dev/null +++ b/vendor/github.com/github/hub/fixtures/test_repo.go @@ -0,0 +1,93 @@ +package fixtures + +import ( + "fmt" + "io/ioutil" + "os" + "path/filepath" + + "github.com/github/hub/cmd" +) + +var pwd, home string + +func init() { + // caching `pwd` and $HOME and reset them after 
test repo is teared down + // `pwd` is changed to the bin temp dir during test run + pwd, _ = os.Getwd() + home = os.Getenv("HOME") +} + +type TestRepo struct { + pwd string + dir string + home string + Remote string +} + +func (r *TestRepo) Setup() { + dir, err := ioutil.TempDir("", "test-repo") + if err != nil { + panic(err) + } + r.dir = dir + + os.Setenv("HOME", r.dir) + + targetPath := filepath.Join(r.dir, "test.git") + err = r.clone(r.Remote, targetPath) + if err != nil { + panic(err) + } + + err = os.Chdir(targetPath) + if err != nil { + panic(err) + } +} + +func (r *TestRepo) AddRemote(name, url, pushURL string) { + add := cmd.New("git").WithArgs("remote", "add", name, url) + if _, err := add.CombinedOutput(); err != nil { + panic(err) + } + if pushURL != "" { + set := cmd.New("git").WithArgs("remote", "set-url", "--push", name, pushURL) + if _, err := set.CombinedOutput(); err != nil { + panic(err) + } + } +} + +func (r *TestRepo) clone(repo, dir string) error { + cmd := cmd.New("git").WithArgs("clone", repo, dir) + output, err := cmd.CombinedOutput() + if err != nil { + err = fmt.Errorf("error cloning %s to %s: %s", repo, dir, output) + } + + return err +} + +func (r *TestRepo) TearDown() { + err := os.Chdir(r.pwd) + if err != nil { + panic(err) + } + + os.Setenv("HOME", r.home) + + err = os.RemoveAll(r.dir) + if err != nil { + panic(err) + } + +} + +func SetupTestRepo() *TestRepo { + remotePath := filepath.Join(pwd, "..", "fixtures", "test.git") + repo := &TestRepo{pwd: pwd, home: home, Remote: remotePath} + repo.Setup() + + return repo +} diff --git a/Godeps/_workspace/src/github.com/github/hub/git/git.go b/vendor/github.com/github/hub/git/git.go similarity index 98% rename from Godeps/_workspace/src/github.com/github/hub/git/git.go rename to vendor/github.com/github/hub/git/git.go index 28be3d7..d82f63c 100644 --- a/Godeps/_workspace/src/github.com/github/hub/git/git.go +++ b/vendor/github.com/github/hub/git/git.go @@ -7,7 +7,7 @@ import ( "path/filepath" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/cmd" + "github.com/github/hub/cmd" ) var GlobalFlags []string diff --git a/Godeps/_workspace/src/github.com/github/hub/git/git_test.go b/vendor/github.com/github/hub/git/git_test.go similarity index 95% rename from Godeps/_workspace/src/github.com/github/hub/git/git_test.go rename to vendor/github.com/github/hub/git/git_test.go index 265b050..f85a77a 100644 --- a/Godeps/_workspace/src/github.com/github/hub/git/git_test.go +++ b/vendor/github.com/github/hub/git/git_test.go @@ -4,7 +4,7 @@ import ( "strings" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "github.com/github/hub/fixtures" ) diff --git a/Godeps/_workspace/src/github.com/github/hub/git/ssh_config.go b/vendor/github.com/github/hub/git/ssh_config.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/git/ssh_config.go rename to vendor/github.com/github/hub/git/ssh_config.go diff --git a/Godeps/_workspace/src/github.com/github/hub/git/ssh_config_test.go b/vendor/github.com/github/hub/git/ssh_config_test.go similarity index 83% rename from Godeps/_workspace/src/github.com/github/hub/git/ssh_config_test.go rename to vendor/github.com/github/hub/git/ssh_config_test.go index a84fa24..558205b 100644 --- a/Godeps/_workspace/src/github.com/github/hub/git/ssh_config_test.go +++ b/vendor/github.com/github/hub/git/ssh_config_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - 
"github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestSSHConfigReader_Read(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/git/url.go b/vendor/github.com/github/hub/git/url.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/git/url.go rename to vendor/github.com/github/hub/git/url.go diff --git a/Godeps/_workspace/src/github.com/github/hub/git/url_test.go b/vendor/github.com/github/hub/git/url_test.go similarity index 96% rename from Godeps/_workspace/src/github.com/github/hub/git/url_test.go rename to vendor/github.com/github/hub/git/url_test.go index dad7de5..9fa8f94 100644 --- a/Godeps/_workspace/src/github.com/github/hub/git/url_test.go +++ b/vendor/github.com/github/hub/git/url_test.go @@ -3,7 +3,7 @@ package git import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func createURLParser() *URLParser { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/branch.go b/vendor/github.com/github/hub/github/branch.go similarity index 95% rename from Godeps/_workspace/src/github.com/github/hub/github/branch.go rename to vendor/github.com/github/hub/github/branch.go index e827660..89329ce 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/branch.go +++ b/vendor/github.com/github/hub/github/branch.go @@ -5,7 +5,7 @@ import ( "regexp" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" + "github.com/github/hub/git" ) type Branch struct { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/branch_test.go b/vendor/github.com/github/hub/github/branch_test.go similarity index 90% rename from Godeps/_workspace/src/github.com/github/hub/github/branch_test.go rename to vendor/github.com/github/hub/github/branch_test.go index bb2909a..43e96e4 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/branch_test.go +++ b/vendor/github.com/github/hub/github/branch_test.go @@ -3,7 +3,7 @@ package github import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestBranch_ShortName(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/client.go b/vendor/github.com/github/hub/github/client.go similarity index 99% rename from Godeps/_workspace/src/github.com/github/hub/github/client.go rename to vendor/github.com/github/hub/github/client.go index 36f215d..6c71f86 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/client.go +++ b/vendor/github.com/github/hub/github/client.go @@ -8,7 +8,7 @@ import ( "os/user" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit" + "github.com/octokit/go-octokit/octokit" ) const ( diff --git a/Godeps/_workspace/src/github.com/github/hub/github/client_test.go b/vendor/github.com/github/hub/github/client_test.go similarity index 91% rename from Godeps/_workspace/src/github.com/github/hub/github/client_test.go rename to vendor/github.com/github/hub/github/client_test.go index 7f9ac11..140a36e 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/client_test.go +++ b/vendor/github.com/github/hub/github/client_test.go @@ -6,8 +6,8 @@ import ( "regexp" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - 
"github.com/remind101/deploy/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit" + "github.com/bmizerany/assert" + "github.com/octokit/go-octokit/octokit" ) func TestClient_newOctokitClient(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/config.go b/vendor/github.com/github/hub/github/config.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/github/config.go rename to vendor/github.com/github/hub/github/config.go index e1c765f..b426cef 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/config.go +++ b/vendor/github.com/github/hub/github/config.go @@ -9,9 +9,9 @@ import ( "path/filepath" "strconv" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/howeyc/gopass" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/ui" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/utils" + "github.com/howeyc/gopass" + "github.com/github/hub/ui" + "github.com/github/hub/utils" ) var defaultConfigsFile string diff --git a/Godeps/_workspace/src/github.com/github/hub/github/config_decoder.go b/vendor/github.com/github/hub/github/config_decoder.go similarity index 83% rename from Godeps/_workspace/src/github.com/github/hub/github/config_decoder.go rename to vendor/github.com/github/hub/github/config_decoder.go index 4527242..b355967 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/config_decoder.go +++ b/vendor/github.com/github/hub/github/config_decoder.go @@ -4,8 +4,8 @@ import ( "io" "io/ioutil" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" - "github.com/remind101/deploy/Godeps/_workspace/src/gopkg.in/yaml.v1" + "github.com/BurntSushi/toml" + "gopkg.in/yaml.v1" ) type configDecoder interface { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/config_encoder.go b/vendor/github.com/github/hub/github/config_encoder.go similarity index 82% rename from Godeps/_workspace/src/github.com/github/hub/github/config_encoder.go rename to vendor/github.com/github/hub/github/config_encoder.go index 319c929..6d49919 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/config_encoder.go +++ b/vendor/github.com/github/hub/github/config_encoder.go @@ -3,8 +3,8 @@ package github import ( "io" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/BurntSushi/toml" - "github.com/remind101/deploy/Godeps/_workspace/src/gopkg.in/yaml.v1" + "github.com/BurntSushi/toml" + "gopkg.in/yaml.v1" ) type configEncoder interface { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/config_service.go b/vendor/github.com/github/hub/github/config_service.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/github/config_service.go rename to vendor/github.com/github/hub/github/config_service.go diff --git a/Godeps/_workspace/src/github.com/github/hub/github/config_service_test.go b/vendor/github.com/github/hub/github/config_service_test.go similarity index 96% rename from Godeps/_workspace/src/github.com/github/hub/github/config_service_test.go rename to vendor/github.com/github/hub/github/config_service_test.go index 107e298..3abdc52 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/config_service_test.go +++ b/vendor/github.com/github/hub/github/config_service_test.go @@ -6,7 +6,7 @@ import ( "strings" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "github.com/github/hub/fixtures" ) diff 
--git a/Godeps/_workspace/src/github.com/github/hub/github/crash_report.go b/vendor/github.com/github/hub/github/crash_report.go similarity index 92% rename from Godeps/_workspace/src/github.com/github/hub/github/crash_report.go rename to vendor/github.com/github/hub/github/crash_report.go index 8275889..8400b4d 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/crash_report.go +++ b/vendor/github.com/github/hub/github/crash_report.go @@ -9,9 +9,9 @@ import ( "runtime" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/ui" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/utils" + "github.com/github/hub/git" + "github.com/github/hub/ui" + "github.com/github/hub/utils" ) const ( diff --git a/Godeps/_workspace/src/github.com/github/hub/github/crash_report_test.go b/vendor/github.com/github/hub/github/crash_report_test.go similarity index 96% rename from Godeps/_workspace/src/github.com/github/hub/github/crash_report_test.go rename to vendor/github.com/github/hub/github/crash_report_test.go index cc69084..db24b7f 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/crash_report_test.go +++ b/vendor/github.com/github/hub/github/crash_report_test.go @@ -3,7 +3,7 @@ package github import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "github.com/github/hub/fixtures" ) diff --git a/Godeps/_workspace/src/github.com/github/hub/github/editor.go b/vendor/github.com/github/hub/github/editor.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/github/editor.go rename to vendor/github.com/github/hub/github/editor.go index 2bfa759..43bcd32 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/editor.go +++ b/vendor/github.com/github/hub/github/editor.go @@ -11,8 +11,8 @@ import ( "regexp" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/cmd" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" + "github.com/github/hub/cmd" + "github.com/github/hub/git" ) func NewEditor(filePrefix, topic, message string) (editor *Editor, err error) { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/editor_test.go b/vendor/github.com/github/hub/github/editor_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/github/hub/github/editor_test.go rename to vendor/github.com/github/hub/github/editor_test.go index 4016280..a05a80c 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/editor_test.go +++ b/vendor/github.com/github/hub/github/editor_test.go @@ -8,7 +8,7 @@ import ( "strings" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestEditor_openAndEdit_deleteFileWhenOpeningEditorFails(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/hosts.go b/vendor/github.com/github/hub/github/hosts.go similarity index 90% rename from Godeps/_workspace/src/github.com/github/hub/github/hosts.go rename to vendor/github.com/github/hub/github/hosts.go index d49011e..5e369dc 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/hosts.go +++ b/vendor/github.com/github/hub/github/hosts.go @@ -4,7 +4,7 @@ import ( "os" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" + "github.com/github/hub/git" ) var 
( diff --git a/Godeps/_workspace/src/github.com/github/hub/github/http.go b/vendor/github.com/github/hub/github/http.go similarity index 97% rename from Godeps/_workspace/src/github.com/github/hub/github/http.go rename to vendor/github.com/github/hub/github/http.go index 7a3403b..0181a6d 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/http.go +++ b/vendor/github.com/github/hub/github/http.go @@ -13,7 +13,7 @@ import ( "strings" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/utils" + "github.com/github/hub/utils" ) type verboseTransport struct { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/http_test.go b/vendor/github.com/github/hub/github/http_test.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/github/http_test.go rename to vendor/github.com/github/hub/github/http_test.go index 9f82eb2..cf753af 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/http_test.go +++ b/vendor/github.com/github/hub/github/http_test.go @@ -8,7 +8,7 @@ import ( "net/url" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func setupTestServer() *testServer { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/localrepo.go b/vendor/github.com/github/hub/github/localrepo.go similarity index 97% rename from Godeps/_workspace/src/github.com/github/hub/github/localrepo.go rename to vendor/github.com/github/hub/github/localrepo.go index ce24676..95281fa 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/localrepo.go +++ b/vendor/github.com/github/hub/github/localrepo.go @@ -4,7 +4,7 @@ import ( "fmt" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" + "github.com/github/hub/git" ) func LocalRepo() (repo *GitHubRepo, err error) { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/localrepo_test.go b/vendor/github.com/github/hub/github/localrepo_test.go similarity index 90% rename from Godeps/_workspace/src/github.com/github/hub/github/localrepo_test.go rename to vendor/github.com/github/hub/github/localrepo_test.go index 8d412ee..b7a28aa 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/localrepo_test.go +++ b/vendor/github.com/github/hub/github/localrepo_test.go @@ -4,7 +4,7 @@ import ( "net/url" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "github.com/github/hub/fixtures" ) diff --git a/Godeps/_workspace/src/github.com/github/hub/github/project.go b/vendor/github.com/github/hub/github/project.go similarity index 95% rename from Godeps/_workspace/src/github.com/github/hub/github/project.go rename to vendor/github.com/github/hub/github/project.go index 0d441e5..6a8884a 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/project.go +++ b/vendor/github.com/github/hub/github/project.go @@ -6,8 +6,8 @@ import ( "os" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/utils" + "github.com/github/hub/git" + "github.com/github/hub/utils" ) type Project struct { diff --git a/Godeps/_workspace/src/github.com/github/hub/github/project_test.go b/vendor/github.com/github/hub/github/project_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/github/hub/github/project_test.go rename to vendor/github.com/github/hub/github/project_test.go 
index d31a0dc..f4d4487 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/project_test.go +++ b/vendor/github.com/github/hub/github/project_test.go @@ -5,7 +5,7 @@ import ( "os" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "github.com/github/hub/fixtures" ) diff --git a/Godeps/_workspace/src/github.com/github/hub/github/remote.go b/vendor/github.com/github/hub/github/remote.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/github/remote.go rename to vendor/github.com/github/hub/github/remote.go index 803fe7e..08d9a90 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/remote.go +++ b/vendor/github.com/github/hub/github/remote.go @@ -6,7 +6,7 @@ import ( "regexp" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" + "github.com/github/hub/git" ) var ( diff --git a/Godeps/_workspace/src/github.com/github/hub/github/url.go b/vendor/github.com/github/hub/github/url.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/github/url.go rename to vendor/github.com/github/hub/github/url.go diff --git a/Godeps/_workspace/src/github.com/github/hub/github/url_test.go b/vendor/github.com/github/hub/github/url_test.go similarity index 90% rename from Godeps/_workspace/src/github.com/github/hub/github/url_test.go rename to vendor/github.com/github/hub/github/url_test.go index cfa8529..2593690 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/url_test.go +++ b/vendor/github.com/github/hub/github/url_test.go @@ -3,7 +3,7 @@ package github import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "github.com/github/hub/fixtures" ) diff --git a/Godeps/_workspace/src/github.com/github/hub/github/util.go b/vendor/github.com/github/hub/github/util.go similarity index 67% rename from Godeps/_workspace/src/github.com/github/hub/github/util.go rename to vendor/github.com/github/hub/github/util.go index 6ab1f0a..8053f2b 100644 --- a/Godeps/_workspace/src/github.com/github/hub/github/util.go +++ b/vendor/github.com/github/hub/github/util.go @@ -1,8 +1,8 @@ package github import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/git" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/mattn/go-isatty" + "github.com/github/hub/git" + "github.com/mattn/go-isatty" ) func IsHttpsProtocol() bool { diff --git a/Godeps/_workspace/src/github.com/github/hub/ui/ui.go b/vendor/github.com/github/hub/ui/ui.go similarity index 75% rename from Godeps/_workspace/src/github.com/github/hub/ui/ui.go rename to vendor/github.com/github/hub/ui/ui.go index 1c90ff2..8234dd1 100644 --- a/Godeps/_workspace/src/github.com/github/hub/ui/ui.go +++ b/vendor/github.com/github/hub/ui/ui.go @@ -4,6 +4,9 @@ import ( "fmt" "io" "os" + + "github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable" + "github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty" ) type UI interface { @@ -13,7 +16,11 @@ type UI interface { Errorln(a ...interface{}) (n int, err error) } -var Default UI = Console{Stdout: os.Stdout, Stderr: os.Stderr} +var ( + Stdout = colorable.NewColorableStdout() + Stderr = colorable.NewColorableStderr() + Default UI = Console{Stdout: Stdout, Stderr: Stderr} +) func Printf(format string, a ...interface{}) (n int, err error) { return Default.Printf(format, a...) 
@@ -31,6 +38,10 @@ func Errorln(a ...interface{}) (n int, err error) { return Default.Errorln(a...) } +func IsTerminal(f *os.File) bool { + return isatty.IsTerminal(f.Fd()) +} + type Console struct { Stdout io.Writer Stderr io.Writer diff --git a/Godeps/_workspace/src/github.com/github/hub/utils/utils.go b/vendor/github.com/github/hub/utils/utils.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/utils/utils.go rename to vendor/github.com/github/hub/utils/utils.go index ab40e59..f71d50f 100644 --- a/Godeps/_workspace/src/github.com/github/hub/utils/utils.go +++ b/vendor/github.com/github/hub/utils/utils.go @@ -8,7 +8,7 @@ import ( "runtime" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/github/hub/ui" + "github.com/github/hub/ui" ) func Check(err error) { diff --git a/Godeps/_workspace/src/github.com/github/hub/utils/utils_test.go b/vendor/github.com/github/hub/utils/utils_test.go similarity index 81% rename from Godeps/_workspace/src/github.com/github/hub/utils/utils_test.go rename to vendor/github.com/github/hub/utils/utils_test.go index d2c6073..9b83a8b 100644 --- a/Godeps/_workspace/src/github.com/github/hub/utils/utils_test.go +++ b/vendor/github.com/github/hub/utils/utils_test.go @@ -1,7 +1,7 @@ package utils import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert" "testing" ) diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity.go b/vendor/github.com/google/go-github/github/activity.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity.go rename to vendor/github.com/google/go-github/github/activity.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_events.go b/vendor/github.com/google/go-github/github/activity_events.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_events.go rename to vendor/github.com/google/go-github/github/activity_events.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_events_test.go b/vendor/github.com/google/go-github/github/activity_events_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_events_test.go rename to vendor/github.com/google/go-github/github/activity_events_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_notifications.go b/vendor/github.com/google/go-github/github/activity_notifications.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_notifications.go rename to vendor/github.com/google/go-github/github/activity_notifications.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_notifications_test.go b/vendor/github.com/google/go-github/github/activity_notifications_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_notifications_test.go rename to vendor/github.com/google/go-github/github/activity_notifications_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_star.go b/vendor/github.com/google/go-github/github/activity_star.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_star.go rename to vendor/github.com/google/go-github/github/activity_star.go diff --git 
a/Godeps/_workspace/src/github.com/google/go-github/github/activity_star_test.go b/vendor/github.com/google/go-github/github/activity_star_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_star_test.go rename to vendor/github.com/google/go-github/github/activity_star_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_watching.go b/vendor/github.com/google/go-github/github/activity_watching.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_watching.go rename to vendor/github.com/google/go-github/github/activity_watching.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/activity_watching_test.go b/vendor/github.com/google/go-github/github/activity_watching_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/activity_watching_test.go rename to vendor/github.com/google/go-github/github/activity_watching_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/doc.go b/vendor/github.com/google/go-github/github/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/doc.go rename to vendor/github.com/google/go-github/github/doc.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/gists.go b/vendor/github.com/google/go-github/github/gists.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/gists.go rename to vendor/github.com/google/go-github/github/gists.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/gists_comments.go b/vendor/github.com/google/go-github/github/gists_comments.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/gists_comments.go rename to vendor/github.com/google/go-github/github/gists_comments.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/gists_comments_test.go b/vendor/github.com/google/go-github/github/gists_comments_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/gists_comments_test.go rename to vendor/github.com/google/go-github/github/gists_comments_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/gists_test.go b/vendor/github.com/google/go-github/github/gists_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/gists_test.go rename to vendor/github.com/google/go-github/github/gists_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git.go b/vendor/github.com/google/go-github/github/git.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git.go rename to vendor/github.com/google/go-github/github/git.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_blobs.go b/vendor/github.com/google/go-github/github/git_blobs.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_blobs.go rename to vendor/github.com/google/go-github/github/git_blobs.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_blobs_test.go b/vendor/github.com/google/go-github/github/git_blobs_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_blobs_test.go rename to vendor/github.com/google/go-github/github/git_blobs_test.go diff --git 
a/Godeps/_workspace/src/github.com/google/go-github/github/git_commits.go b/vendor/github.com/google/go-github/github/git_commits.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_commits.go rename to vendor/github.com/google/go-github/github/git_commits.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_commits_test.go b/vendor/github.com/google/go-github/github/git_commits_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_commits_test.go rename to vendor/github.com/google/go-github/github/git_commits_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_refs.go b/vendor/github.com/google/go-github/github/git_refs.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_refs.go rename to vendor/github.com/google/go-github/github/git_refs.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_refs_test.go b/vendor/github.com/google/go-github/github/git_refs_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_refs_test.go rename to vendor/github.com/google/go-github/github/git_refs_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_tags.go b/vendor/github.com/google/go-github/github/git_tags.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_tags.go rename to vendor/github.com/google/go-github/github/git_tags.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_tags_test.go b/vendor/github.com/google/go-github/github/git_tags_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_tags_test.go rename to vendor/github.com/google/go-github/github/git_tags_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_trees.go b/vendor/github.com/google/go-github/github/git_trees.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_trees.go rename to vendor/github.com/google/go-github/github/git_trees.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/git_trees_test.go b/vendor/github.com/google/go-github/github/git_trees_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/git_trees_test.go rename to vendor/github.com/google/go-github/github/git_trees_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/github.go b/vendor/github.com/google/go-github/github/github.go similarity index 99% rename from Godeps/_workspace/src/github.com/google/go-github/github/github.go rename to vendor/github.com/google/go-github/github/github.go index cf0d7cb..fced107 100644 --- a/Godeps/_workspace/src/github.com/google/go-github/github/github.go +++ b/vendor/github.com/google/go-github/github/github.go @@ -19,7 +19,7 @@ import ( "strings" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/google/go-querystring/query" + "github.com/google/go-querystring/query" ) const ( diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/github_test.go b/vendor/github.com/google/go-github/github/github_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/github_test.go rename to vendor/github.com/google/go-github/github/github_test.go diff --git 
a/Godeps/_workspace/src/github.com/google/go-github/github/gitignore.go b/vendor/github.com/google/go-github/github/gitignore.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/gitignore.go rename to vendor/github.com/google/go-github/github/gitignore.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/gitignore_test.go b/vendor/github.com/google/go-github/github/gitignore_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/gitignore_test.go rename to vendor/github.com/google/go-github/github/gitignore_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues.go b/vendor/github.com/google/go-github/github/issues.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues.go rename to vendor/github.com/google/go-github/github/issues.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_assignees.go b/vendor/github.com/google/go-github/github/issues_assignees.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_assignees.go rename to vendor/github.com/google/go-github/github/issues_assignees.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_assignees_test.go b/vendor/github.com/google/go-github/github/issues_assignees_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_assignees_test.go rename to vendor/github.com/google/go-github/github/issues_assignees_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_comments.go b/vendor/github.com/google/go-github/github/issues_comments.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_comments.go rename to vendor/github.com/google/go-github/github/issues_comments.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_comments_test.go b/vendor/github.com/google/go-github/github/issues_comments_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_comments_test.go rename to vendor/github.com/google/go-github/github/issues_comments_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_events.go b/vendor/github.com/google/go-github/github/issues_events.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_events.go rename to vendor/github.com/google/go-github/github/issues_events.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_events_test.go b/vendor/github.com/google/go-github/github/issues_events_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_events_test.go rename to vendor/github.com/google/go-github/github/issues_events_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_labels.go b/vendor/github.com/google/go-github/github/issues_labels.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_labels.go rename to vendor/github.com/google/go-github/github/issues_labels.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_labels_test.go b/vendor/github.com/google/go-github/github/issues_labels_test.go similarity index 100% rename from 
Godeps/_workspace/src/github.com/google/go-github/github/issues_labels_test.go rename to vendor/github.com/google/go-github/github/issues_labels_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_milestones.go b/vendor/github.com/google/go-github/github/issues_milestones.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_milestones.go rename to vendor/github.com/google/go-github/github/issues_milestones.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_milestones_test.go b/vendor/github.com/google/go-github/github/issues_milestones_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_milestones_test.go rename to vendor/github.com/google/go-github/github/issues_milestones_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/issues_test.go b/vendor/github.com/google/go-github/github/issues_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/issues_test.go rename to vendor/github.com/google/go-github/github/issues_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/misc.go b/vendor/github.com/google/go-github/github/misc.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/misc.go rename to vendor/github.com/google/go-github/github/misc.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/misc_test.go b/vendor/github.com/google/go-github/github/misc_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/misc_test.go rename to vendor/github.com/google/go-github/github/misc_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/orgs.go b/vendor/github.com/google/go-github/github/orgs.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/orgs.go rename to vendor/github.com/google/go-github/github/orgs.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/orgs_members.go b/vendor/github.com/google/go-github/github/orgs_members.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/orgs_members.go rename to vendor/github.com/google/go-github/github/orgs_members.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/orgs_members_test.go b/vendor/github.com/google/go-github/github/orgs_members_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/orgs_members_test.go rename to vendor/github.com/google/go-github/github/orgs_members_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/orgs_teams.go b/vendor/github.com/google/go-github/github/orgs_teams.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/orgs_teams.go rename to vendor/github.com/google/go-github/github/orgs_teams.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/orgs_teams_test.go b/vendor/github.com/google/go-github/github/orgs_teams_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/orgs_teams_test.go rename to vendor/github.com/google/go-github/github/orgs_teams_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/orgs_test.go b/vendor/github.com/google/go-github/github/orgs_test.go similarity index 100% rename from 
Godeps/_workspace/src/github.com/google/go-github/github/orgs_test.go rename to vendor/github.com/google/go-github/github/orgs_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/pulls.go b/vendor/github.com/google/go-github/github/pulls.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/pulls.go rename to vendor/github.com/google/go-github/github/pulls.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/pulls_comments.go b/vendor/github.com/google/go-github/github/pulls_comments.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/pulls_comments.go rename to vendor/github.com/google/go-github/github/pulls_comments.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/pulls_comments_test.go b/vendor/github.com/google/go-github/github/pulls_comments_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/pulls_comments_test.go rename to vendor/github.com/google/go-github/github/pulls_comments_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/pulls_test.go b/vendor/github.com/google/go-github/github/pulls_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/pulls_test.go rename to vendor/github.com/google/go-github/github/pulls_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos.go b/vendor/github.com/google/go-github/github/repos.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos.go rename to vendor/github.com/google/go-github/github/repos.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_collaborators.go b/vendor/github.com/google/go-github/github/repos_collaborators.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_collaborators.go rename to vendor/github.com/google/go-github/github/repos_collaborators.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_collaborators_test.go b/vendor/github.com/google/go-github/github/repos_collaborators_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_collaborators_test.go rename to vendor/github.com/google/go-github/github/repos_collaborators_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_comments.go b/vendor/github.com/google/go-github/github/repos_comments.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_comments.go rename to vendor/github.com/google/go-github/github/repos_comments.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_comments_test.go b/vendor/github.com/google/go-github/github/repos_comments_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_comments_test.go rename to vendor/github.com/google/go-github/github/repos_comments_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_commits.go b/vendor/github.com/google/go-github/github/repos_commits.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_commits.go rename to vendor/github.com/google/go-github/github/repos_commits.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_commits_test.go 
b/vendor/github.com/google/go-github/github/repos_commits_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_commits_test.go rename to vendor/github.com/google/go-github/github/repos_commits_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_contents.go b/vendor/github.com/google/go-github/github/repos_contents.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_contents.go rename to vendor/github.com/google/go-github/github/repos_contents.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_contents_test.go b/vendor/github.com/google/go-github/github/repos_contents_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_contents_test.go rename to vendor/github.com/google/go-github/github/repos_contents_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_deployments.go b/vendor/github.com/google/go-github/github/repos_deployments.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_deployments.go rename to vendor/github.com/google/go-github/github/repos_deployments.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_deployments_test.go b/vendor/github.com/google/go-github/github/repos_deployments_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_deployments_test.go rename to vendor/github.com/google/go-github/github/repos_deployments_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_forks.go b/vendor/github.com/google/go-github/github/repos_forks.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_forks.go rename to vendor/github.com/google/go-github/github/repos_forks.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_forks_test.go b/vendor/github.com/google/go-github/github/repos_forks_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_forks_test.go rename to vendor/github.com/google/go-github/github/repos_forks_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_hooks.go b/vendor/github.com/google/go-github/github/repos_hooks.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_hooks.go rename to vendor/github.com/google/go-github/github/repos_hooks.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_hooks_test.go b/vendor/github.com/google/go-github/github/repos_hooks_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_hooks_test.go rename to vendor/github.com/google/go-github/github/repos_hooks_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_keys.go b/vendor/github.com/google/go-github/github/repos_keys.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_keys.go rename to vendor/github.com/google/go-github/github/repos_keys.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_keys_test.go b/vendor/github.com/google/go-github/github/repos_keys_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_keys_test.go rename to 
vendor/github.com/google/go-github/github/repos_keys_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_merging.go b/vendor/github.com/google/go-github/github/repos_merging.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_merging.go rename to vendor/github.com/google/go-github/github/repos_merging.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_merging_test.go b/vendor/github.com/google/go-github/github/repos_merging_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_merging_test.go rename to vendor/github.com/google/go-github/github/repos_merging_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_pages.go b/vendor/github.com/google/go-github/github/repos_pages.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_pages.go rename to vendor/github.com/google/go-github/github/repos_pages.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_pages_test.go b/vendor/github.com/google/go-github/github/repos_pages_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_pages_test.go rename to vendor/github.com/google/go-github/github/repos_pages_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_releases.go b/vendor/github.com/google/go-github/github/repos_releases.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_releases.go rename to vendor/github.com/google/go-github/github/repos_releases.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_releases_test.go b/vendor/github.com/google/go-github/github/repos_releases_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_releases_test.go rename to vendor/github.com/google/go-github/github/repos_releases_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_stats.go b/vendor/github.com/google/go-github/github/repos_stats.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_stats.go rename to vendor/github.com/google/go-github/github/repos_stats.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_stats_test.go b/vendor/github.com/google/go-github/github/repos_stats_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_stats_test.go rename to vendor/github.com/google/go-github/github/repos_stats_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_statuses.go b/vendor/github.com/google/go-github/github/repos_statuses.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_statuses.go rename to vendor/github.com/google/go-github/github/repos_statuses.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_statuses_test.go b/vendor/github.com/google/go-github/github/repos_statuses_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_statuses_test.go rename to vendor/github.com/google/go-github/github/repos_statuses_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/repos_test.go b/vendor/github.com/google/go-github/github/repos_test.go similarity index 
100% rename from Godeps/_workspace/src/github.com/google/go-github/github/repos_test.go rename to vendor/github.com/google/go-github/github/repos_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/search.go b/vendor/github.com/google/go-github/github/search.go similarity index 98% rename from Godeps/_workspace/src/github.com/google/go-github/github/search.go rename to vendor/github.com/google/go-github/github/search.go index 714166c..d9e9b41 100644 --- a/Godeps/_workspace/src/github.com/google/go-github/github/search.go +++ b/vendor/github.com/google/go-github/github/search.go @@ -8,7 +8,7 @@ package github import ( "fmt" - qs "github.com/remind101/deploy/Godeps/_workspace/src/github.com/google/go-querystring/query" + qs "github.com/google/go-querystring/query" ) // SearchService provides access to the search related functions diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/search_test.go b/vendor/github.com/google/go-github/github/search_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/search_test.go rename to vendor/github.com/google/go-github/github/search_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/strings.go b/vendor/github.com/google/go-github/github/strings.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/strings.go rename to vendor/github.com/google/go-github/github/strings.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/strings_test.go b/vendor/github.com/google/go-github/github/strings_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/strings_test.go rename to vendor/github.com/google/go-github/github/strings_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/timestamp.go b/vendor/github.com/google/go-github/github/timestamp.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/timestamp.go rename to vendor/github.com/google/go-github/github/timestamp.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/timestamp_test.go b/vendor/github.com/google/go-github/github/timestamp_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/timestamp_test.go rename to vendor/github.com/google/go-github/github/timestamp_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users.go b/vendor/github.com/google/go-github/github/users.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users.go rename to vendor/github.com/google/go-github/github/users.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_administration.go b/vendor/github.com/google/go-github/github/users_administration.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_administration.go rename to vendor/github.com/google/go-github/github/users_administration.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_administration_test.go b/vendor/github.com/google/go-github/github/users_administration_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_administration_test.go rename to vendor/github.com/google/go-github/github/users_administration_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_emails.go 
b/vendor/github.com/google/go-github/github/users_emails.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_emails.go rename to vendor/github.com/google/go-github/github/users_emails.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_emails_test.go b/vendor/github.com/google/go-github/github/users_emails_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_emails_test.go rename to vendor/github.com/google/go-github/github/users_emails_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_followers.go b/vendor/github.com/google/go-github/github/users_followers.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_followers.go rename to vendor/github.com/google/go-github/github/users_followers.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_followers_test.go b/vendor/github.com/google/go-github/github/users_followers_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_followers_test.go rename to vendor/github.com/google/go-github/github/users_followers_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_keys.go b/vendor/github.com/google/go-github/github/users_keys.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_keys.go rename to vendor/github.com/google/go-github/github/users_keys.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_keys_test.go b/vendor/github.com/google/go-github/github/users_keys_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_keys_test.go rename to vendor/github.com/google/go-github/github/users_keys_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-github/github/users_test.go b/vendor/github.com/google/go-github/github/users_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-github/github/users_test.go rename to vendor/github.com/google/go-github/github/users_test.go diff --git a/Godeps/_workspace/src/github.com/google/go-querystring/query/encode.go b/vendor/github.com/google/go-querystring/query/encode.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-querystring/query/encode.go rename to vendor/github.com/google/go-querystring/query/encode.go diff --git a/Godeps/_workspace/src/github.com/google/go-querystring/query/encode_test.go b/vendor/github.com/google/go-querystring/query/encode_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/google/go-querystring/query/encode_test.go rename to vendor/github.com/google/go-querystring/query/encode_test.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/LICENSE.txt b/vendor/github.com/howeyc/gopass/LICENSE.txt similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/LICENSE.txt rename to vendor/github.com/howeyc/gopass/LICENSE.txt diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/README.md b/vendor/github.com/howeyc/gopass/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/README.md rename to 
vendor/github.com/howeyc/gopass/README.md diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/bsd.go b/vendor/github.com/howeyc/gopass/bsd.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/bsd.go rename to vendor/github.com/howeyc/gopass/bsd.go diff --git a/Godeps/_workspace/src/github.com/howeyc/gopass/nix.go b/vendor/github.com/howeyc/gopass/nix.go similarity index 78% rename from Godeps/_workspace/src/github.com/howeyc/gopass/nix.go rename to vendor/github.com/howeyc/gopass/nix.go index 99c4e7a..df25219 100644 --- a/Godeps/_workspace/src/github.com/howeyc/gopass/nix.go +++ b/vendor/github.com/howeyc/gopass/nix.go @@ -5,7 +5,7 @@ package gopass import ( "syscall" - "github.com/remind101/deploy/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal" + "golang.org/x/crypto/ssh/terminal" ) func getch() byte { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/pass.go b/vendor/github.com/howeyc/gopass/pass.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/pass.go rename to vendor/github.com/howeyc/gopass/pass.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/win.go b/vendor/github.com/howeyc/gopass/win.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/howeyc/gopass/win.go rename to vendor/github.com/howeyc/gopass/win.go diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/LICENSE b/vendor/github.com/inconshreveable/go-update/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/LICENSE rename to vendor/github.com/inconshreveable/go-update/LICENSE diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/README.md b/vendor/github.com/inconshreveable/go-update/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/README.md rename to vendor/github.com/inconshreveable/go-update/README.md diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/download/download.go b/vendor/github.com/inconshreveable/go-update/download/download.go similarity index 100% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/download/download.go rename to vendor/github.com/inconshreveable/go-update/download/download.go diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/hide_noop.go b/vendor/github.com/inconshreveable/go-update/hide_noop.go similarity index 100% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/hide_noop.go rename to vendor/github.com/inconshreveable/go-update/hide_noop.go diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/hide_windows.go b/vendor/github.com/inconshreveable/go-update/hide_windows.go similarity index 100% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/hide_windows.go rename to vendor/github.com/inconshreveable/go-update/hide_windows.go diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/update.go b/vendor/github.com/inconshreveable/go-update/update.go similarity index 98% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/update.go rename to vendor/github.com/inconshreveable/go-update/update.go index 986a035..a907d91 100644 --- 
a/Godeps/_workspace/src/github.com/inconshreveable/go-update/update.go +++ b/vendor/github.com/inconshreveable/go-update/update.go @@ -113,7 +113,7 @@ while outputting a progress meter and supports resuming partial downloads. package update import ( - "github.com/remind101/deploy/Godeps/_workspace/src/bitbucket.org/kardianos/osext" + "bitbucket.org/kardianos/osext" "bytes" "crypto" "crypto/rsa" @@ -122,8 +122,8 @@ import ( "crypto/x509" "encoding/pem" "fmt" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/inconshreveable/go-update/download" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kr/binarydist" + "github.com/inconshreveable/go-update/download" + "github.com/kr/binarydist" "io" "io/ioutil" "os" diff --git a/Godeps/_workspace/src/github.com/inconshreveable/go-update/update_test.go b/vendor/github.com/inconshreveable/go-update/update_test.go similarity index 99% rename from Godeps/_workspace/src/github.com/inconshreveable/go-update/update_test.go rename to vendor/github.com/inconshreveable/go-update/update_test.go index 2209b36..e9d8c95 100644 --- a/Godeps/_workspace/src/github.com/inconshreveable/go-update/update_test.go +++ b/vendor/github.com/inconshreveable/go-update/update_test.go @@ -7,7 +7,7 @@ import ( "crypto/rsa" "crypto/x509" "encoding/pem" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/kr/binarydist" + "github.com/kr/binarydist" "io/ioutil" "net" "net/http" diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/LICENSE b/vendor/github.com/jingweno/go-sawyer/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/LICENSE rename to vendor/github.com/jingweno/go-sawyer/LICENSE diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/README.md b/vendor/github.com/jingweno/go-sawyer/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/README.md rename to vendor/github.com/jingweno/go-sawyer/README.md diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/gopack.config b/vendor/github.com/jingweno/go-sawyer/gopack.config similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/gopack.config rename to vendor/github.com/jingweno/go-sawyer/gopack.config diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go b/vendor/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go similarity index 97% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go rename to vendor/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go index 34e7a43..e5e08de 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go +++ b/vendor/github.com/jingweno/go-sawyer/hypermedia/hypermedia.go @@ -4,7 +4,7 @@ package hypermedia import ( "fmt" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jtacoma/uritemplates" + "github.com/jtacoma/uritemplates" "net/url" "reflect" ) diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go b/vendor/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go rename to 
vendor/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go index da293b5..afbded6 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go +++ b/vendor/github.com/jingweno/go-sawyer/hypermedia/hypermedia_test.go @@ -3,7 +3,7 @@ package hypermedia import ( "bytes" "encoding/json" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "testing" ) diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go b/vendor/github.com/jingweno/go-sawyer/mediaheader/decoder.go similarity index 92% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go rename to vendor/github.com/jingweno/go-sawyer/mediaheader/decoder.go index be7c01e..6d5c373 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder.go +++ b/vendor/github.com/jingweno/go-sawyer/mediaheader/decoder.go @@ -1,7 +1,7 @@ package mediaheader import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" "net/http" "net/url" "strings" diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go b/vendor/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go similarity index 87% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go rename to vendor/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go index 7264a0f..ba0a32b 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go +++ b/vendor/github.com/jingweno/go-sawyer/mediaheader/decoder_test.go @@ -1,7 +1,7 @@ package mediaheader import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "net/http" "testing" ) diff --git a/vendor/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go b/vendor/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go new file mode 100644 index 0000000..a23c4bd --- /dev/null +++ b/vendor/github.com/jingweno/go-sawyer/mediaheader/mediaheader.go @@ -0,0 +1,9 @@ +package mediaheader + +import ( + "github.com/jingweno/go-sawyer/hypermedia" +) + +type MediaHeader struct { + Relations hypermedia.Relations +} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode.go b/vendor/github.com/jingweno/go-sawyer/mediatype/decode.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode.go rename to vendor/github.com/jingweno/go-sawyer/mediatype/decode.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go b/vendor/github.com/jingweno/go-sawyer/mediatype/decode_test.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go rename to vendor/github.com/jingweno/go-sawyer/mediatype/decode_test.go index 010a0d2..895d811 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/decode_test.go +++ b/vendor/github.com/jingweno/go-sawyer/mediatype/decode_test.go @@ -2,7 +2,7 @@ package mediatype import ( "bytes" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "io" 
"io/ioutil" "strings" diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode.go b/vendor/github.com/jingweno/go-sawyer/mediatype/encode.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode.go rename to vendor/github.com/jingweno/go-sawyer/mediatype/encode.go diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go b/vendor/github.com/jingweno/go-sawyer/mediatype/encode_test.go similarity index 94% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go rename to vendor/github.com/jingweno/go-sawyer/mediatype/encode_test.go index 57b6367..ffa7d8a 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/encode_test.go +++ b/vendor/github.com/jingweno/go-sawyer/mediatype/encode_test.go @@ -1,7 +1,7 @@ package mediatype import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "io" "strings" "testing" diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype.go b/vendor/github.com/jingweno/go-sawyer/mediatype/mediatype.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype.go rename to vendor/github.com/jingweno/go-sawyer/mediatype/mediatype.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go b/vendor/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go rename to vendor/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go index a972c8f..8ed1bbc 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go +++ b/vendor/github.com/jingweno/go-sawyer/mediatype/mediatype_test.go @@ -1,7 +1,7 @@ package mediatype import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" "testing" ) diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go b/vendor/github.com/jingweno/go-sawyer/request.go similarity index 91% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go rename to vendor/github.com/jingweno/go-sawyer/request.go index 6cf228f..80889f2 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request.go +++ b/vendor/github.com/jingweno/go-sawyer/request.go @@ -5,8 +5,8 @@ import ( "net/http" "net/url" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" + "github.com/jingweno/go-sawyer/mediaheader" + "github.com/jingweno/go-sawyer/mediatype" ) type Request struct { diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go b/vendor/github.com/jingweno/go-sawyer/request_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go rename to vendor/github.com/jingweno/go-sawyer/request_test.go index b0c17a5..d352f5f 100644 --- 
a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/request_test.go +++ b/vendor/github.com/jingweno/go-sawyer/request_test.go @@ -7,8 +7,8 @@ import ( "strings" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" + "github.com/bmizerany/assert" + "github.com/jingweno/go-sawyer/mediatype" ) func TestSuccessfulGet(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go b/vendor/github.com/jingweno/go-sawyer/response.go similarity index 89% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go rename to vendor/github.com/jingweno/go-sawyer/response.go index ec7ac4c..dc19d9e 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/response.go +++ b/vendor/github.com/jingweno/go-sawyer/response.go @@ -4,8 +4,8 @@ import ( "errors" "net/http" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" + "github.com/jingweno/go-sawyer/mediaheader" + "github.com/jingweno/go-sawyer/mediatype" ) type Response struct { diff --git a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go b/vendor/github.com/jingweno/go-sawyer/sawyer.go similarity index 95% rename from Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go rename to vendor/github.com/jingweno/go-sawyer/sawyer.go index 2217bb4..9e87257 100644 --- a/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer.go +++ b/vendor/github.com/jingweno/go-sawyer/sawyer.go @@ -7,7 +7,7 @@ import ( "net/url" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" + "github.com/jingweno/go-sawyer/mediatype" ) // The default httpClient used if one isn't specified. 
diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go b/vendor/github.com/jingweno/go-sawyer/sawyer_test.go similarity index 94% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go rename to vendor/github.com/jingweno/go-sawyer/sawyer_test.go index 27aa1ff..e9e8dd3 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jingweno/go-sawyer/sawyer_test.go +++ b/vendor/github.com/jingweno/go-sawyer/sawyer_test.go @@ -4,8 +4,8 @@ import ( "net/url" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/bmizerany/assert" + "github.com/jingweno/go-sawyer/hypermedia" ) var endpoints = map[string]map[string]string{ diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/LICENSE b/vendor/github.com/jtacoma/uritemplates/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/LICENSE rename to vendor/github.com/jtacoma/uritemplates/LICENSE diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/README.md b/vendor/github.com/jtacoma/uritemplates/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/README.md rename to vendor/github.com/jtacoma/uritemplates/README.md diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates.go b/vendor/github.com/jtacoma/uritemplates/uritemplates.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates.go rename to vendor/github.com/jtacoma/uritemplates/uritemplates.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates_test.go b/vendor/github.com/jtacoma/uritemplates/uritemplates_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/jtacoma/uritemplates/uritemplates_test.go rename to vendor/github.com/jtacoma/uritemplates/uritemplates_test.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/License b/vendor/github.com/kr/binarydist/License similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/License rename to vendor/github.com/kr/binarydist/License diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/Readme.md b/vendor/github.com/kr/binarydist/Readme.md similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/Readme.md rename to vendor/github.com/kr/binarydist/Readme.md diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/bzip2.go b/vendor/github.com/kr/binarydist/bzip2.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/bzip2.go rename to vendor/github.com/kr/binarydist/bzip2.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/common_test.go b/vendor/github.com/kr/binarydist/common_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/common_test.go rename to vendor/github.com/kr/binarydist/common_test.go diff --git 
a/Godeps/_workspace/src/github.com/kr/binarydist/diff.go b/vendor/github.com/kr/binarydist/diff.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/diff.go rename to vendor/github.com/kr/binarydist/diff.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/diff_test.go b/vendor/github.com/kr/binarydist/diff_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/diff_test.go rename to vendor/github.com/kr/binarydist/diff_test.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/doc.go b/vendor/github.com/kr/binarydist/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/doc.go rename to vendor/github.com/kr/binarydist/doc.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/encoding.go b/vendor/github.com/kr/binarydist/encoding.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/encoding.go rename to vendor/github.com/kr/binarydist/encoding.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/patch.go b/vendor/github.com/kr/binarydist/patch.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/patch.go rename to vendor/github.com/kr/binarydist/patch.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/patch_test.go b/vendor/github.com/kr/binarydist/patch_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/patch_test.go rename to vendor/github.com/kr/binarydist/patch_test.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/seek.go b/vendor/github.com/kr/binarydist/seek.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/seek.go rename to vendor/github.com/kr/binarydist/seek.go diff --git a/Godeps/_workspace/src/github.com/kr/binarydist/sort_test.go b/vendor/github.com/kr/binarydist/sort_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/kr/binarydist/sort_test.go rename to vendor/github.com/kr/binarydist/sort_test.go diff --git a/vendor/github.com/kr/pretty/License b/vendor/github.com/kr/pretty/License new file mode 100644 index 0000000..05c783c --- /dev/null +++ b/vendor/github.com/kr/pretty/License @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright 2012 Keith Rarick + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. 
diff --git a/vendor/github.com/kr/pretty/Readme b/vendor/github.com/kr/pretty/Readme new file mode 100644 index 0000000..c589fc6 --- /dev/null +++ b/vendor/github.com/kr/pretty/Readme @@ -0,0 +1,9 @@ +package pretty + + import "github.com/kr/pretty" + + Package pretty provides pretty-printing for Go values. + +Documentation + + http://godoc.org/github.com/kr/pretty diff --git a/vendor/github.com/kr/pretty/diff.go b/vendor/github.com/kr/pretty/diff.go new file mode 100644 index 0000000..8fe8e24 --- /dev/null +++ b/vendor/github.com/kr/pretty/diff.go @@ -0,0 +1,158 @@ +package pretty + +import ( + "fmt" + "io" + "reflect" +) + +type sbuf []string + +func (s *sbuf) Write(b []byte) (int, error) { + *s = append(*s, string(b)) + return len(b), nil +} + +// Diff returns a slice where each element describes +// a difference between a and b. +func Diff(a, b interface{}) (desc []string) { + Fdiff((*sbuf)(&desc), a, b) + return desc +} + +// Fdiff writes to w a description of the differences between a and b. +func Fdiff(w io.Writer, a, b interface{}) { + diffWriter{w: w}.diff(reflect.ValueOf(a), reflect.ValueOf(b)) +} + +type diffWriter struct { + w io.Writer + l string // label +} + +func (w diffWriter) printf(f string, a ...interface{}) { + var l string + if w.l != "" { + l = w.l + ": " + } + fmt.Fprintf(w.w, l+f, a...) +} + +func (w diffWriter) diff(av, bv reflect.Value) { + if !av.IsValid() && bv.IsValid() { + w.printf("nil != %#v", bv.Interface()) + return + } + if av.IsValid() && !bv.IsValid() { + w.printf("%#v != nil", av.Interface()) + return + } + if !av.IsValid() && !bv.IsValid() { + return + } + + at := av.Type() + bt := bv.Type() + if at != bt { + w.printf("%v != %v", at, bt) + return + } + + // numeric types, including bool + if at.Kind() < reflect.Array { + a, b := av.Interface(), bv.Interface() + if a != b { + w.printf("%#v != %#v", a, b) + } + return + } + + switch at.Kind() { + case reflect.String: + a, b := av.Interface(), bv.Interface() + if a != b { + w.printf("%q != %q", a, b) + } + case reflect.Ptr: + switch { + case av.IsNil() && !bv.IsNil(): + w.printf("nil != %v", bv.Interface()) + case !av.IsNil() && bv.IsNil(): + w.printf("%v != nil", av.Interface()) + case !av.IsNil() && !bv.IsNil(): + w.diff(av.Elem(), bv.Elem()) + } + case reflect.Struct: + for i := 0; i < av.NumField(); i++ { + w.relabel(at.Field(i).Name).diff(av.Field(i), bv.Field(i)) + } + case reflect.Slice: + lenA := av.Len() + lenB := bv.Len() + if lenA != lenB { + w.printf("%s[%d] != %s[%d]", av.Type(), lenA, bv.Type(), lenB) + break + } + for i := 0; i < lenA; i++ { + w.relabel(fmt.Sprintf("[%d]", i)).diff(av.Index(i), bv.Index(i)) + } + case reflect.Map: + ak, both, bk := keyDiff(av.MapKeys(), bv.MapKeys()) + for _, k := range ak { + w := w.relabel(fmt.Sprintf("[%#v]", k.Interface())) + w.printf("%q != (missing)", av.MapIndex(k)) + } + for _, k := range both { + w := w.relabel(fmt.Sprintf("[%#v]", k.Interface())) + w.diff(av.MapIndex(k), bv.MapIndex(k)) + } + for _, k := range bk { + w := w.relabel(fmt.Sprintf("[%#v]", k.Interface())) + w.printf("(missing) != %q", bv.MapIndex(k)) + } + case reflect.Interface: + w.diff(reflect.ValueOf(av.Interface()), reflect.ValueOf(bv.Interface())) + default: + if !reflect.DeepEqual(av.Interface(), bv.Interface()) { + w.printf("%# v != %# v", Formatter(av.Interface()), Formatter(bv.Interface())) + } + } +} + +func (d diffWriter) relabel(name string) (d1 diffWriter) { + d1 = d + if d.l != "" && name[0] != '[' { + d1.l += "." 
+ } + d1.l += name + return d1 +} + +func keyDiff(a, b []reflect.Value) (ak, both, bk []reflect.Value) { + for _, av := range a { + inBoth := false + for _, bv := range b { + if reflect.DeepEqual(av.Interface(), bv.Interface()) { + inBoth = true + both = append(both, av) + break + } + } + if !inBoth { + ak = append(ak, av) + } + } + for _, bv := range b { + inBoth := false + for _, av := range a { + if reflect.DeepEqual(av.Interface(), bv.Interface()) { + inBoth = true + break + } + } + if !inBoth { + bk = append(bk, bv) + } + } + return +} diff --git a/vendor/github.com/kr/pretty/diff_test.go b/vendor/github.com/kr/pretty/diff_test.go new file mode 100644 index 0000000..3c388f1 --- /dev/null +++ b/vendor/github.com/kr/pretty/diff_test.go @@ -0,0 +1,74 @@ +package pretty + +import ( + "testing" +) + +type difftest struct { + a interface{} + b interface{} + exp []string +} + +type S struct { + A int + S *S + I interface{} + C []int +} + +var diffs = []difftest{ + {a: nil, b: nil}, + {a: S{A: 1}, b: S{A: 1}}, + + {0, "", []string{`int != string`}}, + {0, 1, []string{`0 != 1`}}, + {S{}, new(S), []string{`pretty.S != *pretty.S`}}, + {"a", "b", []string{`"a" != "b"`}}, + {S{}, S{A: 1}, []string{`A: 0 != 1`}}, + {new(S), &S{A: 1}, []string{`A: 0 != 1`}}, + {S{S: new(S)}, S{S: &S{A: 1}}, []string{`S.A: 0 != 1`}}, + {S{}, S{I: 0}, []string{`I: nil != 0`}}, + {S{I: 1}, S{I: "x"}, []string{`I: int != string`}}, + {S{}, S{C: []int{1}}, []string{`C: []int[0] != []int[1]`}}, + {S{C: []int{}}, S{C: []int{1}}, []string{`C: []int[0] != []int[1]`}}, + {S{C: []int{1, 2, 3}}, S{C: []int{1, 2, 4}}, []string{`C[2]: 3 != 4`}}, + {S{}, S{A: 1, S: new(S)}, []string{`A: 0 != 1`, `S: nil != &{0 []}`}}, +} + +func TestDiff(t *testing.T) { + for _, tt := range diffs { + got := Diff(tt.a, tt.b) + eq := len(got) == len(tt.exp) + if eq { + for i := range got { + eq = eq && got[i] == tt.exp[i] + } + } + if !eq { + t.Errorf("diffing % #v", tt.a) + t.Errorf("with % #v", tt.b) + diffdiff(t, got, tt.exp) + continue + } + } +} + +func diffdiff(t *testing.T, got, exp []string) { + minus(t, "unexpected:", got, exp) + minus(t, "missing:", exp, got) +} + +func minus(t *testing.T, s string, a, b []string) { + var i, j int + for i = 0; i < len(a); i++ { + for j = 0; j < len(b); j++ { + if a[i] == b[j] { + break + } + } + if j == len(b) { + t.Error(s, a[i]) + } + } +} diff --git a/vendor/github.com/kr/pretty/example_test.go b/vendor/github.com/kr/pretty/example_test.go new file mode 100644 index 0000000..ecf40f3 --- /dev/null +++ b/vendor/github.com/kr/pretty/example_test.go @@ -0,0 +1,20 @@ +package pretty_test + +import ( + "fmt" + "github.com/kr/pretty" +) + +func Example() { + type myType struct { + a, b int + } + var x = []myType{{1, 2}, {3, 4}, {5, 6}} + fmt.Printf("%# v", pretty.Formatter(x)) + // output: + // []pretty_test.myType{ + // {a:1, b:2}, + // {a:3, b:4}, + // {a:5, b:6}, + // } +} diff --git a/vendor/github.com/kr/pretty/formatter.go b/vendor/github.com/kr/pretty/formatter.go new file mode 100644 index 0000000..8dacda2 --- /dev/null +++ b/vendor/github.com/kr/pretty/formatter.go @@ -0,0 +1,337 @@ +package pretty + +import ( + "fmt" + "io" + "reflect" + "strconv" + "text/tabwriter" + + "github.com/kr/text" +) + +const ( + limit = 50 +) + +type formatter struct { + x interface{} + force bool + quote bool +} + +// Formatter makes a wrapper, f, that will format x as go source with line +// breaks and tabs. 
Object f responds to the "%v" formatting verb when both the +// "#" and " " (space) flags are set, for example: +// +// fmt.Sprintf("%# v", Formatter(x)) +// +// If one of these two flags is not set, or any other verb is used, f will +// format x according to the usual rules of package fmt. +// In particular, if x satisfies fmt.Formatter, then x.Format will be called. +func Formatter(x interface{}) (f fmt.Formatter) { + return formatter{x: x, quote: true} +} + +func (fo formatter) String() string { + return fmt.Sprint(fo.x) // unwrap it +} + +func (fo formatter) passThrough(f fmt.State, c rune) { + s := "%" + for i := 0; i < 128; i++ { + if f.Flag(i) { + s += string(i) + } + } + if w, ok := f.Width(); ok { + s += fmt.Sprintf("%d", w) + } + if p, ok := f.Precision(); ok { + s += fmt.Sprintf(".%d", p) + } + s += string(c) + fmt.Fprintf(f, s, fo.x) +} + +func (fo formatter) Format(f fmt.State, c rune) { + if fo.force || c == 'v' && f.Flag('#') && f.Flag(' ') { + w := tabwriter.NewWriter(f, 4, 4, 1, ' ', 0) + p := &printer{tw: w, Writer: w, visited: make(map[visit]int)} + p.printValue(reflect.ValueOf(fo.x), true, fo.quote) + w.Flush() + return + } + fo.passThrough(f, c) +} + +type printer struct { + io.Writer + tw *tabwriter.Writer + visited map[visit]int + depth int +} + +func (p *printer) indent() *printer { + q := *p + q.tw = tabwriter.NewWriter(p.Writer, 4, 4, 1, ' ', 0) + q.Writer = text.NewIndentWriter(q.tw, []byte{'\t'}) + return &q +} + +func (p *printer) printInline(v reflect.Value, x interface{}, showType bool) { + if showType { + io.WriteString(p, v.Type().String()) + fmt.Fprintf(p, "(%#v)", x) + } else { + fmt.Fprintf(p, "%#v", x) + } +} + +// printValue must keep track of already-printed pointer values to avoid +// infinite recursion. +type visit struct { + v uintptr + typ reflect.Type +} + +func (p *printer) printValue(v reflect.Value, showType, quote bool) { + if p.depth > 10 { + io.WriteString(p, "!%v(DEPTH EXCEEDED)") + return + } + + switch v.Kind() { + case reflect.Bool: + p.printInline(v, v.Bool(), showType) + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + p.printInline(v, v.Int(), showType) + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + p.printInline(v, v.Uint(), showType) + case reflect.Float32, reflect.Float64: + p.printInline(v, v.Float(), showType) + case reflect.Complex64, reflect.Complex128: + fmt.Fprintf(p, "%#v", v.Complex()) + case reflect.String: + p.fmtString(v.String(), quote) + case reflect.Map: + t := v.Type() + if showType { + io.WriteString(p, t.String()) + } + writeByte(p, '{') + if nonzero(v) { + expand := !canInline(v.Type()) + pp := p + if expand { + writeByte(p, '\n') + pp = p.indent() + } + keys := v.MapKeys() + for i := 0; i < v.Len(); i++ { + showTypeInStruct := true + k := keys[i] + mv := v.MapIndex(k) + pp.printValue(k, false, true) + writeByte(pp, ':') + if expand { + writeByte(pp, '\t') + } + showTypeInStruct = t.Elem().Kind() == reflect.Interface + pp.printValue(mv, showTypeInStruct, true) + if expand { + io.WriteString(pp, ",\n") + } else if i < v.Len()-1 { + io.WriteString(pp, ", ") + } + } + if expand { + pp.tw.Flush() + } + } + writeByte(p, '}') + case reflect.Struct: + t := v.Type() + if v.CanAddr() { + addr := v.UnsafeAddr() + vis := visit{addr, t} + if vd, ok := p.visited[vis]; ok && vd < p.depth { + p.fmtString(t.String()+"{(CYCLIC REFERENCE)}", false) + break // don't print v again + } + p.visited[vis] = p.depth + } + + if showType { + 
io.WriteString(p, t.String()) + } + writeByte(p, '{') + if nonzero(v) { + expand := !canInline(v.Type()) + pp := p + if expand { + writeByte(p, '\n') + pp = p.indent() + } + for i := 0; i < v.NumField(); i++ { + showTypeInStruct := true + if f := t.Field(i); f.Name != "" { + io.WriteString(pp, f.Name) + writeByte(pp, ':') + if expand { + writeByte(pp, '\t') + } + showTypeInStruct = labelType(f.Type) + } + pp.printValue(getField(v, i), showTypeInStruct, true) + if expand { + io.WriteString(pp, ",\n") + } else if i < v.NumField()-1 { + io.WriteString(pp, ", ") + } + } + if expand { + pp.tw.Flush() + } + } + writeByte(p, '}') + case reflect.Interface: + switch e := v.Elem(); { + case e.Kind() == reflect.Invalid: + io.WriteString(p, "nil") + case e.IsValid(): + pp := *p + pp.depth++ + pp.printValue(e, showType, true) + default: + io.WriteString(p, v.Type().String()) + io.WriteString(p, "(nil)") + } + case reflect.Array, reflect.Slice: + t := v.Type() + if showType { + io.WriteString(p, t.String()) + } + if v.Kind() == reflect.Slice && v.IsNil() && showType { + io.WriteString(p, "(nil)") + break + } + if v.Kind() == reflect.Slice && v.IsNil() { + io.WriteString(p, "nil") + break + } + writeByte(p, '{') + expand := !canInline(v.Type()) + pp := p + if expand { + writeByte(p, '\n') + pp = p.indent() + } + for i := 0; i < v.Len(); i++ { + showTypeInSlice := t.Elem().Kind() == reflect.Interface + pp.printValue(v.Index(i), showTypeInSlice, true) + if expand { + io.WriteString(pp, ",\n") + } else if i < v.Len()-1 { + io.WriteString(pp, ", ") + } + } + if expand { + pp.tw.Flush() + } + writeByte(p, '}') + case reflect.Ptr: + e := v.Elem() + if !e.IsValid() { + writeByte(p, '(') + io.WriteString(p, v.Type().String()) + io.WriteString(p, ")(nil)") + } else { + pp := *p + pp.depth++ + writeByte(pp, '&') + pp.printValue(e, true, true) + } + case reflect.Chan: + x := v.Pointer() + if showType { + writeByte(p, '(') + io.WriteString(p, v.Type().String()) + fmt.Fprintf(p, ")(%#v)", x) + } else { + fmt.Fprintf(p, "%#v", x) + } + case reflect.Func: + io.WriteString(p, v.Type().String()) + io.WriteString(p, " {...}") + case reflect.UnsafePointer: + p.printInline(v, v.Pointer(), showType) + case reflect.Invalid: + io.WriteString(p, "nil") + } +} + +func canInline(t reflect.Type) bool { + switch t.Kind() { + case reflect.Map: + return !canExpand(t.Elem()) + case reflect.Struct: + for i := 0; i < t.NumField(); i++ { + if canExpand(t.Field(i).Type) { + return false + } + } + return true + case reflect.Interface: + return false + case reflect.Array, reflect.Slice: + return !canExpand(t.Elem()) + case reflect.Ptr: + return false + case reflect.Chan, reflect.Func, reflect.UnsafePointer: + return false + } + return true +} + +func canExpand(t reflect.Type) bool { + switch t.Kind() { + case reflect.Map, reflect.Struct, + reflect.Interface, reflect.Array, reflect.Slice, + reflect.Ptr: + return true + } + return false +} + +func labelType(t reflect.Type) bool { + switch t.Kind() { + case reflect.Interface, reflect.Struct: + return true + } + return false +} + +func (p *printer) fmtString(s string, quote bool) { + if quote { + s = strconv.Quote(s) + } + io.WriteString(p, s) +} + +func tryDeepEqual(a, b interface{}) bool { + defer func() { recover() }() + return reflect.DeepEqual(a, b) +} + +func writeByte(w io.Writer, b byte) { + w.Write([]byte{b}) +} + +func getField(v reflect.Value, i int) reflect.Value { + val := v.Field(i) + if val.Kind() == reflect.Interface && !val.IsNil() { + val = val.Elem() + } + return val +} diff 
--git a/vendor/github.com/kr/pretty/formatter_test.go b/vendor/github.com/kr/pretty/formatter_test.go new file mode 100644 index 0000000..1b55ae5 --- /dev/null +++ b/vendor/github.com/kr/pretty/formatter_test.go @@ -0,0 +1,261 @@ +package pretty + +import ( + "fmt" + "io" + "strings" + "testing" + "unsafe" +) + +type test struct { + v interface{} + s string +} + +type LongStructTypeName struct { + longFieldName interface{} + otherLongFieldName interface{} +} + +type SA struct { + t *T + v T +} + +type T struct { + x, y int +} + +type F int + +func (f F) Format(s fmt.State, c rune) { + fmt.Fprintf(s, "F(%d)", int(f)) +} + +var long = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" + +var gosyntax = []test{ + {nil, `nil`}, + {"", `""`}, + {"a", `"a"`}, + {1, "int(1)"}, + {1.0, "float64(1)"}, + {[]int(nil), "[]int(nil)"}, + {[0]int{}, "[0]int{}"}, + {complex(1, 0), "(1+0i)"}, + //{make(chan int), "(chan int)(0x1234)"}, + {unsafe.Pointer(uintptr(1)), "unsafe.Pointer(0x1)"}, + {func(int) {}, "func(int) {...}"}, + {map[int]int{1: 1}, "map[int]int{1:1}"}, + {int32(1), "int32(1)"}, + {io.EOF, `&errors.errorString{s:"EOF"}`}, + {[]string{"a"}, `[]string{"a"}`}, + { + []string{long}, + `[]string{"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"}`, + }, + {F(5), "pretty.F(5)"}, + { + SA{&T{1, 2}, T{3, 4}}, + `pretty.SA{ + t: &pretty.T{x:1, y:2}, + v: pretty.T{x:3, y:4}, +}`, + }, + { + map[int][]byte{1: []byte{}}, + `map[int][]uint8{ + 1: {}, +}`, + }, + { + map[int]T{1: T{}}, + `map[int]pretty.T{ + 1: {}, +}`, + }, + { + long, + `"abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"`, + }, + { + LongStructTypeName{ + longFieldName: LongStructTypeName{}, + otherLongFieldName: long, + }, + `pretty.LongStructTypeName{ + longFieldName: pretty.LongStructTypeName{}, + otherLongFieldName: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", +}`, + }, + { + &LongStructTypeName{ + longFieldName: &LongStructTypeName{}, + otherLongFieldName: (*LongStructTypeName)(nil), + }, + `&pretty.LongStructTypeName{ + longFieldName: &pretty.LongStructTypeName{}, + otherLongFieldName: (*pretty.LongStructTypeName)(nil), +}`, + }, + { + []LongStructTypeName{ + {nil, nil}, + {3, 3}, + {long, nil}, + }, + `[]pretty.LongStructTypeName{ + {}, + { + longFieldName: int(3), + otherLongFieldName: int(3), + }, + { + longFieldName: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", + otherLongFieldName: nil, + }, +}`, + }, + { + []interface{}{ + LongStructTypeName{nil, nil}, + []byte{1, 2, 3}, + T{3, 4}, + LongStructTypeName{long, nil}, + }, + `[]interface {}{ + pretty.LongStructTypeName{}, + []uint8{0x1, 0x2, 0x3}, + pretty.T{x:3, y:4}, + pretty.LongStructTypeName{ + longFieldName: "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789", + otherLongFieldName: nil, + }, +}`, + }, +} + +func TestGoSyntax(t *testing.T) { + for _, tt := range gosyntax { + s := fmt.Sprintf("%# v", Formatter(tt.v)) + if tt.s != s { + t.Errorf("expected %q", tt.s) + t.Errorf("got %q", s) + t.Errorf("expraw\n%s", tt.s) + t.Errorf("gotraw\n%s", s) + } + } +} + +type I struct { + i int + R interface{} +} + +func (i *I) I() *I { return i.R.(*I) } + +func TestCycle(t *testing.T) { + type A struct{ *A } + v := &A{} + v.A = v + + // panics from stack overflow without cycle detection + t.Logf("Example cycle:\n%# v", Formatter(v)) + + p := &A{} + s := fmt.Sprintf("%# v", Formatter([]*A{p, p})) + if strings.Contains(s, "CYCLIC") { + t.Errorf("Repeated address detected as cyclic 
reference:\n%s", s) + } + + type R struct { + i int + *R + } + r := &R{ + i: 1, + R: &R{ + i: 2, + R: &R{ + i: 3, + }, + }, + } + r.R.R.R = r + t.Logf("Example longer cycle:\n%# v", Formatter(r)) + + r = &R{ + i: 1, + R: &R{ + i: 2, + R: &R{ + i: 3, + R: &R{ + i: 4, + R: &R{ + i: 5, + R: &R{ + i: 6, + R: &R{ + i: 7, + R: &R{ + i: 8, + R: &R{ + i: 9, + R: &R{ + i: 10, + R: &R{ + i: 11, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + // here be pirates + r.R.R.R.R.R.R.R.R.R.R.R = r + t.Logf("Example very long cycle:\n%# v", Formatter(r)) + + i := &I{ + i: 1, + R: &I{ + i: 2, + R: &I{ + i: 3, + R: &I{ + i: 4, + R: &I{ + i: 5, + R: &I{ + i: 6, + R: &I{ + i: 7, + R: &I{ + i: 8, + R: &I{ + i: 9, + R: &I{ + i: 10, + R: &I{ + i: 11, + }, + }, + }, + }, + }, + }, + }, + }, + }, + }, + } + iv := i.I().I().I().I().I().I().I().I().I().I() + *iv = *i + t.Logf("Example long interface cycle:\n%# v", Formatter(i)) +} diff --git a/vendor/github.com/kr/pretty/pretty.go b/vendor/github.com/kr/pretty/pretty.go new file mode 100644 index 0000000..d3df868 --- /dev/null +++ b/vendor/github.com/kr/pretty/pretty.go @@ -0,0 +1,98 @@ +// Package pretty provides pretty-printing for Go values. This is +// useful during debugging, to avoid wrapping long output lines in +// the terminal. +// +// It provides a function, Formatter, that can be used with any +// function that accepts a format string. It also provides +// convenience wrappers for functions in packages fmt and log. +package pretty + +import ( + "fmt" + "io" + "log" +) + +// Errorf is a convenience wrapper for fmt.Errorf. +// +// Calling Errorf(f, x, y) is equivalent to +// fmt.Errorf(f, Formatter(x), Formatter(y)). +func Errorf(format string, a ...interface{}) error { + return fmt.Errorf(format, wrap(a, false)...) +} + +// Fprintf is a convenience wrapper for fmt.Fprintf. +// +// Calling Fprintf(w, f, x, y) is equivalent to +// fmt.Fprintf(w, f, Formatter(x), Formatter(y)). +func Fprintf(w io.Writer, format string, a ...interface{}) (n int, error error) { + return fmt.Fprintf(w, format, wrap(a, false)...) +} + +// Log is a convenience wrapper for log.Printf. +// +// Calling Log(x, y) is equivalent to +// log.Print(Formatter(x), Formatter(y)), but each operand is +// formatted with "%# v". +func Log(a ...interface{}) { + log.Print(wrap(a, true)...) +} + +// Logf is a convenience wrapper for log.Printf. +// +// Calling Logf(f, x, y) is equivalent to +// log.Printf(f, Formatter(x), Formatter(y)). +func Logf(format string, a ...interface{}) { + log.Printf(format, wrap(a, false)...) +} + +// Logln is a convenience wrapper for log.Printf. +// +// Calling Logln(x, y) is equivalent to +// log.Println(Formatter(x), Formatter(y)), but each operand is +// formatted with "%# v". +func Logln(a ...interface{}) { + log.Println(wrap(a, true)...) +} + +// Print pretty-prints its operands and writes to standard output. +// +// Calling Print(x, y) is equivalent to +// fmt.Print(Formatter(x), Formatter(y)), but each operand is +// formatted with "%# v". +func Print(a ...interface{}) (n int, errno error) { + return fmt.Print(wrap(a, true)...) +} + +// Printf is a convenience wrapper for fmt.Printf. +// +// Calling Printf(f, x, y) is equivalent to +// fmt.Printf(f, Formatter(x), Formatter(y)). +func Printf(format string, a ...interface{}) (n int, errno error) { + return fmt.Printf(format, wrap(a, false)...) +} + +// Println pretty-prints its operands and writes to standard output. 
+// +// Calling Print(x, y) is equivalent to +// fmt.Println(Formatter(x), Formatter(y)), but each operand is +// formatted with "%# v". +func Println(a ...interface{}) (n int, errno error) { + return fmt.Println(wrap(a, true)...) +} + +// Sprintf is a convenience wrapper for fmt.Sprintf. +// +// Calling Sprintf(f, x, y) is equivalent to +// fmt.Sprintf(f, Formatter(x), Formatter(y)). +func Sprintf(format string, a ...interface{}) string { + return fmt.Sprintf(format, wrap(a, false)...) +} + +func wrap(a []interface{}, force bool) []interface{} { + w := make([]interface{}, len(a)) + for i, x := range a { + w[i] = formatter{x: x, force: force} + } + return w +} diff --git a/vendor/github.com/kr/pretty/zero.go b/vendor/github.com/kr/pretty/zero.go new file mode 100644 index 0000000..abb5b6f --- /dev/null +++ b/vendor/github.com/kr/pretty/zero.go @@ -0,0 +1,41 @@ +package pretty + +import ( + "reflect" +) + +func nonzero(v reflect.Value) bool { + switch v.Kind() { + case reflect.Bool: + return v.Bool() + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + return v.Int() != 0 + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr: + return v.Uint() != 0 + case reflect.Float32, reflect.Float64: + return v.Float() != 0 + case reflect.Complex64, reflect.Complex128: + return v.Complex() != complex(0, 0) + case reflect.String: + return v.String() != "" + case reflect.Struct: + for i := 0; i < v.NumField(); i++ { + if nonzero(getField(v, i)) { + return true + } + } + return false + case reflect.Array: + for i := 0; i < v.Len(); i++ { + if nonzero(v.Index(i)) { + return true + } + } + return false + case reflect.Map, reflect.Interface, reflect.Slice, reflect.Ptr, reflect.Chan, reflect.Func: + return !v.IsNil() + case reflect.UnsafePointer: + return v.Pointer() != 0 + } + return true +} diff --git a/vendor/github.com/kr/text/License b/vendor/github.com/kr/text/License new file mode 100644 index 0000000..480a328 --- /dev/null +++ b/vendor/github.com/kr/text/License @@ -0,0 +1,19 @@ +Copyright 2012 Keith Rarick + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/kr/text/Readme b/vendor/github.com/kr/text/Readme new file mode 100644 index 0000000..7e6e7c0 --- /dev/null +++ b/vendor/github.com/kr/text/Readme @@ -0,0 +1,3 @@ +This is a Go package for manipulating paragraphs of text. + +See http://go.pkgdoc.org/github.com/kr/text for full documentation. 
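For reference, the kr/pretty package vendored above is driven through Formatter and the fmt/log-style wrappers described in its package comment; here is a minimal usage sketch (the sample type and field names below are illustrative only, not part of the vendored code):

    package main

    import (
        "fmt"

        "github.com/kr/pretty"
    )

    // sample is an illustrative type for this sketch; it does not appear in the patch.
    type sample struct {
        Name  string
        Ports []int
    }

    func main() {
        s := sample{Name: "web", Ports: []int{80, 443}}
        // The "%# v" verb (note the space) selects the expanded, multi-line form.
        fmt.Printf("%# v\n", pretty.Formatter(s))
        // The convenience wrappers format every operand the same way.
        pretty.Println(s)
    }

Under the vendor/ layout this patch switches to, the plain "github.com/kr/pretty" import resolves to the vendored copy with no import-path rewriting.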
diff --git a/vendor/github.com/kr/text/doc.go b/vendor/github.com/kr/text/doc.go new file mode 100644 index 0000000..cf4c198 --- /dev/null +++ b/vendor/github.com/kr/text/doc.go @@ -0,0 +1,3 @@ +// Package text provides rudimentary functions for manipulating text in +// paragraphs. +package text diff --git a/vendor/github.com/kr/text/indent.go b/vendor/github.com/kr/text/indent.go new file mode 100644 index 0000000..4ebac45 --- /dev/null +++ b/vendor/github.com/kr/text/indent.go @@ -0,0 +1,74 @@ +package text + +import ( + "io" +) + +// Indent inserts prefix at the beginning of each non-empty line of s. The +// end-of-line marker is NL. +func Indent(s, prefix string) string { + return string(IndentBytes([]byte(s), []byte(prefix))) +} + +// IndentBytes inserts prefix at the beginning of each non-empty line of b. +// The end-of-line marker is NL. +func IndentBytes(b, prefix []byte) []byte { + var res []byte + bol := true + for _, c := range b { + if bol && c != '\n' { + res = append(res, prefix...) + } + res = append(res, c) + bol = c == '\n' + } + return res +} + +// Writer indents each line of its input. +type indentWriter struct { + w io.Writer + bol bool + pre [][]byte + sel int + off int +} + +// NewIndentWriter makes a new write filter that indents the input +// lines. Each line is prefixed in order with the corresponding +// element of pre. If there are more lines than elements, the last +// element of pre is repeated for each subsequent line. +func NewIndentWriter(w io.Writer, pre ...[]byte) io.Writer { + return &indentWriter{ + w: w, + pre: pre, + bol: true, + } +} + +// The only errors returned are from the underlying indentWriter. +func (w *indentWriter) Write(p []byte) (n int, err error) { + for _, c := range p { + if w.bol { + var i int + i, err = w.w.Write(w.pre[w.sel][w.off:]) + w.off += i + if err != nil { + return n, err + } + } + _, err = w.w.Write([]byte{c}) + if err != nil { + return n, err + } + n++ + w.bol = c == '\n' + if w.bol { + w.off = 0 + if w.sel < len(w.pre)-1 { + w.sel++ + } + } + } + return n, nil +} diff --git a/vendor/github.com/kr/text/indent_test.go b/vendor/github.com/kr/text/indent_test.go new file mode 100644 index 0000000..5c723ee --- /dev/null +++ b/vendor/github.com/kr/text/indent_test.go @@ -0,0 +1,119 @@ +package text + +import ( + "bytes" + "testing" +) + +type T struct { + inp, exp, pre string +} + +var tests = []T{ + { + "The quick brown fox\njumps over the lazy\ndog.\nBut not quickly.\n", + "xxxThe quick brown fox\nxxxjumps over the lazy\nxxxdog.\nxxxBut not quickly.\n", + "xxx", + }, + { + "The quick brown fox\njumps over the lazy\ndog.\n\nBut not quickly.", + "xxxThe quick brown fox\nxxxjumps over the lazy\nxxxdog.\n\nxxxBut not quickly.", + "xxx", + }, +} + +func TestIndent(t *testing.T) { + for _, test := range tests { + got := Indent(test.inp, test.pre) + if got != test.exp { + t.Errorf("mismatch %q != %q", got, test.exp) + } + } +} + +type IndentWriterTest struct { + inp, exp string + pre []string +} + +var ts = []IndentWriterTest{ + { + ` +The quick brown fox +jumps over the lazy +dog. +But not quickly. +`[1:], + ` +xxxThe quick brown fox +xxxjumps over the lazy +xxxdog. +xxxBut not quickly. +`[1:], + []string{"xxx"}, + }, + { + ` +The quick brown fox +jumps over the lazy +dog. +But not quickly. +`[1:], + ` +xxaThe quick brown fox +xxxjumps over the lazy +xxxdog. +xxxBut not quickly. +`[1:], + []string{"xxa", "xxx"}, + }, + { + ` +The quick brown fox +jumps over the lazy +dog. +But not quickly. 
+`[1:], + ` +xxaThe quick brown fox +xxbjumps over the lazy +xxcdog. +xxxBut not quickly. +`[1:], + []string{"xxa", "xxb", "xxc", "xxx"}, + }, + { + ` +The quick brown fox +jumps over the lazy +dog. + +But not quickly.`[1:], + ` +xxaThe quick brown fox +xxxjumps over the lazy +xxxdog. +xxx +xxxBut not quickly.`[1:], + []string{"xxa", "xxx"}, + }, +} + +func TestIndentWriter(t *testing.T) { + for _, test := range ts { + b := new(bytes.Buffer) + pre := make([][]byte, len(test.pre)) + for i := range test.pre { + pre[i] = []byte(test.pre[i]) + } + w := NewIndentWriter(b, pre...) + if _, err := w.Write([]byte(test.inp)); err != nil { + t.Error(err) + } + if got := b.String(); got != test.exp { + t.Errorf("mismatch %q != %q", got, test.exp) + t.Log(got) + t.Log(test.exp) + } + } +} diff --git a/vendor/github.com/kr/text/wrap.go b/vendor/github.com/kr/text/wrap.go new file mode 100644 index 0000000..ca88565 --- /dev/null +++ b/vendor/github.com/kr/text/wrap.go @@ -0,0 +1,86 @@ +package text + +import ( + "bytes" + "math" +) + +var ( + nl = []byte{'\n'} + sp = []byte{' '} +) + +const defaultPenalty = 1e5 + +// Wrap wraps s into a paragraph of lines of length lim, with minimal +// raggedness. +func Wrap(s string, lim int) string { + return string(WrapBytes([]byte(s), lim)) +} + +// WrapBytes wraps b into a paragraph of lines of length lim, with minimal +// raggedness. +func WrapBytes(b []byte, lim int) []byte { + words := bytes.Split(bytes.Replace(bytes.TrimSpace(b), nl, sp, -1), sp) + var lines [][]byte + for _, line := range WrapWords(words, 1, lim, defaultPenalty) { + lines = append(lines, bytes.Join(line, sp)) + } + return bytes.Join(lines, nl) +} + +// WrapWords is the low-level line-breaking algorithm, useful if you need more +// control over the details of the text wrapping process. For most uses, either +// Wrap or WrapBytes will be sufficient and more convenient. +// +// WrapWords splits a list of words into lines with minimal "raggedness", +// treating each byte as one unit, accounting for spc units between adjacent +// words on each line, and attempting to limit lines to lim units. Raggedness +// is the total error over all lines, where error is the square of the +// difference of the length of the line and lim. Too-long lines (which only +// happen when a single word is longer than lim units) have pen penalty units +// added to the error. 
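+// Worked example (illustrative, derived from the cost definition above): with
+// spc=1 and lim=10, an interior line whose words and spaces total 8 units adds
+// (10-8)*(10-8) = 4 to the raggedness, while a single 13-unit word forms a
+// too-long line and adds (10-13)*(10-13) + pen = 9 + pen.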
+func WrapWords(words [][]byte, spc, lim, pen int) [][][]byte { + n := len(words) + + length := make([][]int, n) + for i := 0; i < n; i++ { + length[i] = make([]int, n) + length[i][i] = len(words[i]) + for j := i + 1; j < n; j++ { + length[i][j] = length[i][j-1] + spc + len(words[j]) + } + } + + nbrk := make([]int, n) + cost := make([]int, n) + for i := range cost { + cost[i] = math.MaxInt32 + } + for i := n - 1; i >= 0; i-- { + if length[i][n-1] <= lim { + cost[i] = 0 + nbrk[i] = n + } else { + for j := i + 1; j < n; j++ { + d := lim - length[i][j-1] + c := d*d + cost[j] + if length[i][j-1] > lim { + c += pen // too-long lines get a worse penalty + } + if c < cost[i] { + cost[i] = c + nbrk[i] = j + } + } + } + } + + var lines [][][]byte + i := 0 + for i < n { + lines = append(lines, words[i:nbrk[i]]) + i = nbrk[i] + } + return lines +} diff --git a/vendor/github.com/kr/text/wrap_test.go b/vendor/github.com/kr/text/wrap_test.go new file mode 100644 index 0000000..90f065c --- /dev/null +++ b/vendor/github.com/kr/text/wrap_test.go @@ -0,0 +1,44 @@ +package text + +import ( + "bytes" + "testing" +) + +var text = "The quick brown fox jumps over the lazy dog." + +func TestWrap(t *testing.T) { + exp := [][]string{ + {"The", "quick", "brown", "fox"}, + {"jumps", "over", "the", "lazy", "dog."}, + } + words := bytes.Split([]byte(text), sp) + got := WrapWords(words, 1, 24, defaultPenalty) + if len(exp) != len(got) { + t.Fail() + } + for i := range exp { + if len(exp[i]) != len(got[i]) { + t.Fail() + } + for j := range exp[i] { + if exp[i][j] != string(got[i][j]) { + t.Fatal(i, exp[i][j], got[i][j]) + } + } + } +} + +func TestWrapNarrow(t *testing.T) { + exp := "The\nquick\nbrown\nfox\njumps\nover\nthe\nlazy\ndog." + if Wrap(text, 5) != exp { + t.Fail() + } +} + +func TestWrapOneLine(t *testing.T) { + exp := "The quick brown fox jumps over the lazy dog." 
+ if Wrap(text, 500) != exp { + t.Fail() + } +} diff --git a/Godeps/_workspace/src/github.com/mattn/go-isatty/README.md b/vendor/github.com/mattn/go-isatty/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/mattn/go-isatty/README.md rename to vendor/github.com/mattn/go-isatty/README.md diff --git a/Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go b/vendor/github.com/mattn/go-isatty/doc.go similarity index 100% rename from Godeps/_workspace/src/github.com/mattn/go-isatty/doc.go rename to vendor/github.com/mattn/go-isatty/doc.go diff --git a/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go b/vendor/github.com/mattn/go-isatty/isatty_bsd.go similarity index 100% rename from Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_bsd.go rename to vendor/github.com/mattn/go-isatty/isatty_bsd.go diff --git a/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go b/vendor/github.com/mattn/go-isatty/isatty_linux.go similarity index 100% rename from Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_linux.go rename to vendor/github.com/mattn/go-isatty/isatty_linux.go diff --git a/Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go b/vendor/github.com/mattn/go-isatty/isatty_windows.go similarity index 100% rename from Godeps/_workspace/src/github.com/mattn/go-isatty/isatty_windows.go rename to vendor/github.com/mattn/go-isatty/isatty_windows.go diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go b/vendor/github.com/octokit/go-octokit/octokit/auth_method.go similarity index 93% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go rename to vendor/github.com/octokit/go-octokit/octokit/auth_method.go index cee2a83..85bc7ab 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method.go +++ b/vendor/github.com/octokit/go-octokit/octokit/auth_method.go @@ -7,7 +7,7 @@ import ( "os" "path/filepath" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/fhs/go-netrc/netrc" + "github.com/fhs/go-netrc/netrc" ) // See http://developer.github.com/v3/auth/ diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go b/vendor/github.com/octokit/go-octokit/octokit/auth_method_test.go similarity index 87% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go rename to vendor/github.com/octokit/go-octokit/octokit/auth_method_test.go index 21dc8b2..796f2a2 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/auth_method_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/auth_method_test.go @@ -3,7 +3,7 @@ package octokit import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestBasicAuth(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go b/vendor/github.com/octokit/go-octokit/octokit/authorizations.go similarity index 94% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go rename to vendor/github.com/octokit/go-octokit/octokit/authorizations.go index 6fe7ef5..9717d87 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations.go +++ b/vendor/github.com/octokit/go-octokit/octokit/authorizations.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + 
"github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go b/vendor/github.com/octokit/go-octokit/octokit/authorizations_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go rename to vendor/github.com/octokit/go-octokit/octokit/authorizations_test.go index 78d3e14..8085f48 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/authorizations_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/authorizations_test.go @@ -6,7 +6,7 @@ import ( "reflect" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestAuthorizationsService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go b/vendor/github.com/octokit/go-octokit/octokit/client.go similarity index 95% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go rename to vendor/github.com/octokit/go-octokit/octokit/client.go index 992ff9d..fcbdbe0 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client.go +++ b/vendor/github.com/octokit/go-octokit/octokit/client.go @@ -5,8 +5,8 @@ import ( "net/http" "net/url" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer" + "github.com/jingweno/go-sawyer/hypermedia" ) func NewClient(authMethod AuthMethod) *Client { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go b/vendor/github.com/octokit/go-octokit/octokit/client_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go rename to vendor/github.com/octokit/go-octokit/octokit/client_test.go index 749c740..b5b1ed5 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/client_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/client_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestSuccessfulGet(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go b/vendor/github.com/octokit/go-octokit/octokit/commits.go similarity index 96% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go rename to vendor/github.com/octokit/go-octokit/octokit/commits.go index b8e6bf2..952b53f 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits.go +++ b/vendor/github.com/octokit/go-octokit/octokit/commits.go @@ -5,7 +5,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var CommitsURL = Hyperlink("repos/{owner}/{repo}/commits{/sha}") diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go b/vendor/github.com/octokit/go-octokit/octokit/commits_test.go similarity index 95% rename from 
Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go rename to vendor/github.com/octokit/go-octokit/octokit/commits_test.go index 3d3d057..582c67e 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/commits_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/commits_test.go @@ -5,7 +5,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestCommitsService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis.go b/vendor/github.com/octokit/go-octokit/octokit/emojis.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis.go rename to vendor/github.com/octokit/go-octokit/octokit/emojis.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go b/vendor/github.com/octokit/go-octokit/octokit/emojis_test.go similarity index 89% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go rename to vendor/github.com/octokit/go-octokit/octokit/emojis_test.go index fafca78..f2b3dc8 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/emojis_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/emojis_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestRootEmojisService_All(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go b/vendor/github.com/octokit/go-octokit/octokit/error.go similarity index 98% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go rename to vendor/github.com/octokit/go-octokit/octokit/error.go index 5137fff..a1e64b1 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error.go +++ b/vendor/github.com/octokit/go-octokit/octokit/error.go @@ -6,7 +6,7 @@ import ( "regexp" "strings" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" + "github.com/jingweno/go-sawyer" ) type ResponseErrorType int diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go b/vendor/github.com/octokit/go-octokit/octokit/error_test.go similarity index 98% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go rename to vendor/github.com/octokit/go-octokit/octokit/error_test.go index 79fab72..98f49e9 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/error_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/error_test.go @@ -5,7 +5,7 @@ import ( "strings" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestResponseError_empty_body(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go b/vendor/github.com/octokit/go-octokit/octokit/gist.go similarity index 96% rename from 
Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go rename to vendor/github.com/octokit/go-octokit/octokit/gist.go index b246b22..5331919 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gist.go +++ b/vendor/github.com/octokit/go-octokit/octokit/gist.go @@ -5,7 +5,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var GistsURL = Hyperlink("gists{/gist_id}") diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go b/vendor/github.com/octokit/go-octokit/octokit/gists_test.go similarity index 95% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go rename to vendor/github.com/octokit/go-octokit/octokit/gists_test.go index acb5e2e..841661c 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/gists_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/gists_test.go @@ -5,7 +5,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestGistsService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees.go b/vendor/github.com/octokit/go-octokit/octokit/git_trees.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees.go rename to vendor/github.com/octokit/go-octokit/octokit/git_trees.go diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go b/vendor/github.com/octokit/go-octokit/octokit/git_trees_test.go similarity index 95% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go rename to vendor/github.com/octokit/go-octokit/octokit/git_trees_test.go index 99945e2..50c242c 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/git_trees_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/git_trees_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestGitTreesService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go b/vendor/github.com/octokit/go-octokit/octokit/hyperlink.go similarity index 73% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go rename to vendor/github.com/octokit/go-octokit/octokit/hyperlink.go index dffff02..6224ccf 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink.go +++ b/vendor/github.com/octokit/go-octokit/octokit/hyperlink.go @@ -3,7 +3,7 @@ package octokit import ( "net/url" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) type M map[string]interface{} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go b/vendor/github.com/octokit/go-octokit/octokit/hyperlink_test.go similarity 
index 87% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go rename to vendor/github.com/octokit/go-octokit/octokit/hyperlink_test.go index b166dc0..b08cd40 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/hyperlink_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/hyperlink_test.go @@ -3,7 +3,7 @@ package octokit import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestHyperlink_Expand(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go b/vendor/github.com/octokit/go-octokit/octokit/issues.go similarity index 96% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go rename to vendor/github.com/octokit/go-octokit/octokit/issues.go index 77fdc3b..c91922c 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues.go +++ b/vendor/github.com/octokit/go-octokit/octokit/issues.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go b/vendor/github.com/octokit/go-octokit/octokit/issues_test.go similarity index 98% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go rename to vendor/github.com/octokit/go-octokit/octokit/issues_test.go index 6a92483..f8f9607 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/issues_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/issues_test.go @@ -5,7 +5,7 @@ import ( "testing" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestIssuesService_All(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit.go b/vendor/github.com/octokit/go-octokit/octokit/octokit.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit.go rename to vendor/github.com/octokit/go-octokit/octokit/octokit.go diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go b/vendor/github.com/octokit/go-octokit/octokit/octokit_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go rename to vendor/github.com/octokit/go-octokit/octokit/octokit_test.go index c6c26b4..b2e0248 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/octokit_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/octokit_test.go @@ -10,7 +10,7 @@ import ( "path" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) var ( diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/organizations.go 
b/vendor/github.com/octokit/go-octokit/octokit/organizations.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/organizations.go rename to vendor/github.com/octokit/go-octokit/octokit/organizations.go diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go b/vendor/github.com/octokit/go-octokit/octokit/pull_requests.go similarity index 97% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go rename to vendor/github.com/octokit/go-octokit/octokit/pull_requests.go index 830a985..18fce8c 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests.go +++ b/vendor/github.com/octokit/go-octokit/octokit/pull_requests.go @@ -5,7 +5,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go b/vendor/github.com/octokit/go-octokit/octokit/pull_requests_test.go similarity index 98% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go rename to vendor/github.com/octokit/go-octokit/octokit/pull_requests_test.go index cd13645..188795c 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/pull_requests_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/pull_requests_test.go @@ -6,7 +6,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestPullRequestService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go b/vendor/github.com/octokit/go-octokit/octokit/releases.go similarity index 96% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go rename to vendor/github.com/octokit/go-octokit/octokit/releases.go index 26e1cef..c3743ca 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases.go +++ b/vendor/github.com/octokit/go-octokit/octokit/releases.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go b/vendor/github.com/octokit/go-octokit/octokit/releases_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go rename to vendor/github.com/octokit/go-octokit/octokit/releases_test.go index 0b9748a..2a37594 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/releases_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/releases_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestReleasesService_All(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go b/vendor/github.com/octokit/go-octokit/octokit/repositories.go similarity index 96% rename from 
Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go rename to vendor/github.com/octokit/go-octokit/octokit/repositories.go index b026441..752d8ba 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories.go +++ b/vendor/github.com/octokit/go-octokit/octokit/repositories.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go b/vendor/github.com/octokit/go-octokit/octokit/repositories_test.go similarity index 98% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go rename to vendor/github.com/octokit/go-octokit/octokit/repositories_test.go index b9e3619..14880cd 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/repositories_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/repositories_test.go @@ -6,7 +6,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestRepositoresService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go b/vendor/github.com/octokit/go-octokit/octokit/request.go similarity index 90% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go rename to vendor/github.com/octokit/go-octokit/octokit/request.go index e418b4f..5f63167 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/request.go +++ b/vendor/github.com/octokit/go-octokit/octokit/request.go @@ -1,8 +1,8 @@ package octokit import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" + "github.com/jingweno/go-sawyer" + "github.com/jingweno/go-sawyer/mediatype" ) func newRequest(client *Client, urlStr string) (req *Request, err error) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go b/vendor/github.com/octokit/go-octokit/octokit/response.go similarity index 65% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go rename to vendor/github.com/octokit/go-octokit/octokit/response.go index 9b99ef0..654dd32 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/response.go +++ b/vendor/github.com/octokit/go-octokit/octokit/response.go @@ -3,9 +3,9 @@ package octokit import ( "net/http" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediatype" + "github.com/jingweno/go-sawyer" + "github.com/jingweno/go-sawyer/mediaheader" + "github.com/jingweno/go-sawyer/mediatype" ) type Response struct { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go 
b/vendor/github.com/octokit/go-octokit/octokit/result.go similarity index 91% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go rename to vendor/github.com/octokit/go-octokit/octokit/result.go index 2a7b9cd..aed4c50 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result.go +++ b/vendor/github.com/octokit/go-octokit/octokit/result.go @@ -1,7 +1,7 @@ package octokit import ( - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" + "github.com/jingweno/go-sawyer/mediaheader" ) type pageable struct { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go b/vendor/github.com/octokit/go-octokit/octokit/result_test.go similarity index 60% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go rename to vendor/github.com/octokit/go-octokit/octokit/result_test.go index 163f12e..eef2b92 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/result_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/result_test.go @@ -3,9 +3,9 @@ package octokit import ( "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/mediaheader" + "github.com/bmizerany/assert" + "github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/mediaheader" ) func TestNewResult_Pageable(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go b/vendor/github.com/octokit/go-octokit/octokit/root.go similarity index 97% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go rename to vendor/github.com/octokit/go-octokit/octokit/root.go index ebb7b1d..db0898e 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root.go +++ b/vendor/github.com/octokit/go-octokit/octokit/root.go @@ -3,7 +3,7 @@ package octokit import ( "net/url" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go b/vendor/github.com/octokit/go-octokit/octokit/root_test.go similarity index 91% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go rename to vendor/github.com/octokit/go-octokit/octokit/root_test.go index f2dd0ee..1739759 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/root_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/root_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestRootService_One(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go b/vendor/github.com/octokit/go-octokit/octokit/statuses.go similarity index 90% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go rename to vendor/github.com/octokit/go-octokit/octokit/statuses.go index 01527d7..91f6212 100644 --- 
a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses.go +++ b/vendor/github.com/octokit/go-octokit/octokit/statuses.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go b/vendor/github.com/octokit/go-octokit/octokit/statuses_test.go similarity index 91% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go rename to vendor/github.com/octokit/go-octokit/octokit/statuses_test.go index 06c692e..0e795d6 100644 --- a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/statuses_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/statuses_test.go @@ -4,7 +4,7 @@ import ( "net/http" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestStatuses(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads.go b/vendor/github.com/octokit/go-octokit/octokit/uploads.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads.go rename to vendor/github.com/octokit/go-octokit/octokit/uploads.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go b/vendor/github.com/octokit/go-octokit/octokit/uploads_test.go similarity index 92% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go rename to vendor/github.com/octokit/go-octokit/octokit/uploads_test.go index 3fbdea5..4b6b39d 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/uploads_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/uploads_test.go @@ -7,7 +7,7 @@ import ( "os" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestUploadsService_UploadAsset(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go b/vendor/github.com/octokit/go-octokit/octokit/users.go similarity index 96% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go rename to vendor/github.com/octokit/go-octokit/octokit/users.go index 08341f0..a03c31b 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users.go +++ b/vendor/github.com/octokit/go-octokit/octokit/users.go @@ -4,7 +4,7 @@ import ( "net/url" "time" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/jingweno/go-sawyer/hypermedia" + "github.com/jingweno/go-sawyer/hypermedia" ) var ( diff --git a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go b/vendor/github.com/octokit/go-octokit/octokit/users_test.go similarity index 97% rename from Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go rename to vendor/github.com/octokit/go-octokit/octokit/users_test.go index 8a13a02..59acd1a 100644 --- 
a/Godeps/_workspace/src/github.com/octokit/go-octokit/octokit/users_test.go +++ b/vendor/github.com/octokit/go-octokit/octokit/users_test.go @@ -6,7 +6,7 @@ import ( "regexp" "testing" - "github.com/remind101/deploy/Godeps/_workspace/src/github.com/bmizerany/assert" + "github.com/bmizerany/assert" ) func TestUsersService_GetCurrentUser(t *testing.T) { diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/LICENSE b/vendor/github.com/ogier/pflag/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/LICENSE rename to vendor/github.com/ogier/pflag/LICENSE diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/README.md b/vendor/github.com/ogier/pflag/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/README.md rename to vendor/github.com/ogier/pflag/README.md diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/bool.go b/vendor/github.com/ogier/pflag/bool.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/bool.go rename to vendor/github.com/ogier/pflag/bool.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/duration.go b/vendor/github.com/ogier/pflag/duration.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/duration.go rename to vendor/github.com/ogier/pflag/duration.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/example_test.go b/vendor/github.com/ogier/pflag/example_test.go similarity index 96% rename from Godeps/_workspace/src/github.com/ogier/pflag/example_test.go rename to vendor/github.com/ogier/pflag/example_test.go index a833eba..03ebeaa 100644 --- a/Godeps/_workspace/src/github.com/ogier/pflag/example_test.go +++ b/vendor/github.com/ogier/pflag/example_test.go @@ -11,7 +11,7 @@ import ( "strings" "time" - flag "github.com/remind101/deploy/Godeps/_workspace/src/github.com/ogier/pflag" + flag "github.com/ogier/pflag" ) // Example 1: A single string flag called "species" with default value "gopher". diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/export_test.go b/vendor/github.com/ogier/pflag/export_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/export_test.go rename to vendor/github.com/ogier/pflag/export_test.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/flag.go b/vendor/github.com/ogier/pflag/flag.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/flag.go rename to vendor/github.com/ogier/pflag/flag.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/flag_test.go b/vendor/github.com/ogier/pflag/flag_test.go similarity index 99% rename from Godeps/_workspace/src/github.com/ogier/pflag/flag_test.go rename to vendor/github.com/ogier/pflag/flag_test.go index d77ebc0..aa8682f 100644 --- a/Godeps/_workspace/src/github.com/ogier/pflag/flag_test.go +++ b/vendor/github.com/ogier/pflag/flag_test.go @@ -13,7 +13,7 @@ import ( "testing" "time" - . "github.com/remind101/deploy/Godeps/_workspace/src/github.com/ogier/pflag" + . 
"github.com/ogier/pflag" ) var ( diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/float32.go b/vendor/github.com/ogier/pflag/float32.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/float32.go rename to vendor/github.com/ogier/pflag/float32.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/float64.go b/vendor/github.com/ogier/pflag/float64.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/float64.go rename to vendor/github.com/ogier/pflag/float64.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/int.go b/vendor/github.com/ogier/pflag/int.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/int.go rename to vendor/github.com/ogier/pflag/int.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/int32.go b/vendor/github.com/ogier/pflag/int32.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/int32.go rename to vendor/github.com/ogier/pflag/int32.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/int64.go b/vendor/github.com/ogier/pflag/int64.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/int64.go rename to vendor/github.com/ogier/pflag/int64.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/int8.go b/vendor/github.com/ogier/pflag/int8.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/int8.go rename to vendor/github.com/ogier/pflag/int8.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/ip.go b/vendor/github.com/ogier/pflag/ip.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/ip.go rename to vendor/github.com/ogier/pflag/ip.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/ipmask.go b/vendor/github.com/ogier/pflag/ipmask.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/ipmask.go rename to vendor/github.com/ogier/pflag/ipmask.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/string.go b/vendor/github.com/ogier/pflag/string.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/string.go rename to vendor/github.com/ogier/pflag/string.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/uint.go b/vendor/github.com/ogier/pflag/uint.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/uint.go rename to vendor/github.com/ogier/pflag/uint.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/uint16.go b/vendor/github.com/ogier/pflag/uint16.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/uint16.go rename to vendor/github.com/ogier/pflag/uint16.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/uint32.go b/vendor/github.com/ogier/pflag/uint32.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/uint32.go rename to vendor/github.com/ogier/pflag/uint32.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/uint64.go b/vendor/github.com/ogier/pflag/uint64.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/uint64.go rename to vendor/github.com/ogier/pflag/uint64.go diff --git a/Godeps/_workspace/src/github.com/ogier/pflag/uint8.go b/vendor/github.com/ogier/pflag/uint8.go similarity index 100% rename from Godeps/_workspace/src/github.com/ogier/pflag/uint8.go rename to vendor/github.com/ogier/pflag/uint8.go diff --git 
a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal.go rename to vendor/golang.org/x/crypto/ssh/terminal/terminal.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal_test.go b/vendor/golang.org/x/crypto/ssh/terminal/terminal_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/terminal_test.go rename to vendor/golang.org/x/crypto/ssh/terminal/terminal_test.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util.go b/vendor/golang.org/x/crypto/ssh/terminal/util.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util.go rename to vendor/golang.org/x/crypto/ssh/terminal/util.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_bsd.go b/vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_bsd.go rename to vendor/golang.org/x/crypto/ssh/terminal/util_bsd.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_linux.go b/vendor/golang.org/x/crypto/ssh/terminal/util_linux.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_linux.go rename to vendor/golang.org/x/crypto/ssh/terminal/util_linux.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_windows.go b/vendor/golang.org/x/crypto/ssh/terminal/util_windows.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/golang.org/x/crypto/ssh/terminal/util_windows.go rename to vendor/golang.org/x/crypto/ssh/terminal/util_windows.go diff --git a/vendor/gopkg.in/check.v1/LICENSE b/vendor/gopkg.in/check.v1/LICENSE new file mode 100644 index 0000000..545cf2d --- /dev/null +++ b/vendor/gopkg.in/check.v1/LICENSE @@ -0,0 +1,25 @@ +Gocheck - A rich testing framework for Go + +Copyright (c) 2010-2013 Gustavo Niemeyer + +All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND +ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE +DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR +ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES +(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND +ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS +SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/vendor/gopkg.in/check.v1/README.md b/vendor/gopkg.in/check.v1/README.md new file mode 100644 index 0000000..0ca9e57 --- /dev/null +++ b/vendor/gopkg.in/check.v1/README.md @@ -0,0 +1,20 @@ +Instructions +============ + +Install the package with: + + go get gopkg.in/check.v1 + +Import it with: + + import "gopkg.in/check.v1" + +and use _check_ as the package name inside the code. + +For more details, visit the project page: + +* http://labix.org/gocheck + +and the API documentation: + +* https://gopkg.in/check.v1 diff --git a/vendor/gopkg.in/check.v1/TODO b/vendor/gopkg.in/check.v1/TODO new file mode 100644 index 0000000..3349827 --- /dev/null +++ b/vendor/gopkg.in/check.v1/TODO @@ -0,0 +1,2 @@ +- Assert(slice, Contains, item) +- Parallel test support diff --git a/vendor/gopkg.in/check.v1/benchmark.go b/vendor/gopkg.in/check.v1/benchmark.go new file mode 100644 index 0000000..46ea9dc --- /dev/null +++ b/vendor/gopkg.in/check.v1/benchmark.go @@ -0,0 +1,187 @@ +// Copyright (c) 2012 The Go Authors. All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +package check + +import ( + "fmt" + "runtime" + "time" +) + +var memStats runtime.MemStats + +// testingB is a type passed to Benchmark functions to manage benchmark +// timing and to specify the number of iterations to run. +type timer struct { + start time.Time // Time test or benchmark started + duration time.Duration + N int + bytes int64 + timerOn bool + benchTime time.Duration + // The initial states of memStats.Mallocs and memStats.TotalAlloc. 
+ startAllocs uint64 + startBytes uint64 + // The net total of this test after being run. + netAllocs uint64 + netBytes uint64 +} + +// StartTimer starts timing a test. This function is called automatically +// before a benchmark starts, but it can also used to resume timing after +// a call to StopTimer. +func (c *C) StartTimer() { + if !c.timerOn { + c.start = time.Now() + c.timerOn = true + + runtime.ReadMemStats(&memStats) + c.startAllocs = memStats.Mallocs + c.startBytes = memStats.TotalAlloc + } +} + +// StopTimer stops timing a test. This can be used to pause the timer +// while performing complex initialization that you don't +// want to measure. +func (c *C) StopTimer() { + if c.timerOn { + c.duration += time.Now().Sub(c.start) + c.timerOn = false + runtime.ReadMemStats(&memStats) + c.netAllocs += memStats.Mallocs - c.startAllocs + c.netBytes += memStats.TotalAlloc - c.startBytes + } +} + +// ResetTimer sets the elapsed benchmark time to zero. +// It does not affect whether the timer is running. +func (c *C) ResetTimer() { + if c.timerOn { + c.start = time.Now() + runtime.ReadMemStats(&memStats) + c.startAllocs = memStats.Mallocs + c.startBytes = memStats.TotalAlloc + } + c.duration = 0 + c.netAllocs = 0 + c.netBytes = 0 +} + +// SetBytes informs the number of bytes that the benchmark processes +// on each iteration. If this is called in a benchmark it will also +// report MB/s. +func (c *C) SetBytes(n int64) { + c.bytes = n +} + +func (c *C) nsPerOp() int64 { + if c.N <= 0 { + return 0 + } + return c.duration.Nanoseconds() / int64(c.N) +} + +func (c *C) mbPerSec() float64 { + if c.bytes <= 0 || c.duration <= 0 || c.N <= 0 { + return 0 + } + return (float64(c.bytes) * float64(c.N) / 1e6) / c.duration.Seconds() +} + +func (c *C) timerString() string { + if c.N <= 0 { + return fmt.Sprintf("%3.3fs", float64(c.duration.Nanoseconds())/1e9) + } + mbs := c.mbPerSec() + mb := "" + if mbs != 0 { + mb = fmt.Sprintf("\t%7.2f MB/s", mbs) + } + nsop := c.nsPerOp() + ns := fmt.Sprintf("%10d ns/op", nsop) + if c.N > 0 && nsop < 100 { + // The format specifiers here make sure that + // the ones digits line up for all three possible formats. + if nsop < 10 { + ns = fmt.Sprintf("%13.2f ns/op", float64(c.duration.Nanoseconds())/float64(c.N)) + } else { + ns = fmt.Sprintf("%12.1f ns/op", float64(c.duration.Nanoseconds())/float64(c.N)) + } + } + memStats := "" + if c.benchMem { + allocedBytes := fmt.Sprintf("%8d B/op", int64(c.netBytes)/int64(c.N)) + allocs := fmt.Sprintf("%8d allocs/op", int64(c.netAllocs)/int64(c.N)) + memStats = fmt.Sprintf("\t%s\t%s", allocedBytes, allocs) + } + return fmt.Sprintf("%8d\t%s%s%s", c.N, ns, mb, memStats) +} + +func min(x, y int) int { + if x > y { + return y + } + return x +} + +func max(x, y int) int { + if x < y { + return y + } + return x +} + +// roundDown10 rounds a number down to the nearest power of 10. +func roundDown10(n int) int { + var tens = 0 + // tens = floor(log_10(n)) + for n > 10 { + n = n / 10 + tens++ + } + // result = 10^tens + result := 1 + for i := 0; i < tens; i++ { + result *= 10 + } + return result +} + +// roundUp rounds x up to a number of the form [1eX, 2eX, 5eX]. 
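// Editorial sketch, not part of the vendored benchmark.go: the timer
// plumbing above (StartTimer, StopTimer, ResetTimer, SetBytes and the
// iteration count c.N) is driven by user-written benchmark methods on a
// suite. A minimal illustration, with a hypothetical suite type and
// workload:
//
//     func (s *MySuite) BenchmarkCopy(c *check.C) {
//         src := make([]byte, 64*1024)
//         c.SetBytes(int64(len(src))) // makes timerString report MB/s as well
//         for i := 0; i < c.N; i++ {
//             dst := make([]byte, len(src))
//             copy(dst, src)
//         }
//     }
//
// Expensive setup can be excluded from the measurement by calling
// c.StopTimer() before it and c.StartTimer() afterwards, exactly as the
// methods above describe.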
+func roundUp(n int) int { + base := roundDown10(n) + if n < (2 * base) { + return 2 * base + } + if n < (5 * base) { + return 5 * base + } + return 10 * base +} diff --git a/vendor/gopkg.in/check.v1/benchmark_test.go b/vendor/gopkg.in/check.v1/benchmark_test.go new file mode 100644 index 0000000..4dd827c --- /dev/null +++ b/vendor/gopkg.in/check.v1/benchmark_test.go @@ -0,0 +1,91 @@ +// These tests verify the test running logic. + +package check_test + +import ( + "time" + . "gopkg.in/check.v1" +) + +var benchmarkS = Suite(&BenchmarkS{}) + +type BenchmarkS struct{} + +func (s *BenchmarkS) TestCountSuite(c *C) { + suitesRun += 1 +} + +func (s *BenchmarkS) TestBasicTestTiming(c *C) { + helper := FixtureHelper{sleepOn: "Test1", sleep: 1000000 * time.Nanosecond} + output := String{} + runConf := RunConf{Output: &output, Verbose: true} + Run(&helper, &runConf) + + expected := "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Test1\t0\\.001s\n" + + "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Test2\t0\\.000s\n" + c.Assert(output.value, Matches, expected) +} + +func (s *BenchmarkS) TestStreamTestTiming(c *C) { + helper := FixtureHelper{sleepOn: "SetUpSuite", sleep: 1000000 * time.Nanosecond} + output := String{} + runConf := RunConf{Output: &output, Stream: true} + Run(&helper, &runConf) + + expected := "(?s).*\nPASS: check_test\\.go:[0-9]+: FixtureHelper\\.SetUpSuite\t *0\\.001s\n.*" + c.Assert(output.value, Matches, expected) +} + +func (s *BenchmarkS) TestBenchmark(c *C) { + helper := FixtureHelper{sleep: 100000} + output := String{} + runConf := RunConf{ + Output: &output, + Benchmark: true, + BenchmarkTime: 10000000, + Filter: "Benchmark1", + } + Run(&helper, &runConf) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Benchmark1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "SetUpTest") + c.Check(helper.calls[5], Equals, "Benchmark1") + c.Check(helper.calls[6], Equals, "TearDownTest") + // ... and more. + + expected := "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Benchmark1\t *100\t *[12][0-9]{5} ns/op\n" + c.Assert(output.value, Matches, expected) +} + +func (s *BenchmarkS) TestBenchmarkBytes(c *C) { + helper := FixtureHelper{sleep: 100000} + output := String{} + runConf := RunConf{ + Output: &output, + Benchmark: true, + BenchmarkTime: 10000000, + Filter: "Benchmark2", + } + Run(&helper, &runConf) + + expected := "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Benchmark2\t *100\t *[12][0-9]{5} ns/op\t *[4-9]\\.[0-9]{2} MB/s\n" + c.Assert(output.value, Matches, expected) +} + +func (s *BenchmarkS) TestBenchmarkMem(c *C) { + helper := FixtureHelper{sleep: 100000} + output := String{} + runConf := RunConf{ + Output: &output, + Benchmark: true, + BenchmarkMem: true, + BenchmarkTime: 10000000, + Filter: "Benchmark3", + } + Run(&helper, &runConf) + + expected := "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Benchmark3\t *100\t *[12][0-9]{5} ns/op\t *[0-9]+ B/op\t *[1-9] allocs/op\n" + c.Assert(output.value, Matches, expected) +} diff --git a/vendor/gopkg.in/check.v1/bootstrap_test.go b/vendor/gopkg.in/check.v1/bootstrap_test.go new file mode 100644 index 0000000..e55f327 --- /dev/null +++ b/vendor/gopkg.in/check.v1/bootstrap_test.go @@ -0,0 +1,82 @@ +// These initial tests are for bootstrapping. They verify that we can +// basically use the testing infrastructure itself to check if the test +// system is working. 
+// +// These tests use will break down the test runner badly in case of +// errors because if they simply fail, we can't be sure the developer +// will ever see anything (because failing means the failing system +// somehow isn't working! :-) +// +// Do not assume *any* internal functionality works as expected besides +// what's actually tested here. + +package check_test + +import ( + "fmt" + "gopkg.in/check.v1" + "strings" +) + +type BootstrapS struct{} + +var boostrapS = check.Suite(&BootstrapS{}) + +func (s *BootstrapS) TestCountSuite(c *check.C) { + suitesRun += 1 +} + +func (s *BootstrapS) TestFailedAndFail(c *check.C) { + if c.Failed() { + critical("c.Failed() must be false first!") + } + c.Fail() + if !c.Failed() { + critical("c.Fail() didn't put the test in a failed state!") + } + c.Succeed() +} + +func (s *BootstrapS) TestFailedAndSucceed(c *check.C) { + c.Fail() + c.Succeed() + if c.Failed() { + critical("c.Succeed() didn't put the test back in a non-failed state") + } +} + +func (s *BootstrapS) TestLogAndGetTestLog(c *check.C) { + c.Log("Hello there!") + log := c.GetTestLog() + if log != "Hello there!\n" { + critical(fmt.Sprintf("Log() or GetTestLog() is not working! Got: %#v", log)) + } +} + +func (s *BootstrapS) TestLogfAndGetTestLog(c *check.C) { + c.Logf("Hello %v", "there!") + log := c.GetTestLog() + if log != "Hello there!\n" { + critical(fmt.Sprintf("Logf() or GetTestLog() is not working! Got: %#v", log)) + } +} + +func (s *BootstrapS) TestRunShowsErrors(c *check.C) { + output := String{} + check.Run(&FailHelper{}, &check.RunConf{Output: &output}) + if strings.Index(output.value, "Expected failure!") == -1 { + critical(fmt.Sprintf("RunWithWriter() output did not contain the "+ + "expected failure! Got: %#v", + output.value)) + } +} + +func (s *BootstrapS) TestRunDoesntShowSuccesses(c *check.C) { + output := String{} + check.Run(&SuccessHelper{}, &check.RunConf{Output: &output}) + if strings.Index(output.value, "Expected success!") != -1 { + critical(fmt.Sprintf("RunWithWriter() output contained a successful "+ + "test! Got: %#v", + output.value)) + } +} diff --git a/vendor/gopkg.in/check.v1/check.go b/vendor/gopkg.in/check.v1/check.go new file mode 100644 index 0000000..82c26fa --- /dev/null +++ b/vendor/gopkg.in/check.v1/check.go @@ -0,0 +1,873 @@ +// Package check is a rich testing extension for Go's testing package. +// +// For details about the project, see: +// +// http://labix.org/gocheck +// +package check + +import ( + "bytes" + "errors" + "fmt" + "io" + "math/rand" + "os" + "path" + "path/filepath" + "reflect" + "regexp" + "runtime" + "strconv" + "strings" + "sync" + "sync/atomic" + "time" +) + +// ----------------------------------------------------------------------- +// Internal type which deals with suite method calling. + +const ( + fixtureKd = iota + testKd +) + +type funcKind int + +const ( + succeededSt = iota + failedSt + skippedSt + panickedSt + fixturePanickedSt + missedSt +) + +type funcStatus uint32 + +// A method value can't reach its own Method structure. 
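// Editorial sketch, not part of the vendored check.go: the package doc
// above is terse, so here is the usual way a test package wires gocheck
// into "go test", mirroring the pattern used in this package's own
// check_test.go below. The names mypkg_test, MySuite and TestAnswer are
// hypothetical.
//
//     package mypkg_test
//
//     import (
//         "testing"
//
//         "gopkg.in/check.v1"
//     )
//
//     // Hook gocheck into the standard "go test" runner.
//     func Test(t *testing.T) { check.TestingT(t) }
//
//     type MySuite struct{}
//
//     var _ = check.Suite(&MySuite{})
//
//     func (s *MySuite) TestAnswer(c *check.C) {
//         c.Assert(6*7, check.Equals, 42)
//     }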
+type methodType struct { + reflect.Value + Info reflect.Method +} + +func newMethod(receiver reflect.Value, i int) *methodType { + return &methodType{receiver.Method(i), receiver.Type().Method(i)} +} + +func (method *methodType) PC() uintptr { + return method.Info.Func.Pointer() +} + +func (method *methodType) suiteName() string { + t := method.Info.Type.In(0) + if t.Kind() == reflect.Ptr { + t = t.Elem() + } + return t.Name() +} + +func (method *methodType) String() string { + return method.suiteName() + "." + method.Info.Name +} + +func (method *methodType) matches(re *regexp.Regexp) bool { + return (re.MatchString(method.Info.Name) || + re.MatchString(method.suiteName()) || + re.MatchString(method.String())) +} + +type C struct { + method *methodType + kind funcKind + testName string + _status funcStatus + logb *logger + logw io.Writer + done chan *C + reason string + mustFail bool + tempDir *tempDir + benchMem bool + startTime time.Time + timer +} + +func (c *C) status() funcStatus { + return funcStatus(atomic.LoadUint32((*uint32)(&c._status))) +} + +func (c *C) setStatus(s funcStatus) { + atomic.StoreUint32((*uint32)(&c._status), uint32(s)) +} + +func (c *C) stopNow() { + runtime.Goexit() +} + +// logger is a concurrency safe byte.Buffer +type logger struct { + sync.Mutex + writer bytes.Buffer +} + +func (l *logger) Write(buf []byte) (int, error) { + l.Lock() + defer l.Unlock() + return l.writer.Write(buf) +} + +func (l *logger) WriteTo(w io.Writer) (int64, error) { + l.Lock() + defer l.Unlock() + return l.writer.WriteTo(w) +} + +func (l *logger) String() string { + l.Lock() + defer l.Unlock() + return l.writer.String() +} + +// ----------------------------------------------------------------------- +// Handling of temporary files and directories. + +type tempDir struct { + sync.Mutex + path string + counter int +} + +func (td *tempDir) newPath() string { + td.Lock() + defer td.Unlock() + if td.path == "" { + var err error + for i := 0; i != 100; i++ { + path := fmt.Sprintf("%s%ccheck-%d", os.TempDir(), os.PathSeparator, rand.Int()) + if err = os.Mkdir(path, 0700); err == nil { + td.path = path + break + } + } + if td.path == "" { + panic("Couldn't create temporary directory: " + err.Error()) + } + } + result := filepath.Join(td.path, strconv.Itoa(td.counter)) + td.counter += 1 + return result +} + +func (td *tempDir) removeAll() { + td.Lock() + defer td.Unlock() + if td.path != "" { + err := os.RemoveAll(td.path) + if err != nil { + fmt.Fprintf(os.Stderr, "WARNING: Error cleaning up temporaries: "+err.Error()) + } + } +} + +// Create a new temporary directory which is automatically removed after +// the suite finishes running. +func (c *C) MkDir() string { + path := c.tempDir.newPath() + if err := os.Mkdir(path, 0700); err != nil { + panic(fmt.Sprintf("Couldn't create temporary directory %s: %s", path, err.Error())) + } + return path +} + +// ----------------------------------------------------------------------- +// Low-level logging functions. + +func (c *C) log(args ...interface{}) { + c.writeLog([]byte(fmt.Sprint(args...) 
+ "\n")) +} + +func (c *C) logf(format string, args ...interface{}) { + c.writeLog([]byte(fmt.Sprintf(format+"\n", args...))) +} + +func (c *C) logNewLine() { + c.writeLog([]byte{'\n'}) +} + +func (c *C) writeLog(buf []byte) { + c.logb.Write(buf) + if c.logw != nil { + c.logw.Write(buf) + } +} + +func hasStringOrError(x interface{}) (ok bool) { + _, ok = x.(fmt.Stringer) + if ok { + return + } + _, ok = x.(error) + return +} + +func (c *C) logValue(label string, value interface{}) { + if label == "" { + if hasStringOrError(value) { + c.logf("... %#v (%q)", value, value) + } else { + c.logf("... %#v", value) + } + } else if value == nil { + c.logf("... %s = nil", label) + } else { + if hasStringOrError(value) { + fv := fmt.Sprintf("%#v", value) + qv := fmt.Sprintf("%q", value) + if fv != qv { + c.logf("... %s %s = %s (%s)", label, reflect.TypeOf(value), fv, qv) + return + } + } + if s, ok := value.(string); ok && isMultiLine(s) { + c.logf(`... %s %s = "" +`, label, reflect.TypeOf(value)) + c.logMultiLine(s) + } else { + c.logf("... %s %s = %#v", label, reflect.TypeOf(value), value) + } + } +} + +func (c *C) logMultiLine(s string) { + b := make([]byte, 0, len(s)*2) + i := 0 + n := len(s) + for i < n { + j := i + 1 + for j < n && s[j-1] != '\n' { + j++ + } + b = append(b, "... "...) + b = strconv.AppendQuote(b, s[i:j]) + if j < n { + b = append(b, " +"...) + } + b = append(b, '\n') + i = j + } + c.writeLog(b) +} + +func isMultiLine(s string) bool { + for i := 0; i+1 < len(s); i++ { + if s[i] == '\n' { + return true + } + } + return false +} + +func (c *C) logString(issue string) { + c.log("... ", issue) +} + +func (c *C) logCaller(skip int) { + // This is a bit heavier than it ought to be. + skip += 1 // Our own frame. + pc, callerFile, callerLine, ok := runtime.Caller(skip) + if !ok { + return + } + var testFile string + var testLine int + testFunc := runtime.FuncForPC(c.method.PC()) + if runtime.FuncForPC(pc) != testFunc { + for { + skip += 1 + if pc, file, line, ok := runtime.Caller(skip); ok { + // Note that the test line may be different on + // distinct calls for the same test. Showing + // the "internal" line is helpful when debugging. + if runtime.FuncForPC(pc) == testFunc { + testFile, testLine = file, line + break + } + } else { + break + } + } + } + if testFile != "" && (testFile != callerFile || testLine != callerLine) { + c.logCode(testFile, testLine) + } + c.logCode(callerFile, callerLine) +} + +func (c *C) logCode(path string, line int) { + c.logf("%s:%d:", nicePath(path), line) + code, err := printLine(path, line) + if code == "" { + code = "..." // XXX Open the file and take the raw line. + if err != nil { + code += err.Error() + } + } + c.log(indent(code, " ")) +} + +var valueGo = filepath.Join("reflect", "value.go") +var asmGo = filepath.Join("runtime", "asm_") + +func (c *C) logPanic(skip int, value interface{}) { + skip++ // Our own frame. + initialSkip := skip + for ; ; skip++ { + if pc, file, line, ok := runtime.Caller(skip); ok { + if skip == initialSkip { + c.logf("... Panic: %s (PC=0x%X)\n", value, pc) + } + name := niceFuncName(pc) + path := nicePath(file) + if strings.Contains(path, "/gopkg.in/check.v") { + continue + } + if name == "Value.call" && strings.HasSuffix(path, valueGo) { + continue + } + if (name == "call16" || name == "call32") && strings.Contains(path, asmGo) { + continue + } + c.logf("%s:%d\n in %s", nicePath(file), line, name) + } else { + break + } + } +} + +func (c *C) logSoftPanic(issue string) { + c.log("... 
Panic: ", issue) +} + +func (c *C) logArgPanic(method *methodType, expectedType string) { + c.logf("... Panic: %s argument should be %s", + niceFuncName(method.PC()), expectedType) +} + +// ----------------------------------------------------------------------- +// Some simple formatting helpers. + +var initWD, initWDErr = os.Getwd() + +func init() { + if initWDErr == nil { + initWD = strings.Replace(initWD, "\\", "/", -1) + "/" + } +} + +func nicePath(path string) string { + if initWDErr == nil { + if strings.HasPrefix(path, initWD) { + return path[len(initWD):] + } + } + return path +} + +func niceFuncPath(pc uintptr) string { + function := runtime.FuncForPC(pc) + if function != nil { + filename, line := function.FileLine(pc) + return fmt.Sprintf("%s:%d", nicePath(filename), line) + } + return "" +} + +func niceFuncName(pc uintptr) string { + function := runtime.FuncForPC(pc) + if function != nil { + name := path.Base(function.Name()) + if i := strings.Index(name, "."); i > 0 { + name = name[i+1:] + } + if strings.HasPrefix(name, "(*") { + if i := strings.Index(name, ")"); i > 0 { + name = name[2:i] + name[i+1:] + } + } + if i := strings.LastIndex(name, ".*"); i != -1 { + name = name[:i] + "." + name[i+2:] + } + if i := strings.LastIndex(name, "·"); i != -1 { + name = name[:i] + "." + name[i+2:] + } + return name + } + return "" +} + +// ----------------------------------------------------------------------- +// Result tracker to aggregate call results. + +type Result struct { + Succeeded int + Failed int + Skipped int + Panicked int + FixturePanicked int + ExpectedFailures int + Missed int // Not even tried to run, related to a panic in the fixture. + RunError error // Houston, we've got a problem. + WorkDir string // If KeepWorkDir is true +} + +type resultTracker struct { + result Result + _lastWasProblem bool + _waiting int + _missed int + _expectChan chan *C + _doneChan chan *C + _stopChan chan bool +} + +func newResultTracker() *resultTracker { + return &resultTracker{_expectChan: make(chan *C), // Synchronous + _doneChan: make(chan *C, 32), // Asynchronous + _stopChan: make(chan bool)} // Synchronous +} + +func (tracker *resultTracker) start() { + go tracker._loopRoutine() +} + +func (tracker *resultTracker) waitAndStop() { + <-tracker._stopChan +} + +func (tracker *resultTracker) expectCall(c *C) { + tracker._expectChan <- c +} + +func (tracker *resultTracker) callDone(c *C) { + tracker._doneChan <- c +} + +func (tracker *resultTracker) _loopRoutine() { + for { + var c *C + if tracker._waiting > 0 { + // Calls still running. Can't stop. + select { + // XXX Reindent this (not now to make diff clear) + case c = <-tracker._expectChan: + tracker._waiting += 1 + case c = <-tracker._doneChan: + tracker._waiting -= 1 + switch c.status() { + case succeededSt: + if c.kind == testKd { + if c.mustFail { + tracker.result.ExpectedFailures++ + } else { + tracker.result.Succeeded++ + } + } + case failedSt: + tracker.result.Failed++ + case panickedSt: + if c.kind == fixtureKd { + tracker.result.FixturePanicked++ + } else { + tracker.result.Panicked++ + } + case fixturePanickedSt: + // Track it as missed, since the panic + // was on the fixture, not on the test. + tracker.result.Missed++ + case missedSt: + tracker.result.Missed++ + case skippedSt: + if c.kind == testKd { + tracker.result.Skipped++ + } + } + } + } else { + // No calls. Can stop, but no done calls here. 
+ select { + case tracker._stopChan <- true: + return + case c = <-tracker._expectChan: + tracker._waiting += 1 + case c = <-tracker._doneChan: + panic("Tracker got an unexpected done call.") + } + } + } +} + +// ----------------------------------------------------------------------- +// The underlying suite runner. + +type suiteRunner struct { + suite interface{} + setUpSuite, tearDownSuite *methodType + setUpTest, tearDownTest *methodType + tests []*methodType + tracker *resultTracker + tempDir *tempDir + keepDir bool + output *outputWriter + reportedProblemLast bool + benchTime time.Duration + benchMem bool +} + +type RunConf struct { + Output io.Writer + Stream bool + Verbose bool + Filter string + Benchmark bool + BenchmarkTime time.Duration // Defaults to 1 second + BenchmarkMem bool + KeepWorkDir bool +} + +// Create a new suiteRunner able to run all methods in the given suite. +func newSuiteRunner(suite interface{}, runConf *RunConf) *suiteRunner { + var conf RunConf + if runConf != nil { + conf = *runConf + } + if conf.Output == nil { + conf.Output = os.Stdout + } + if conf.Benchmark { + conf.Verbose = true + } + + suiteType := reflect.TypeOf(suite) + suiteNumMethods := suiteType.NumMethod() + suiteValue := reflect.ValueOf(suite) + + runner := &suiteRunner{ + suite: suite, + output: newOutputWriter(conf.Output, conf.Stream, conf.Verbose), + tracker: newResultTracker(), + benchTime: conf.BenchmarkTime, + benchMem: conf.BenchmarkMem, + tempDir: &tempDir{}, + keepDir: conf.KeepWorkDir, + tests: make([]*methodType, 0, suiteNumMethods), + } + if runner.benchTime == 0 { + runner.benchTime = 1 * time.Second + } + + var filterRegexp *regexp.Regexp + if conf.Filter != "" { + if regexp, err := regexp.Compile(conf.Filter); err != nil { + msg := "Bad filter expression: " + err.Error() + runner.tracker.result.RunError = errors.New(msg) + return runner + } else { + filterRegexp = regexp + } + } + + for i := 0; i != suiteNumMethods; i++ { + method := newMethod(suiteValue, i) + switch method.Info.Name { + case "SetUpSuite": + runner.setUpSuite = method + case "TearDownSuite": + runner.tearDownSuite = method + case "SetUpTest": + runner.setUpTest = method + case "TearDownTest": + runner.tearDownTest = method + default: + prefix := "Test" + if conf.Benchmark { + prefix = "Benchmark" + } + if !strings.HasPrefix(method.Info.Name, prefix) { + continue + } + if filterRegexp == nil || method.matches(filterRegexp) { + runner.tests = append(runner.tests, method) + } + } + } + return runner +} + +// Run all methods in the given suite. 
+func (runner *suiteRunner) run() *Result { + if runner.tracker.result.RunError == nil && len(runner.tests) > 0 { + runner.tracker.start() + if runner.checkFixtureArgs() { + c := runner.runFixture(runner.setUpSuite, "", nil) + if c == nil || c.status() == succeededSt { + for i := 0; i != len(runner.tests); i++ { + c := runner.runTest(runner.tests[i]) + if c.status() == fixturePanickedSt { + runner.skipTests(missedSt, runner.tests[i+1:]) + break + } + } + } else if c != nil && c.status() == skippedSt { + runner.skipTests(skippedSt, runner.tests) + } else { + runner.skipTests(missedSt, runner.tests) + } + runner.runFixture(runner.tearDownSuite, "", nil) + } else { + runner.skipTests(missedSt, runner.tests) + } + runner.tracker.waitAndStop() + if runner.keepDir { + runner.tracker.result.WorkDir = runner.tempDir.path + } else { + runner.tempDir.removeAll() + } + } + return &runner.tracker.result +} + +// Create a call object with the given suite method, and fork a +// goroutine with the provided dispatcher for running it. +func (runner *suiteRunner) forkCall(method *methodType, kind funcKind, testName string, logb *logger, dispatcher func(c *C)) *C { + var logw io.Writer + if runner.output.Stream { + logw = runner.output + } + if logb == nil { + logb = new(logger) + } + c := &C{ + method: method, + kind: kind, + testName: testName, + logb: logb, + logw: logw, + tempDir: runner.tempDir, + done: make(chan *C, 1), + timer: timer{benchTime: runner.benchTime}, + startTime: time.Now(), + benchMem: runner.benchMem, + } + runner.tracker.expectCall(c) + go (func() { + runner.reportCallStarted(c) + defer runner.callDone(c) + dispatcher(c) + })() + return c +} + +// Same as forkCall(), but wait for call to finish before returning. +func (runner *suiteRunner) runFunc(method *methodType, kind funcKind, testName string, logb *logger, dispatcher func(c *C)) *C { + c := runner.forkCall(method, kind, testName, logb, dispatcher) + <-c.done + return c +} + +// Handle a finished call. If there were any panics, update the call status +// accordingly. Then, mark the call as done and report to the tracker. +func (runner *suiteRunner) callDone(c *C) { + value := recover() + if value != nil { + switch v := value.(type) { + case *fixturePanic: + if v.status == skippedSt { + c.setStatus(skippedSt) + } else { + c.logSoftPanic("Fixture has panicked (see related PANIC)") + c.setStatus(fixturePanickedSt) + } + default: + c.logPanic(1, value) + c.setStatus(panickedSt) + } + } + if c.mustFail { + switch c.status() { + case failedSt: + c.setStatus(succeededSt) + case succeededSt: + c.setStatus(failedSt) + c.logString("Error: Test succeeded, but was expected to fail") + c.logString("Reason: " + c.reason) + } + } + + runner.reportCallDone(c) + c.done <- c +} + +// Runs a fixture call synchronously. The fixture will still be run in a +// goroutine like all suite methods, but this method will not return +// while the fixture goroutine is not done, because the fixture must be +// run in a desired order. +func (runner *suiteRunner) runFixture(method *methodType, testName string, logb *logger) *C { + if method != nil { + c := runner.runFunc(method, fixtureKd, testName, logb, func(c *C) { + c.ResetTimer() + c.StartTimer() + defer c.StopTimer() + c.method.Call([]reflect.Value{reflect.ValueOf(c)}) + }) + return c + } + return nil +} + +// Run the fixture method with runFixture(), but panic with a fixturePanic{} +// in case the fixture method panics. 
This makes it easier to track the +// fixture panic together with other call panics within forkTest(). +func (runner *suiteRunner) runFixtureWithPanic(method *methodType, testName string, logb *logger, skipped *bool) *C { + if skipped != nil && *skipped { + return nil + } + c := runner.runFixture(method, testName, logb) + if c != nil && c.status() != succeededSt { + if skipped != nil { + *skipped = c.status() == skippedSt + } + panic(&fixturePanic{c.status(), method}) + } + return c +} + +type fixturePanic struct { + status funcStatus + method *methodType +} + +// Run the suite test method, together with the test-specific fixture, +// asynchronously. +func (runner *suiteRunner) forkTest(method *methodType) *C { + testName := method.String() + return runner.forkCall(method, testKd, testName, nil, func(c *C) { + var skipped bool + defer runner.runFixtureWithPanic(runner.tearDownTest, testName, nil, &skipped) + defer c.StopTimer() + benchN := 1 + for { + runner.runFixtureWithPanic(runner.setUpTest, testName, c.logb, &skipped) + mt := c.method.Type() + if mt.NumIn() != 1 || mt.In(0) != reflect.TypeOf(c) { + // Rather than a plain panic, provide a more helpful message when + // the argument type is incorrect. + c.setStatus(panickedSt) + c.logArgPanic(c.method, "*check.C") + return + } + if strings.HasPrefix(c.method.Info.Name, "Test") { + c.ResetTimer() + c.StartTimer() + c.method.Call([]reflect.Value{reflect.ValueOf(c)}) + return + } + if !strings.HasPrefix(c.method.Info.Name, "Benchmark") { + panic("unexpected method prefix: " + c.method.Info.Name) + } + + runtime.GC() + c.N = benchN + c.ResetTimer() + c.StartTimer() + c.method.Call([]reflect.Value{reflect.ValueOf(c)}) + c.StopTimer() + if c.status() != succeededSt || c.duration >= c.benchTime || benchN >= 1e9 { + return + } + perOpN := int(1e9) + if c.nsPerOp() != 0 { + perOpN = int(c.benchTime.Nanoseconds() / c.nsPerOp()) + } + + // Logic taken from the stock testing package: + // - Run more iterations than we think we'll need for a second (1.5x). + // - Don't grow too fast in case we had timing errors previously. + // - Be sure to run at least one more than last time. + benchN = max(min(perOpN+perOpN/2, 100*benchN), benchN+1) + benchN = roundUp(benchN) + + skipped = true // Don't run the deferred one if this panics. + runner.runFixtureWithPanic(runner.tearDownTest, testName, nil, nil) + skipped = false + } + }) +} + +// Same as forkTest(), but wait for the test to finish before returning. +func (runner *suiteRunner) runTest(method *methodType) *C { + c := runner.forkTest(method) + <-c.done + return c +} + +// Helper to mark tests as skipped or missed. A bit heavy for what +// it does, but it enables homogeneous handling of tracking, including +// nice verbose output. +func (runner *suiteRunner) skipTests(status funcStatus, methods []*methodType) { + for _, method := range methods { + runner.runFunc(method, testKd, "", nil, func(c *C) { + c.setStatus(status) + }) + } +} + +// Verify if the fixture arguments are *check.C. In case of errors, +// log the error as a panic in the fixture method call, and return false. 
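// Editorial aside, not part of the vendored check.go: a worked example of
// the benchmark growth loop in forkTest above, assuming benchTime is the
// default 1s and each iteration takes roughly 1ms.
//
//     pass 1: benchN = 1    -> duration ~1ms,   nsPerOp ~1e6
//             perOpN = 1e9/1e6 = 1000
//             benchN = roundUp(max(min(1500, 100*1), 2)) = roundUp(100) = 100
//     pass 2: benchN = 100  -> duration ~100ms, still < benchTime
//             benchN = roundUp(max(min(1500, 100*100), 101)) = roundUp(1500) = 2000
//     pass 3: benchN = 2000 -> duration ~2s >= benchTime, so the loop stops
//             and the result is reported for N = 2000.
//
// roundUp snaps to the 1eX/2eX/5eX grid, e.g. roundUp(137) == 200 and
// roundUp(1500) == 2000, which keeps the reported N values tidy.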
+func (runner *suiteRunner) checkFixtureArgs() bool { + succeeded := true + argType := reflect.TypeOf(&C{}) + for _, method := range []*methodType{runner.setUpSuite, runner.tearDownSuite, runner.setUpTest, runner.tearDownTest} { + if method != nil { + mt := method.Type() + if mt.NumIn() != 1 || mt.In(0) != argType { + succeeded = false + runner.runFunc(method, fixtureKd, "", nil, func(c *C) { + c.logArgPanic(method, "*check.C") + c.setStatus(panickedSt) + }) + } + } + } + return succeeded +} + +func (runner *suiteRunner) reportCallStarted(c *C) { + runner.output.WriteCallStarted("START", c) +} + +func (runner *suiteRunner) reportCallDone(c *C) { + runner.tracker.callDone(c) + switch c.status() { + case succeededSt: + if c.mustFail { + runner.output.WriteCallSuccess("FAIL EXPECTED", c) + } else { + runner.output.WriteCallSuccess("PASS", c) + } + case skippedSt: + runner.output.WriteCallSuccess("SKIP", c) + case failedSt: + runner.output.WriteCallProblem("FAIL", c) + case panickedSt: + runner.output.WriteCallProblem("PANIC", c) + case fixturePanickedSt: + // That's a testKd call reporting that its fixture + // has panicked. The fixture call which caused the + // panic itself was tracked above. We'll report to + // aid debugging. + runner.output.WriteCallProblem("PANIC", c) + case missedSt: + runner.output.WriteCallSuccess("MISS", c) + } +} diff --git a/vendor/gopkg.in/check.v1/check_test.go b/vendor/gopkg.in/check.v1/check_test.go new file mode 100644 index 0000000..871b325 --- /dev/null +++ b/vendor/gopkg.in/check.v1/check_test.go @@ -0,0 +1,207 @@ +// This file contains just a few generic helpers which are used by the +// other test files. + +package check_test + +import ( + "flag" + "fmt" + "os" + "regexp" + "runtime" + "testing" + "time" + + "gopkg.in/check.v1" +) + +// We count the number of suites run at least to get a vague hint that the +// test suite is behaving as it should. Otherwise a bug introduced at the +// very core of the system could go unperceived. +const suitesRunExpected = 8 + +var suitesRun int = 0 + +func Test(t *testing.T) { + check.TestingT(t) + if suitesRun != suitesRunExpected && flag.Lookup("check.f").Value.String() == "" { + critical(fmt.Sprintf("Expected %d suites to run rather than %d", + suitesRunExpected, suitesRun)) + } +} + +// ----------------------------------------------------------------------- +// Helper functions. + +// Break down badly. This is used in test cases which can't yet assume +// that the fundamental bits are working. +func critical(error string) { + fmt.Fprintln(os.Stderr, "CRITICAL: "+error) + os.Exit(1) +} + +// Return the file line where it's called. +func getMyLine() int { + if _, _, line, ok := runtime.Caller(1); ok { + return line + } + return -1 +} + +// ----------------------------------------------------------------------- +// Helper type implementing a basic io.Writer for testing output. + +// Type implementing the io.Writer interface for analyzing output. +type String struct { + value string +} + +// The only function required by the io.Writer interface. Will append +// written data to the String.value string. +func (s *String) Write(p []byte) (n int, err error) { + s.value += string(p) + return len(p), nil +} + +// Trivial wrapper to test errors happening on a different file +// than the test itself. 
+func checkEqualWrapper(c *check.C, obtained, expected interface{}) (result bool, line int) { + return c.Check(obtained, check.Equals, expected), getMyLine() +} + +// ----------------------------------------------------------------------- +// Helper suite for testing basic fail behavior. + +type FailHelper struct { + testLine int +} + +func (s *FailHelper) TestLogAndFail(c *check.C) { + s.testLine = getMyLine() - 1 + c.Log("Expected failure!") + c.Fail() +} + +// ----------------------------------------------------------------------- +// Helper suite for testing basic success behavior. + +type SuccessHelper struct{} + +func (s *SuccessHelper) TestLogAndSucceed(c *check.C) { + c.Log("Expected success!") +} + +// ----------------------------------------------------------------------- +// Helper suite for testing ordering and behavior of fixture. + +type FixtureHelper struct { + calls []string + panicOn string + skip bool + skipOnN int + sleepOn string + sleep time.Duration + bytes int64 +} + +func (s *FixtureHelper) trace(name string, c *check.C) { + s.calls = append(s.calls, name) + if name == s.panicOn { + panic(name) + } + if s.sleep > 0 && s.sleepOn == name { + time.Sleep(s.sleep) + } + if s.skip && s.skipOnN == len(s.calls)-1 { + c.Skip("skipOnN == n") + } +} + +func (s *FixtureHelper) SetUpSuite(c *check.C) { + s.trace("SetUpSuite", c) +} + +func (s *FixtureHelper) TearDownSuite(c *check.C) { + s.trace("TearDownSuite", c) +} + +func (s *FixtureHelper) SetUpTest(c *check.C) { + s.trace("SetUpTest", c) +} + +func (s *FixtureHelper) TearDownTest(c *check.C) { + s.trace("TearDownTest", c) +} + +func (s *FixtureHelper) Test1(c *check.C) { + s.trace("Test1", c) +} + +func (s *FixtureHelper) Test2(c *check.C) { + s.trace("Test2", c) +} + +func (s *FixtureHelper) Benchmark1(c *check.C) { + s.trace("Benchmark1", c) + for i := 0; i < c.N; i++ { + time.Sleep(s.sleep) + } +} + +func (s *FixtureHelper) Benchmark2(c *check.C) { + s.trace("Benchmark2", c) + c.SetBytes(1024) + for i := 0; i < c.N; i++ { + time.Sleep(s.sleep) + } +} + +func (s *FixtureHelper) Benchmark3(c *check.C) { + var x []int64 + s.trace("Benchmark3", c) + for i := 0; i < c.N; i++ { + time.Sleep(s.sleep) + x = make([]int64, 5) + _ = x + } +} + +// ----------------------------------------------------------------------- +// Helper which checks the state of the test and ensures that it matches +// the given expectations. Depends on c.Errorf() working, so shouldn't +// be used to test this one function. + +type expectedState struct { + name string + result interface{} + failed bool + log string +} + +// Verify the state of the test. Note that since this also verifies if +// the test is supposed to be in a failed state, no other checks should +// be done in addition to what is being tested. 
+func checkState(c *check.C, result interface{}, expected *expectedState) { + failed := c.Failed() + c.Succeed() + log := c.GetTestLog() + matched, matchError := regexp.MatchString("^"+expected.log+"$", log) + if matchError != nil { + c.Errorf("Error in matching expression used in testing %s", + expected.name) + } else if !matched { + c.Errorf("%s logged:\n----------\n%s----------\n\nExpected:\n----------\n%s\n----------", + expected.name, log, expected.log) + } + if result != expected.result { + c.Errorf("%s returned %#v rather than %#v", + expected.name, result, expected.result) + } + if failed != expected.failed { + if failed { + c.Errorf("%s has failed when it shouldn't", expected.name) + } else { + c.Errorf("%s has not failed when it should", expected.name) + } + } +} diff --git a/vendor/gopkg.in/check.v1/checkers.go b/vendor/gopkg.in/check.v1/checkers.go new file mode 100644 index 0000000..bac3387 --- /dev/null +++ b/vendor/gopkg.in/check.v1/checkers.go @@ -0,0 +1,458 @@ +package check + +import ( + "fmt" + "reflect" + "regexp" +) + +// ----------------------------------------------------------------------- +// CommentInterface and Commentf helper, to attach extra information to checks. + +type comment struct { + format string + args []interface{} +} + +// Commentf returns an infomational value to use with Assert or Check calls. +// If the checker test fails, the provided arguments will be passed to +// fmt.Sprintf, and will be presented next to the logged failure. +// +// For example: +// +// c.Assert(v, Equals, 42, Commentf("Iteration #%d failed.", i)) +// +// Note that if the comment is constant, a better option is to +// simply use a normal comment right above or next to the line, as +// it will also get printed with any errors: +// +// c.Assert(l, Equals, 8192) // Ensure buffer size is correct (bug #123) +// +func Commentf(format string, args ...interface{}) CommentInterface { + return &comment{format, args} +} + +// CommentInterface must be implemented by types that attach extra +// information to failed checks. See the Commentf function for details. +type CommentInterface interface { + CheckCommentString() string +} + +func (c *comment) CheckCommentString() string { + return fmt.Sprintf(c.format, c.args...) +} + +// ----------------------------------------------------------------------- +// The Checker interface. + +// The Checker interface must be provided by checkers used with +// the Assert and Check verification methods. +type Checker interface { + Info() *CheckerInfo + Check(params []interface{}, names []string) (result bool, error string) +} + +// See the Checker interface. +type CheckerInfo struct { + Name string + Params []string +} + +func (info *CheckerInfo) Info() *CheckerInfo { + return info +} + +// ----------------------------------------------------------------------- +// Not checker logic inverter. + +// The Not checker inverts the logic of the provided checker. The +// resulting checker will succeed where the original one failed, and +// vice-versa. 
+// +// For example: +// +// c.Assert(a, Not(Equals), b) +// +func Not(checker Checker) Checker { + return ¬Checker{checker} +} + +type notChecker struct { + sub Checker +} + +func (checker *notChecker) Info() *CheckerInfo { + info := *checker.sub.Info() + info.Name = "Not(" + info.Name + ")" + return &info +} + +func (checker *notChecker) Check(params []interface{}, names []string) (result bool, error string) { + result, error = checker.sub.Check(params, names) + result = !result + return +} + +// ----------------------------------------------------------------------- +// IsNil checker. + +type isNilChecker struct { + *CheckerInfo +} + +// The IsNil checker tests whether the obtained value is nil. +// +// For example: +// +// c.Assert(err, IsNil) +// +var IsNil Checker = &isNilChecker{ + &CheckerInfo{Name: "IsNil", Params: []string{"value"}}, +} + +func (checker *isNilChecker) Check(params []interface{}, names []string) (result bool, error string) { + return isNil(params[0]), "" +} + +func isNil(obtained interface{}) (result bool) { + if obtained == nil { + result = true + } else { + switch v := reflect.ValueOf(obtained); v.Kind() { + case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Ptr, reflect.Slice: + return v.IsNil() + } + } + return +} + +// ----------------------------------------------------------------------- +// NotNil checker. Alias for Not(IsNil), since it's so common. + +type notNilChecker struct { + *CheckerInfo +} + +// The NotNil checker verifies that the obtained value is not nil. +// +// For example: +// +// c.Assert(iface, NotNil) +// +// This is an alias for Not(IsNil), made available since it's a +// fairly common check. +// +var NotNil Checker = ¬NilChecker{ + &CheckerInfo{Name: "NotNil", Params: []string{"value"}}, +} + +func (checker *notNilChecker) Check(params []interface{}, names []string) (result bool, error string) { + return !isNil(params[0]), "" +} + +// ----------------------------------------------------------------------- +// Equals checker. + +type equalsChecker struct { + *CheckerInfo +} + +// The Equals checker verifies that the obtained value is equal to +// the expected value, according to usual Go semantics for ==. +// +// For example: +// +// c.Assert(value, Equals, 42) +// +var Equals Checker = &equalsChecker{ + &CheckerInfo{Name: "Equals", Params: []string{"obtained", "expected"}}, +} + +func (checker *equalsChecker) Check(params []interface{}, names []string) (result bool, error string) { + defer func() { + if v := recover(); v != nil { + result = false + error = fmt.Sprint(v) + } + }() + return params[0] == params[1], "" +} + +// ----------------------------------------------------------------------- +// DeepEquals checker. + +type deepEqualsChecker struct { + *CheckerInfo +} + +// The DeepEquals checker verifies that the obtained value is deep-equal to +// the expected value. The check will work correctly even when facing +// slices, interfaces, and values of different types (which always fail +// the test). +// +// For example: +// +// c.Assert(value, DeepEquals, 42) +// c.Assert(array, DeepEquals, []string{"hi", "there"}) +// +var DeepEquals Checker = &deepEqualsChecker{ + &CheckerInfo{Name: "DeepEquals", Params: []string{"obtained", "expected"}}, +} + +func (checker *deepEqualsChecker) Check(params []interface{}, names []string) (result bool, error string) { + return reflect.DeepEqual(params[0], params[1]), "" +} + +// ----------------------------------------------------------------------- +// HasLen checker. 
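// Editorial sketch, not part of the vendored checkers.go: the Checker
// interface defined earlier in this file is the extension point for
// user-defined checkers. A caller outside the package only needs Info()
// (supplied here by an embedded *check.CheckerInfo) and Check(). The
// IsTrue checker below is hypothetical and simply mirrors the structure
// of the built-in checkers in this file.
//
//     type isTrueChecker struct {
//         *check.CheckerInfo
//     }
//
//     // IsTrue succeeds when the obtained value is the bool true.
//     var IsTrue check.Checker = &isTrueChecker{
//         &check.CheckerInfo{Name: "IsTrue", Params: []string{"obtained"}},
//     }
//
//     func (checker *isTrueChecker) Check(params []interface{}, names []string) (bool, string) {
//         value, ok := params[0].(bool)
//         if !ok {
//             return false, "obtained value is not a bool"
//         }
//         return value, ""
//     }
//
//     // Usage: c.Assert(ok, IsTrue)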
+ +type hasLenChecker struct { + *CheckerInfo +} + +// The HasLen checker verifies that the obtained value has the +// provided length. In many cases this is superior to using Equals +// in conjuction with the len function because in case the check +// fails the value itself will be printed, instead of its length, +// providing more details for figuring the problem. +// +// For example: +// +// c.Assert(list, HasLen, 5) +// +var HasLen Checker = &hasLenChecker{ + &CheckerInfo{Name: "HasLen", Params: []string{"obtained", "n"}}, +} + +func (checker *hasLenChecker) Check(params []interface{}, names []string) (result bool, error string) { + n, ok := params[1].(int) + if !ok { + return false, "n must be an int" + } + value := reflect.ValueOf(params[0]) + switch value.Kind() { + case reflect.Map, reflect.Array, reflect.Slice, reflect.Chan, reflect.String: + default: + return false, "obtained value type has no length" + } + return value.Len() == n, "" +} + +// ----------------------------------------------------------------------- +// ErrorMatches checker. + +type errorMatchesChecker struct { + *CheckerInfo +} + +// The ErrorMatches checker verifies that the error value +// is non nil and matches the regular expression provided. +// +// For example: +// +// c.Assert(err, ErrorMatches, "perm.*denied") +// +var ErrorMatches Checker = errorMatchesChecker{ + &CheckerInfo{Name: "ErrorMatches", Params: []string{"value", "regex"}}, +} + +func (checker errorMatchesChecker) Check(params []interface{}, names []string) (result bool, errStr string) { + if params[0] == nil { + return false, "Error value is nil" + } + err, ok := params[0].(error) + if !ok { + return false, "Value is not an error" + } + params[0] = err.Error() + names[0] = "error" + return matches(params[0], params[1]) +} + +// ----------------------------------------------------------------------- +// Matches checker. + +type matchesChecker struct { + *CheckerInfo +} + +// The Matches checker verifies that the string provided as the obtained +// value (or the string resulting from obtained.String()) matches the +// regular expression provided. +// +// For example: +// +// c.Assert(err, Matches, "perm.*denied") +// +var Matches Checker = &matchesChecker{ + &CheckerInfo{Name: "Matches", Params: []string{"value", "regex"}}, +} + +func (checker *matchesChecker) Check(params []interface{}, names []string) (result bool, error string) { + return matches(params[0], params[1]) +} + +func matches(value, regex interface{}) (result bool, error string) { + reStr, ok := regex.(string) + if !ok { + return false, "Regex must be a string" + } + valueStr, valueIsStr := value.(string) + if !valueIsStr { + if valueWithStr, valueHasStr := value.(fmt.Stringer); valueHasStr { + valueStr, valueIsStr = valueWithStr.String(), true + } + } + if valueIsStr { + matches, err := regexp.MatchString("^"+reStr+"$", valueStr) + if err != nil { + return false, "Can't compile regex: " + err.Error() + } + return matches, "" + } + return false, "Obtained value is not a string and has no .String()" +} + +// ----------------------------------------------------------------------- +// Panics checker. + +type panicsChecker struct { + *CheckerInfo +} + +// The Panics checker verifies that calling the provided zero-argument +// function will cause a panic which is deep-equal to the provided value. +// +// For example: +// +// c.Assert(func() { f(1, 2) }, Panics, &SomeErrorType{"BOOM"}). 
+// +// +var Panics Checker = &panicsChecker{ + &CheckerInfo{Name: "Panics", Params: []string{"function", "expected"}}, +} + +func (checker *panicsChecker) Check(params []interface{}, names []string) (result bool, error string) { + f := reflect.ValueOf(params[0]) + if f.Kind() != reflect.Func || f.Type().NumIn() != 0 { + return false, "Function must take zero arguments" + } + defer func() { + // If the function has not panicked, then don't do the check. + if error != "" { + return + } + params[0] = recover() + names[0] = "panic" + result = reflect.DeepEqual(params[0], params[1]) + }() + f.Call(nil) + return false, "Function has not panicked" +} + +type panicMatchesChecker struct { + *CheckerInfo +} + +// The PanicMatches checker verifies that calling the provided zero-argument +// function will cause a panic with an error value matching +// the regular expression provided. +// +// For example: +// +// c.Assert(func() { f(1, 2) }, PanicMatches, `open.*: no such file or directory`). +// +// +var PanicMatches Checker = &panicMatchesChecker{ + &CheckerInfo{Name: "PanicMatches", Params: []string{"function", "expected"}}, +} + +func (checker *panicMatchesChecker) Check(params []interface{}, names []string) (result bool, errmsg string) { + f := reflect.ValueOf(params[0]) + if f.Kind() != reflect.Func || f.Type().NumIn() != 0 { + return false, "Function must take zero arguments" + } + defer func() { + // If the function has not panicked, then don't do the check. + if errmsg != "" { + return + } + obtained := recover() + names[0] = "panic" + if e, ok := obtained.(error); ok { + params[0] = e.Error() + } else if _, ok := obtained.(string); ok { + params[0] = obtained + } else { + errmsg = "Panic value is not a string or an error" + return + } + result, errmsg = matches(params[0], params[1]) + }() + f.Call(nil) + return false, "Function has not panicked" +} + +// ----------------------------------------------------------------------- +// FitsTypeOf checker. + +type fitsTypeChecker struct { + *CheckerInfo +} + +// The FitsTypeOf checker verifies that the obtained value is +// assignable to a variable with the same type as the provided +// sample value. +// +// For example: +// +// c.Assert(value, FitsTypeOf, int64(0)) +// c.Assert(value, FitsTypeOf, os.Error(nil)) +// +var FitsTypeOf Checker = &fitsTypeChecker{ + &CheckerInfo{Name: "FitsTypeOf", Params: []string{"obtained", "sample"}}, +} + +func (checker *fitsTypeChecker) Check(params []interface{}, names []string) (result bool, error string) { + obtained := reflect.ValueOf(params[0]) + sample := reflect.ValueOf(params[1]) + if !obtained.IsValid() { + return false, "" + } + if !sample.IsValid() { + return false, "Invalid sample value" + } + return obtained.Type().AssignableTo(sample.Type()), "" +} + +// ----------------------------------------------------------------------- +// Implements checker. + +type implementsChecker struct { + *CheckerInfo +} + +// The Implements checker verifies that the obtained value +// implements the interface specified via a pointer to an interface +// variable. 
+// +// For example: +// +// var e os.Error +// c.Assert(err, Implements, &e) +// +var Implements Checker = &implementsChecker{ + &CheckerInfo{Name: "Implements", Params: []string{"obtained", "ifaceptr"}}, +} + +func (checker *implementsChecker) Check(params []interface{}, names []string) (result bool, error string) { + obtained := reflect.ValueOf(params[0]) + ifaceptr := reflect.ValueOf(params[1]) + if !obtained.IsValid() { + return false, "" + } + if !ifaceptr.IsValid() || ifaceptr.Kind() != reflect.Ptr || ifaceptr.Elem().Kind() != reflect.Interface { + return false, "ifaceptr should be a pointer to an interface variable" + } + return obtained.Type().Implements(ifaceptr.Elem().Type()), "" +} diff --git a/vendor/gopkg.in/check.v1/checkers_test.go b/vendor/gopkg.in/check.v1/checkers_test.go new file mode 100644 index 0000000..5c69747 --- /dev/null +++ b/vendor/gopkg.in/check.v1/checkers_test.go @@ -0,0 +1,272 @@ +package check_test + +import ( + "errors" + "gopkg.in/check.v1" + "reflect" + "runtime" +) + +type CheckersS struct{} + +var _ = check.Suite(&CheckersS{}) + +func testInfo(c *check.C, checker check.Checker, name string, paramNames []string) { + info := checker.Info() + if info.Name != name { + c.Fatalf("Got name %s, expected %s", info.Name, name) + } + if !reflect.DeepEqual(info.Params, paramNames) { + c.Fatalf("Got param names %#v, expected %#v", info.Params, paramNames) + } +} + +func testCheck(c *check.C, checker check.Checker, result bool, error string, params ...interface{}) ([]interface{}, []string) { + info := checker.Info() + if len(params) != len(info.Params) { + c.Fatalf("unexpected param count in test; expected %d got %d", len(info.Params), len(params)) + } + names := append([]string{}, info.Params...) + result_, error_ := checker.Check(params, names) + if result_ != result || error_ != error { + c.Fatalf("%s.Check(%#v) returned (%#v, %#v) rather than (%#v, %#v)", + info.Name, params, result_, error_, result, error) + } + return params, names +} + +func (s *CheckersS) TestComment(c *check.C) { + bug := check.Commentf("a %d bc", 42) + comment := bug.CheckCommentString() + if comment != "a 42 bc" { + c.Fatalf("Commentf returned %#v", comment) + } +} + +func (s *CheckersS) TestIsNil(c *check.C) { + testInfo(c, check.IsNil, "IsNil", []string{"value"}) + + testCheck(c, check.IsNil, true, "", nil) + testCheck(c, check.IsNil, false, "", "a") + + testCheck(c, check.IsNil, true, "", (chan int)(nil)) + testCheck(c, check.IsNil, false, "", make(chan int)) + testCheck(c, check.IsNil, true, "", (error)(nil)) + testCheck(c, check.IsNil, false, "", errors.New("")) + testCheck(c, check.IsNil, true, "", ([]int)(nil)) + testCheck(c, check.IsNil, false, "", make([]int, 1)) + testCheck(c, check.IsNil, false, "", int(0)) +} + +func (s *CheckersS) TestNotNil(c *check.C) { + testInfo(c, check.NotNil, "NotNil", []string{"value"}) + + testCheck(c, check.NotNil, false, "", nil) + testCheck(c, check.NotNil, true, "", "a") + + testCheck(c, check.NotNil, false, "", (chan int)(nil)) + testCheck(c, check.NotNil, true, "", make(chan int)) + testCheck(c, check.NotNil, false, "", (error)(nil)) + testCheck(c, check.NotNil, true, "", errors.New("")) + testCheck(c, check.NotNil, false, "", ([]int)(nil)) + testCheck(c, check.NotNil, true, "", make([]int, 1)) +} + +func (s *CheckersS) TestNot(c *check.C) { + testInfo(c, check.Not(check.IsNil), "Not(IsNil)", []string{"value"}) + + testCheck(c, check.Not(check.IsNil), false, "", nil) + testCheck(c, check.Not(check.IsNil), true, "", "a") +} + +type simpleStruct 
struct { + i int +} + +func (s *CheckersS) TestEquals(c *check.C) { + testInfo(c, check.Equals, "Equals", []string{"obtained", "expected"}) + + // The simplest. + testCheck(c, check.Equals, true, "", 42, 42) + testCheck(c, check.Equals, false, "", 42, 43) + + // Different native types. + testCheck(c, check.Equals, false, "", int32(42), int64(42)) + + // With nil. + testCheck(c, check.Equals, false, "", 42, nil) + + // Slices + testCheck(c, check.Equals, false, "runtime error: comparing uncomparable type []uint8", []byte{1, 2}, []byte{1, 2}) + + // Struct values + testCheck(c, check.Equals, true, "", simpleStruct{1}, simpleStruct{1}) + testCheck(c, check.Equals, false, "", simpleStruct{1}, simpleStruct{2}) + + // Struct pointers + testCheck(c, check.Equals, false, "", &simpleStruct{1}, &simpleStruct{1}) + testCheck(c, check.Equals, false, "", &simpleStruct{1}, &simpleStruct{2}) +} + +func (s *CheckersS) TestDeepEquals(c *check.C) { + testInfo(c, check.DeepEquals, "DeepEquals", []string{"obtained", "expected"}) + + // The simplest. + testCheck(c, check.DeepEquals, true, "", 42, 42) + testCheck(c, check.DeepEquals, false, "", 42, 43) + + // Different native types. + testCheck(c, check.DeepEquals, false, "", int32(42), int64(42)) + + // With nil. + testCheck(c, check.DeepEquals, false, "", 42, nil) + + // Slices + testCheck(c, check.DeepEquals, true, "", []byte{1, 2}, []byte{1, 2}) + testCheck(c, check.DeepEquals, false, "", []byte{1, 2}, []byte{1, 3}) + + // Struct values + testCheck(c, check.DeepEquals, true, "", simpleStruct{1}, simpleStruct{1}) + testCheck(c, check.DeepEquals, false, "", simpleStruct{1}, simpleStruct{2}) + + // Struct pointers + testCheck(c, check.DeepEquals, true, "", &simpleStruct{1}, &simpleStruct{1}) + testCheck(c, check.DeepEquals, false, "", &simpleStruct{1}, &simpleStruct{2}) +} + +func (s *CheckersS) TestHasLen(c *check.C) { + testInfo(c, check.HasLen, "HasLen", []string{"obtained", "n"}) + + testCheck(c, check.HasLen, true, "", "abcd", 4) + testCheck(c, check.HasLen, true, "", []int{1, 2}, 2) + testCheck(c, check.HasLen, false, "", []int{1, 2}, 3) + + testCheck(c, check.HasLen, false, "n must be an int", []int{1, 2}, "2") + testCheck(c, check.HasLen, false, "obtained value type has no length", nil, 2) +} + +func (s *CheckersS) TestErrorMatches(c *check.C) { + testInfo(c, check.ErrorMatches, "ErrorMatches", []string{"value", "regex"}) + + testCheck(c, check.ErrorMatches, false, "Error value is nil", nil, "some error") + testCheck(c, check.ErrorMatches, false, "Value is not an error", 1, "some error") + testCheck(c, check.ErrorMatches, true, "", errors.New("some error"), "some error") + testCheck(c, check.ErrorMatches, true, "", errors.New("some error"), "so.*or") + + // Verify params mutation + params, names := testCheck(c, check.ErrorMatches, false, "", errors.New("some error"), "other error") + c.Assert(params[0], check.Equals, "some error") + c.Assert(names[0], check.Equals, "error") +} + +func (s *CheckersS) TestMatches(c *check.C) { + testInfo(c, check.Matches, "Matches", []string{"value", "regex"}) + + // Simple matching + testCheck(c, check.Matches, true, "", "abc", "abc") + testCheck(c, check.Matches, true, "", "abc", "a.c") + + // Must match fully + testCheck(c, check.Matches, false, "", "abc", "ab") + testCheck(c, check.Matches, false, "", "abc", "bc") + + // String()-enabled values accepted + testCheck(c, check.Matches, true, "", reflect.ValueOf("abc"), "a.c") + testCheck(c, check.Matches, false, "", reflect.ValueOf("abc"), "a.d") + + // Some error 
conditions. + testCheck(c, check.Matches, false, "Obtained value is not a string and has no .String()", 1, "a.c") + testCheck(c, check.Matches, false, "Can't compile regex: error parsing regexp: missing closing ]: `[c$`", "abc", "a[c") +} + +func (s *CheckersS) TestPanics(c *check.C) { + testInfo(c, check.Panics, "Panics", []string{"function", "expected"}) + + // Some errors. + testCheck(c, check.Panics, false, "Function has not panicked", func() bool { return false }, "BOOM") + testCheck(c, check.Panics, false, "Function must take zero arguments", 1, "BOOM") + + // Plain strings. + testCheck(c, check.Panics, true, "", func() { panic("BOOM") }, "BOOM") + testCheck(c, check.Panics, false, "", func() { panic("KABOOM") }, "BOOM") + testCheck(c, check.Panics, true, "", func() bool { panic("BOOM") }, "BOOM") + + // Error values. + testCheck(c, check.Panics, true, "", func() { panic(errors.New("BOOM")) }, errors.New("BOOM")) + testCheck(c, check.Panics, false, "", func() { panic(errors.New("KABOOM")) }, errors.New("BOOM")) + + type deep struct{ i int } + // Deep value + testCheck(c, check.Panics, true, "", func() { panic(&deep{99}) }, &deep{99}) + + // Verify params/names mutation + params, names := testCheck(c, check.Panics, false, "", func() { panic(errors.New("KABOOM")) }, errors.New("BOOM")) + c.Assert(params[0], check.ErrorMatches, "KABOOM") + c.Assert(names[0], check.Equals, "panic") + + // Verify a nil panic + testCheck(c, check.Panics, true, "", func() { panic(nil) }, nil) + testCheck(c, check.Panics, false, "", func() { panic(nil) }, "NOPE") +} + +func (s *CheckersS) TestPanicMatches(c *check.C) { + testInfo(c, check.PanicMatches, "PanicMatches", []string{"function", "expected"}) + + // Error matching. + testCheck(c, check.PanicMatches, true, "", func() { panic(errors.New("BOOM")) }, "BO.M") + testCheck(c, check.PanicMatches, false, "", func() { panic(errors.New("KABOOM")) }, "BO.M") + + // Some errors. + testCheck(c, check.PanicMatches, false, "Function has not panicked", func() bool { return false }, "BOOM") + testCheck(c, check.PanicMatches, false, "Function must take zero arguments", 1, "BOOM") + + // Plain strings. 
+ testCheck(c, check.PanicMatches, true, "", func() { panic("BOOM") }, "BO.M") + testCheck(c, check.PanicMatches, false, "", func() { panic("KABOOM") }, "BOOM") + testCheck(c, check.PanicMatches, true, "", func() bool { panic("BOOM") }, "BO.M") + + // Verify params/names mutation + params, names := testCheck(c, check.PanicMatches, false, "", func() { panic(errors.New("KABOOM")) }, "BOOM") + c.Assert(params[0], check.Equals, "KABOOM") + c.Assert(names[0], check.Equals, "panic") + + // Verify a nil panic + testCheck(c, check.PanicMatches, false, "Panic value is not a string or an error", func() { panic(nil) }, "") +} + +func (s *CheckersS) TestFitsTypeOf(c *check.C) { + testInfo(c, check.FitsTypeOf, "FitsTypeOf", []string{"obtained", "sample"}) + + // Basic types + testCheck(c, check.FitsTypeOf, true, "", 1, 0) + testCheck(c, check.FitsTypeOf, false, "", 1, int64(0)) + + // Aliases + testCheck(c, check.FitsTypeOf, false, "", 1, errors.New("")) + testCheck(c, check.FitsTypeOf, false, "", "error", errors.New("")) + testCheck(c, check.FitsTypeOf, true, "", errors.New("error"), errors.New("")) + + // Structures + testCheck(c, check.FitsTypeOf, false, "", 1, simpleStruct{}) + testCheck(c, check.FitsTypeOf, false, "", simpleStruct{42}, &simpleStruct{}) + testCheck(c, check.FitsTypeOf, true, "", simpleStruct{42}, simpleStruct{}) + testCheck(c, check.FitsTypeOf, true, "", &simpleStruct{42}, &simpleStruct{}) + + // Some bad values + testCheck(c, check.FitsTypeOf, false, "Invalid sample value", 1, interface{}(nil)) + testCheck(c, check.FitsTypeOf, false, "", interface{}(nil), 0) +} + +func (s *CheckersS) TestImplements(c *check.C) { + testInfo(c, check.Implements, "Implements", []string{"obtained", "ifaceptr"}) + + var e error + var re runtime.Error + testCheck(c, check.Implements, true, "", errors.New(""), &e) + testCheck(c, check.Implements, false, "", errors.New(""), &re) + + // Some bad values + testCheck(c, check.Implements, false, "ifaceptr should be a pointer to an interface variable", 0, errors.New("")) + testCheck(c, check.Implements, false, "ifaceptr should be a pointer to an interface variable", 0, interface{}(nil)) + testCheck(c, check.Implements, false, "", interface{}(nil), &e) +} diff --git a/vendor/gopkg.in/check.v1/export_test.go b/vendor/gopkg.in/check.v1/export_test.go new file mode 100644 index 0000000..abb89a2 --- /dev/null +++ b/vendor/gopkg.in/check.v1/export_test.go @@ -0,0 +1,19 @@ +package check + +import "io" + +func PrintLine(filename string, line int) (string, error) { + return printLine(filename, line) +} + +func Indent(s, with string) string { + return indent(s, with) +} + +func NewOutputWriter(writer io.Writer, stream, verbose bool) *outputWriter { + return newOutputWriter(writer, stream, verbose) +} + +func (c *C) FakeSkip(reason string) { + c.reason = reason +} diff --git a/vendor/gopkg.in/check.v1/fixture_test.go b/vendor/gopkg.in/check.v1/fixture_test.go new file mode 100644 index 0000000..2bff9e1 --- /dev/null +++ b/vendor/gopkg.in/check.v1/fixture_test.go @@ -0,0 +1,484 @@ +// Tests for the behavior of the test fixture system. + +package check_test + +import ( + . "gopkg.in/check.v1" +) + +// ----------------------------------------------------------------------- +// Fixture test suite. + +type FixtureS struct{} + +var fixtureS = Suite(&FixtureS{}) + +func (s *FixtureS) TestCountSuite(c *C) { + suitesRun += 1 +} + +// ----------------------------------------------------------------------- +// Basic fixture ordering verification. 
+ +func (s *FixtureS) TestOrder(c *C) { + helper := FixtureHelper{} + Run(&helper, nil) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "SetUpTest") + c.Check(helper.calls[5], Equals, "Test2") + c.Check(helper.calls[6], Equals, "TearDownTest") + c.Check(helper.calls[7], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 8) +} + +// ----------------------------------------------------------------------- +// Check the behavior when panics occur within tests and fixtures. + +func (s *FixtureS) TestPanicOnTest(c *C) { + helper := FixtureHelper{panicOn: "Test1"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "SetUpTest") + c.Check(helper.calls[5], Equals, "Test2") + c.Check(helper.calls[6], Equals, "TearDownTest") + c.Check(helper.calls[7], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 8) + + expected := "^\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: FixtureHelper.Test1\n\n" + + "\\.\\.\\. Panic: Test1 \\(PC=[xA-F0-9]+\\)\n\n" + + ".+:[0-9]+\n" + + " in (go)?panic\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.trace\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.Test1\n" + + "(.|\n)*$" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnSetUpTest(c *C) { + helper := FixtureHelper{panicOn: "SetUpTest"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "TearDownTest") + c.Check(helper.calls[3], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 4) + + expected := "^\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: " + + "FixtureHelper\\.SetUpTest\n\n" + + "\\.\\.\\. Panic: SetUpTest \\(PC=[xA-F0-9]+\\)\n\n" + + ".+:[0-9]+\n" + + " in (go)?panic\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.trace\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.SetUpTest\n" + + "(.|\n)*" + + "\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: " + + "FixtureHelper\\.Test1\n\n" + + "\\.\\.\\. Panic: Fixture has panicked " + + "\\(see related PANIC\\)\n$" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnTearDownTest(c *C) { + helper := FixtureHelper{panicOn: "TearDownTest"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 5) + + expected := "^\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: " + + "FixtureHelper.TearDownTest\n\n" + + "\\.\\.\\. Panic: TearDownTest \\(PC=[xA-F0-9]+\\)\n\n" + + ".+:[0-9]+\n" + + " in (go)?panic\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.trace\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.TearDownTest\n" + + "(.|\n)*" + + "\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: " + + "FixtureHelper\\.Test1\n\n" + + "\\.\\.\\. 
Panic: Fixture has panicked " + + "\\(see related PANIC\\)\n$" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnSetUpSuite(c *C) { + helper := FixtureHelper{panicOn: "SetUpSuite"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 2) + + expected := "^\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: " + + "FixtureHelper.SetUpSuite\n\n" + + "\\.\\.\\. Panic: SetUpSuite \\(PC=[xA-F0-9]+\\)\n\n" + + ".+:[0-9]+\n" + + " in (go)?panic\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.trace\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.SetUpSuite\n" + + "(.|\n)*$" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnTearDownSuite(c *C) { + helper := FixtureHelper{panicOn: "TearDownSuite"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "SetUpTest") + c.Check(helper.calls[5], Equals, "Test2") + c.Check(helper.calls[6], Equals, "TearDownTest") + c.Check(helper.calls[7], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 8) + + expected := "^\n-+\n" + + "PANIC: check_test\\.go:[0-9]+: " + + "FixtureHelper.TearDownSuite\n\n" + + "\\.\\.\\. Panic: TearDownSuite \\(PC=[xA-F0-9]+\\)\n\n" + + ".+:[0-9]+\n" + + " in (go)?panic\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.trace\n" + + ".*check_test.go:[0-9]+\n" + + " in FixtureHelper.TearDownSuite\n" + + "(.|\n)*$" + + c.Check(output.value, Matches, expected) +} + +// ----------------------------------------------------------------------- +// A wrong argument on a test or fixture will produce a nice error. + +func (s *FixtureS) TestPanicOnWrongTestArg(c *C) { + helper := WrongTestArgHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "TearDownTest") + c.Check(helper.calls[3], Equals, "SetUpTest") + c.Check(helper.calls[4], Equals, "Test2") + c.Check(helper.calls[5], Equals, "TearDownTest") + c.Check(helper.calls[6], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 7) + + expected := "^\n-+\n" + + "PANIC: fixture_test\\.go:[0-9]+: " + + "WrongTestArgHelper\\.Test1\n\n" + + "\\.\\.\\. Panic: WrongTestArgHelper\\.Test1 argument " + + "should be \\*check\\.C\n" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnWrongSetUpTestArg(c *C) { + helper := WrongSetUpTestArgHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(len(helper.calls), Equals, 0) + + expected := + "^\n-+\n" + + "PANIC: fixture_test\\.go:[0-9]+: " + + "WrongSetUpTestArgHelper\\.SetUpTest\n\n" + + "\\.\\.\\. Panic: WrongSetUpTestArgHelper\\.SetUpTest argument " + + "should be \\*check\\.C\n" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnWrongSetUpSuiteArg(c *C) { + helper := WrongSetUpSuiteArgHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(len(helper.calls), Equals, 0) + + expected := + "^\n-+\n" + + "PANIC: fixture_test\\.go:[0-9]+: " + + "WrongSetUpSuiteArgHelper\\.SetUpSuite\n\n" + + "\\.\\.\\. 
Panic: WrongSetUpSuiteArgHelper\\.SetUpSuite argument " + + "should be \\*check\\.C\n" + + c.Check(output.value, Matches, expected) +} + +// ----------------------------------------------------------------------- +// Nice errors also when tests or fixture have wrong arg count. + +func (s *FixtureS) TestPanicOnWrongTestArgCount(c *C) { + helper := WrongTestArgCountHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "TearDownTest") + c.Check(helper.calls[3], Equals, "SetUpTest") + c.Check(helper.calls[4], Equals, "Test2") + c.Check(helper.calls[5], Equals, "TearDownTest") + c.Check(helper.calls[6], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 7) + + expected := "^\n-+\n" + + "PANIC: fixture_test\\.go:[0-9]+: " + + "WrongTestArgCountHelper\\.Test1\n\n" + + "\\.\\.\\. Panic: WrongTestArgCountHelper\\.Test1 argument " + + "should be \\*check\\.C\n" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnWrongSetUpTestArgCount(c *C) { + helper := WrongSetUpTestArgCountHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(len(helper.calls), Equals, 0) + + expected := + "^\n-+\n" + + "PANIC: fixture_test\\.go:[0-9]+: " + + "WrongSetUpTestArgCountHelper\\.SetUpTest\n\n" + + "\\.\\.\\. Panic: WrongSetUpTestArgCountHelper\\.SetUpTest argument " + + "should be \\*check\\.C\n" + + c.Check(output.value, Matches, expected) +} + +func (s *FixtureS) TestPanicOnWrongSetUpSuiteArgCount(c *C) { + helper := WrongSetUpSuiteArgCountHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(len(helper.calls), Equals, 0) + + expected := + "^\n-+\n" + + "PANIC: fixture_test\\.go:[0-9]+: " + + "WrongSetUpSuiteArgCountHelper\\.SetUpSuite\n\n" + + "\\.\\.\\. Panic: WrongSetUpSuiteArgCountHelper" + + "\\.SetUpSuite argument should be \\*check\\.C\n" + + c.Check(output.value, Matches, expected) +} + +// ----------------------------------------------------------------------- +// Helper test suites with wrong function arguments. + +type WrongTestArgHelper struct { + FixtureHelper +} + +func (s *WrongTestArgHelper) Test1(t int) { +} + +type WrongSetUpTestArgHelper struct { + FixtureHelper +} + +func (s *WrongSetUpTestArgHelper) SetUpTest(t int) { +} + +type WrongSetUpSuiteArgHelper struct { + FixtureHelper +} + +func (s *WrongSetUpSuiteArgHelper) SetUpSuite(t int) { +} + +type WrongTestArgCountHelper struct { + FixtureHelper +} + +func (s *WrongTestArgCountHelper) Test1(c *C, i int) { +} + +type WrongSetUpTestArgCountHelper struct { + FixtureHelper +} + +func (s *WrongSetUpTestArgCountHelper) SetUpTest(c *C, i int) { +} + +type WrongSetUpSuiteArgCountHelper struct { + FixtureHelper +} + +func (s *WrongSetUpSuiteArgCountHelper) SetUpSuite(c *C, i int) { +} + +// ----------------------------------------------------------------------- +// Ensure fixture doesn't run without tests. 
+ +type NoTestsHelper struct { + hasRun bool +} + +func (s *NoTestsHelper) SetUpSuite(c *C) { + s.hasRun = true +} + +func (s *NoTestsHelper) TearDownSuite(c *C) { + s.hasRun = true +} + +func (s *FixtureS) TestFixtureDoesntRunWithoutTests(c *C) { + helper := NoTestsHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Check(helper.hasRun, Equals, false) +} + +// ----------------------------------------------------------------------- +// Verify that checks and assertions work correctly inside the fixture. + +type FixtureCheckHelper struct { + fail string + completed bool +} + +func (s *FixtureCheckHelper) SetUpSuite(c *C) { + switch s.fail { + case "SetUpSuiteAssert": + c.Assert(false, Equals, true) + case "SetUpSuiteCheck": + c.Check(false, Equals, true) + } + s.completed = true +} + +func (s *FixtureCheckHelper) SetUpTest(c *C) { + switch s.fail { + case "SetUpTestAssert": + c.Assert(false, Equals, true) + case "SetUpTestCheck": + c.Check(false, Equals, true) + } + s.completed = true +} + +func (s *FixtureCheckHelper) Test(c *C) { + // Do nothing. +} + +func (s *FixtureS) TestSetUpSuiteCheck(c *C) { + helper := FixtureCheckHelper{fail: "SetUpSuiteCheck"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Assert(output.value, Matches, + "\n---+\n"+ + "FAIL: fixture_test\\.go:[0-9]+: "+ + "FixtureCheckHelper\\.SetUpSuite\n\n"+ + "fixture_test\\.go:[0-9]+:\n"+ + " c\\.Check\\(false, Equals, true\\)\n"+ + "\\.+ obtained bool = false\n"+ + "\\.+ expected bool = true\n\n") + c.Assert(helper.completed, Equals, true) +} + +func (s *FixtureS) TestSetUpSuiteAssert(c *C) { + helper := FixtureCheckHelper{fail: "SetUpSuiteAssert"} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Assert(output.value, Matches, + "\n---+\n"+ + "FAIL: fixture_test\\.go:[0-9]+: "+ + "FixtureCheckHelper\\.SetUpSuite\n\n"+ + "fixture_test\\.go:[0-9]+:\n"+ + " c\\.Assert\\(false, Equals, true\\)\n"+ + "\\.+ obtained bool = false\n"+ + "\\.+ expected bool = true\n\n") + c.Assert(helper.completed, Equals, false) +} + +// ----------------------------------------------------------------------- +// Verify that logging within SetUpTest() persists within the test log itself. + +type FixtureLogHelper struct { + c *C +} + +func (s *FixtureLogHelper) SetUpTest(c *C) { + s.c = c + c.Log("1") +} + +func (s *FixtureLogHelper) Test(c *C) { + c.Log("2") + s.c.Log("3") + c.Log("4") + c.Fail() +} + +func (s *FixtureLogHelper) TearDownTest(c *C) { + s.c.Log("5") +} + +func (s *FixtureS) TestFixtureLogging(c *C) { + helper := FixtureLogHelper{} + output := String{} + Run(&helper, &RunConf{Output: &output}) + c.Assert(output.value, Matches, + "\n---+\n"+ + "FAIL: fixture_test\\.go:[0-9]+: "+ + "FixtureLogHelper\\.Test\n\n"+ + "1\n2\n3\n4\n5\n") +} + +// ----------------------------------------------------------------------- +// Skip() within fixture methods. 
+ +func (s *FixtureS) TestSkipSuite(c *C) { + helper := FixtureHelper{skip: true, skipOnN: 0} + output := String{} + result := Run(&helper, &RunConf{Output: &output}) + c.Assert(output.value, Equals, "") + c.Assert(helper.calls[0], Equals, "SetUpSuite") + c.Assert(helper.calls[1], Equals, "TearDownSuite") + c.Assert(len(helper.calls), Equals, 2) + c.Assert(result.Skipped, Equals, 2) +} + +func (s *FixtureS) TestSkipTest(c *C) { + helper := FixtureHelper{skip: true, skipOnN: 1} + output := String{} + result := Run(&helper, &RunConf{Output: &output}) + c.Assert(helper.calls[0], Equals, "SetUpSuite") + c.Assert(helper.calls[1], Equals, "SetUpTest") + c.Assert(helper.calls[2], Equals, "SetUpTest") + c.Assert(helper.calls[3], Equals, "Test2") + c.Assert(helper.calls[4], Equals, "TearDownTest") + c.Assert(helper.calls[5], Equals, "TearDownSuite") + c.Assert(len(helper.calls), Equals, 6) + c.Assert(result.Skipped, Equals, 1) +} diff --git a/vendor/gopkg.in/check.v1/foundation_test.go b/vendor/gopkg.in/check.v1/foundation_test.go new file mode 100644 index 0000000..8ecf791 --- /dev/null +++ b/vendor/gopkg.in/check.v1/foundation_test.go @@ -0,0 +1,335 @@ +// These tests check that the foundations of gocheck are working properly. +// They already assume that fundamental failing is working already, though, +// since this was tested in bootstrap_test.go. Even then, some care may +// still have to be taken when using external functions, since they should +// of course not rely on functionality tested here. + +package check_test + +import ( + "fmt" + "gopkg.in/check.v1" + "log" + "os" + "regexp" + "strings" +) + +// ----------------------------------------------------------------------- +// Foundation test suite. + +type FoundationS struct{} + +var foundationS = check.Suite(&FoundationS{}) + +func (s *FoundationS) TestCountSuite(c *check.C) { + suitesRun += 1 +} + +func (s *FoundationS) TestErrorf(c *check.C) { + // Do not use checkState() here. It depends on Errorf() working. + expectedLog := fmt.Sprintf("foundation_test.go:%d:\n"+ + " c.Errorf(\"Error %%v!\", \"message\")\n"+ + "... Error: Error message!\n\n", + getMyLine()+1) + c.Errorf("Error %v!", "message") + failed := c.Failed() + c.Succeed() + if log := c.GetTestLog(); log != expectedLog { + c.Logf("Errorf() logged %#v rather than %#v", log, expectedLog) + c.Fail() + } + if !failed { + c.Logf("Errorf() didn't put the test in a failed state") + c.Fail() + } +} + +func (s *FoundationS) TestError(c *check.C) { + expectedLog := fmt.Sprintf("foundation_test.go:%d:\n"+ + " c\\.Error\\(\"Error \", \"message!\"\\)\n"+ + "\\.\\.\\. 
Error: Error message!\n\n", + getMyLine()+1) + c.Error("Error ", "message!") + checkState(c, nil, + &expectedState{ + name: "Error(`Error `, `message!`)", + failed: true, + log: expectedLog, + }) +} + +func (s *FoundationS) TestFailNow(c *check.C) { + defer (func() { + if !c.Failed() { + c.Error("FailNow() didn't fail the test") + } else { + c.Succeed() + if c.GetTestLog() != "" { + c.Error("Something got logged:\n" + c.GetTestLog()) + } + } + })() + + c.FailNow() + c.Log("FailNow() didn't stop the test") +} + +func (s *FoundationS) TestSucceedNow(c *check.C) { + defer (func() { + if c.Failed() { + c.Error("SucceedNow() didn't succeed the test") + } + if c.GetTestLog() != "" { + c.Error("Something got logged:\n" + c.GetTestLog()) + } + })() + + c.Fail() + c.SucceedNow() + c.Log("SucceedNow() didn't stop the test") +} + +func (s *FoundationS) TestFailureHeader(c *check.C) { + output := String{} + failHelper := FailHelper{} + check.Run(&failHelper, &check.RunConf{Output: &output}) + header := fmt.Sprintf(""+ + "\n-----------------------------------"+ + "-----------------------------------\n"+ + "FAIL: check_test.go:%d: FailHelper.TestLogAndFail\n", + failHelper.testLine) + if strings.Index(output.value, header) == -1 { + c.Errorf(""+ + "Failure didn't print a proper header.\n"+ + "... Got:\n%s... Expected something with:\n%s", + output.value, header) + } +} + +func (s *FoundationS) TestFatal(c *check.C) { + var line int + defer (func() { + if !c.Failed() { + c.Error("Fatal() didn't fail the test") + } else { + c.Succeed() + expected := fmt.Sprintf("foundation_test.go:%d:\n"+ + " c.Fatal(\"Die \", \"now!\")\n"+ + "... Error: Die now!\n\n", + line) + if c.GetTestLog() != expected { + c.Error("Incorrect log:", c.GetTestLog()) + } + } + })() + + line = getMyLine() + 1 + c.Fatal("Die ", "now!") + c.Log("Fatal() didn't stop the test") +} + +func (s *FoundationS) TestFatalf(c *check.C) { + var line int + defer (func() { + if !c.Failed() { + c.Error("Fatalf() didn't fail the test") + } else { + c.Succeed() + expected := fmt.Sprintf("foundation_test.go:%d:\n"+ + " c.Fatalf(\"Die %%s!\", \"now\")\n"+ + "... Error: Die now!\n\n", + line) + if c.GetTestLog() != expected { + c.Error("Incorrect log:", c.GetTestLog()) + } + } + })() + + line = getMyLine() + 1 + c.Fatalf("Die %s!", "now") + c.Log("Fatalf() didn't stop the test") +} + +func (s *FoundationS) TestCallerLoggingInsideTest(c *check.C) { + log := fmt.Sprintf(""+ + "foundation_test.go:%d:\n"+ + " result := c.Check\\(10, check.Equals, 20\\)\n"+ + "\\.\\.\\. obtained int = 10\n"+ + "\\.\\.\\. expected int = 20\n\n", + getMyLine()+1) + result := c.Check(10, check.Equals, 20) + checkState(c, result, + &expectedState{ + name: "Check(10, Equals, 20)", + result: false, + failed: true, + log: log, + }) +} + +func (s *FoundationS) TestCallerLoggingInDifferentFile(c *check.C) { + result, line := checkEqualWrapper(c, 10, 20) + testLine := getMyLine() - 1 + log := fmt.Sprintf(""+ + "foundation_test.go:%d:\n"+ + " result, line := checkEqualWrapper\\(c, 10, 20\\)\n"+ + "check_test.go:%d:\n"+ + " return c.Check\\(obtained, check.Equals, expected\\), getMyLine\\(\\)\n"+ + "\\.\\.\\. obtained int = 10\n"+ + "\\.\\.\\. expected int = 20\n\n", + testLine, line) + checkState(c, result, + &expectedState{ + name: "Check(10, Equals, 20)", + result: false, + failed: true, + log: log, + }) +} + +// ----------------------------------------------------------------------- +// ExpectFailure() inverts the logic of failure. 
+ +type ExpectFailureSucceedHelper struct{} + +func (s *ExpectFailureSucceedHelper) TestSucceed(c *check.C) { + c.ExpectFailure("It booms!") + c.Error("Boom!") +} + +type ExpectFailureFailHelper struct{} + +func (s *ExpectFailureFailHelper) TestFail(c *check.C) { + c.ExpectFailure("Bug #XYZ") +} + +func (s *FoundationS) TestExpectFailureFail(c *check.C) { + helper := ExpectFailureFailHelper{} + output := String{} + result := check.Run(&helper, &check.RunConf{Output: &output}) + + expected := "" + + "^\n-+\n" + + "FAIL: foundation_test\\.go:[0-9]+:" + + " ExpectFailureFailHelper\\.TestFail\n\n" + + "\\.\\.\\. Error: Test succeeded, but was expected to fail\n" + + "\\.\\.\\. Reason: Bug #XYZ\n$" + + matched, err := regexp.MatchString(expected, output.value) + if err != nil { + c.Error("Bad expression: ", expected) + } else if !matched { + c.Error("ExpectFailure() didn't log properly:\n", output.value) + } + + c.Assert(result.ExpectedFailures, check.Equals, 0) +} + +func (s *FoundationS) TestExpectFailureSucceed(c *check.C) { + helper := ExpectFailureSucceedHelper{} + output := String{} + result := check.Run(&helper, &check.RunConf{Output: &output}) + + c.Assert(output.value, check.Equals, "") + c.Assert(result.ExpectedFailures, check.Equals, 1) +} + +func (s *FoundationS) TestExpectFailureSucceedVerbose(c *check.C) { + helper := ExpectFailureSucceedHelper{} + output := String{} + result := check.Run(&helper, &check.RunConf{Output: &output, Verbose: true}) + + expected := "" + + "FAIL EXPECTED: foundation_test\\.go:[0-9]+:" + + " ExpectFailureSucceedHelper\\.TestSucceed \\(It booms!\\)\t *[.0-9]+s\n" + + matched, err := regexp.MatchString(expected, output.value) + if err != nil { + c.Error("Bad expression: ", expected) + } else if !matched { + c.Error("ExpectFailure() didn't log properly:\n", output.value) + } + + c.Assert(result.ExpectedFailures, check.Equals, 1) +} + +// ----------------------------------------------------------------------- +// Skip() allows stopping a test without positive/negative results. + +type SkipTestHelper struct{} + +func (s *SkipTestHelper) TestFail(c *check.C) { + c.Skip("Wrong platform or whatever") + c.Error("Boom!") +} + +func (s *FoundationS) TestSkip(c *check.C) { + helper := SkipTestHelper{} + output := String{} + check.Run(&helper, &check.RunConf{Output: &output}) + + if output.value != "" { + c.Error("Skip() logged something:\n", output.value) + } +} + +func (s *FoundationS) TestSkipVerbose(c *check.C) { + helper := SkipTestHelper{} + output := String{} + check.Run(&helper, &check.RunConf{Output: &output, Verbose: true}) + + expected := "SKIP: foundation_test\\.go:[0-9]+: SkipTestHelper\\.TestFail" + + " \\(Wrong platform or whatever\\)" + matched, err := regexp.MatchString(expected, output.value) + if err != nil { + c.Error("Bad expression: ", expected) + } else if !matched { + c.Error("Skip() didn't log properly:\n", output.value) + } +} + +// ----------------------------------------------------------------------- +// Check minimum *log.Logger interface provided by *check.C. 
+
+type minLogger interface {
+	Output(calldepth int, s string) error
+}
+
+func (s *BootstrapS) TestMinLogger(c *check.C) {
+	var logger minLogger
+	logger = log.New(os.Stderr, "", 0)
+	logger = c
+	logger.Output(0, "Hello there")
+	expected := `\[LOG\] [0-9]+:[0-9][0-9]\.[0-9][0-9][0-9] +Hello there\n`
+	output := c.GetTestLog()
+	c.Assert(output, check.Matches, expected)
+}
+
+// -----------------------------------------------------------------------
+// Ensure that suites with embedded types are working fine, including
+// the workaround for issue 906.
+
+type EmbeddedInternalS struct {
+	called bool
+}
+
+type EmbeddedS struct {
+	EmbeddedInternalS
+}
+
+var embeddedS = check.Suite(&EmbeddedS{})
+
+func (s *EmbeddedS) TestCountSuite(c *check.C) {
+	suitesRun += 1
+}
+
+func (s *EmbeddedInternalS) TestMethod(c *check.C) {
+	c.Error("TestMethod() of the embedded type was called!?")
+}
+
+func (s *EmbeddedS) TestMethod(c *check.C) {
+	// http://code.google.com/p/go/issues/detail?id=906
+	c.Check(s.called, check.Equals, false) // Go issue 906 is affecting the runner?
+	s.called = true
+}
diff --git a/vendor/gopkg.in/check.v1/helpers.go b/vendor/gopkg.in/check.v1/helpers.go
new file mode 100644
index 0000000..58a733b
--- /dev/null
+++ b/vendor/gopkg.in/check.v1/helpers.go
@@ -0,0 +1,231 @@
+package check
+
+import (
+	"fmt"
+	"strings"
+	"time"
+)
+
+// TestName returns the current test name in the form "SuiteName.TestName"
+func (c *C) TestName() string {
+	return c.testName
+}
+
+// -----------------------------------------------------------------------
+// Basic succeeding/failing logic.
+
+// Failed returns whether the currently running test has already failed.
+func (c *C) Failed() bool {
+	return c.status() == failedSt
+}
+
+// Fail marks the currently running test as failed.
+//
+// Something ought to have been previously logged so the developer can tell
+// what went wrong. The higher level helper functions will fail the test
+// and do the logging properly.
+func (c *C) Fail() {
+	c.setStatus(failedSt)
+}
+
+// FailNow marks the currently running test as failed and stops running it.
+// Something ought to have been previously logged so the developer can tell
+// what went wrong. The higher level helper functions will fail the test
+// and do the logging properly.
+func (c *C) FailNow() {
+	c.Fail()
+	c.stopNow()
+}
+
+// Succeed marks the currently running test as succeeded, undoing any
+// previous failures.
+func (c *C) Succeed() {
+	c.setStatus(succeededSt)
+}
+
+// SucceedNow marks the currently running test as succeeded, undoing any
+// previous failures, and stops running the test.
+func (c *C) SucceedNow() {
+	c.Succeed()
+	c.stopNow()
+}
+
+// ExpectFailure informs that the running test is knowingly broken for
+// the provided reason. If the test does not fail, an error will be reported
+// to raise attention to this fact. This method is useful to temporarily
+// disable tests which cover well known problems until a better time to
+// fix the problem is found, without forgetting about the fact that a
+// failure still exists.
+func (c *C) ExpectFailure(reason string) {
+	if reason == "" {
+		panic("Missing reason why the test is expected to fail")
+	}
+	c.mustFail = true
+	c.reason = reason
+}
+
+// Skip skips the running test for the provided reason. If run from within
+// SetUpTest, the individual test being set up will be skipped, and if run
+// from within SetUpSuite, the whole suite is skipped.
+func (c *C) Skip(reason string) {
+	if reason == "" {
+		panic("Missing reason why the test is being skipped")
+	}
+	c.reason = reason
+	c.setStatus(skippedSt)
+	c.stopNow()
+}
+
+// -----------------------------------------------------------------------
+// Basic logging.
+
+// GetTestLog returns the current test error output.
+func (c *C) GetTestLog() string {
+	return c.logb.String()
+}
+
+// Log logs some information into the test error output.
+// The provided arguments are assembled together into a string with fmt.Sprint.
+func (c *C) Log(args ...interface{}) {
+	c.log(args...)
+}
+
+// Logf logs some information into the test error output.
+// The provided arguments are assembled together into a string with fmt.Sprintf.
+func (c *C) Logf(format string, args ...interface{}) {
+	c.logf(format, args...)
+}
+
+// Output enables *C to be used as a logger in functions that require only
+// the minimum interface of *log.Logger.
+func (c *C) Output(calldepth int, s string) error {
+	d := time.Now().Sub(c.startTime)
+	msec := d / time.Millisecond
+	sec := d / time.Second
+	min := d / time.Minute
+
+	c.Logf("[LOG] %d:%02d.%03d %s", min, sec%60, msec%1000, s)
+	return nil
+}
+
+// Error logs an error into the test error output and marks the test as failed.
+// The provided arguments are assembled together into a string with fmt.Sprint.
+func (c *C) Error(args ...interface{}) {
+	c.logCaller(1)
+	c.logString(fmt.Sprint("Error: ", fmt.Sprint(args...)))
+	c.logNewLine()
+	c.Fail()
+}
+
+// Errorf logs an error into the test error output and marks the test as failed.
+// The provided arguments are assembled together into a string with fmt.Sprintf.
+func (c *C) Errorf(format string, args ...interface{}) {
+	c.logCaller(1)
+	c.logString(fmt.Sprintf("Error: "+format, args...))
+	c.logNewLine()
+	c.Fail()
+}
+
+// Fatal logs an error into the test error output, marks the test as failed, and
+// stops the test execution. The provided arguments are assembled together into
+// a string with fmt.Sprint.
+func (c *C) Fatal(args ...interface{}) {
+	c.logCaller(1)
+	c.logString(fmt.Sprint("Error: ", fmt.Sprint(args...)))
+	c.logNewLine()
+	c.FailNow()
+}
+
+// Fatalf logs an error into the test error output, marks the test as failed, and
+// stops the test execution. The provided arguments are assembled together into
+// a string with fmt.Sprintf.
+func (c *C) Fatalf(format string, args ...interface{}) {
+	c.logCaller(1)
+	c.logString(fmt.Sprint("Error: ", fmt.Sprintf(format, args...)))
+	c.logNewLine()
+	c.FailNow()
+}
+
+// -----------------------------------------------------------------------
+// Generic checks and assertions based on checkers.
+
+// Check verifies if the first value matches the expected value according
+// to the provided checker. If they do not match, an error is logged, the
+// test is marked as failed, and the test execution continues.
+//
+// Some checkers may not need the expected argument (e.g. IsNil).
+//
+// Extra arguments provided to the function are logged next to the reported
+// problem when the matching fails.
+func (c *C) Check(obtained interface{}, checker Checker, args ...interface{}) bool {
+	return c.internalCheck("Check", obtained, checker, args...)
+}
+
+// Assert ensures that the first value matches the expected value according
+// to the provided checker. If they do not match, an error is logged, the
+// test is marked as failed, and the test execution stops.
+//
+// Some checkers may not need the expected argument (e.g. IsNil).
+// +// Extra arguments provided to the function are logged next to the reported +// problem when the matching fails. +func (c *C) Assert(obtained interface{}, checker Checker, args ...interface{}) { + if !c.internalCheck("Assert", obtained, checker, args...) { + c.stopNow() + } +} + +func (c *C) internalCheck(funcName string, obtained interface{}, checker Checker, args ...interface{}) bool { + if checker == nil { + c.logCaller(2) + c.logString(fmt.Sprintf("%s(obtained, nil!?, ...):", funcName)) + c.logString("Oops.. you've provided a nil checker!") + c.logNewLine() + c.Fail() + return false + } + + // If the last argument is a bug info, extract it out. + var comment CommentInterface + if len(args) > 0 { + if c, ok := args[len(args)-1].(CommentInterface); ok { + comment = c + args = args[:len(args)-1] + } + } + + params := append([]interface{}{obtained}, args...) + info := checker.Info() + + if len(params) != len(info.Params) { + names := append([]string{info.Params[0], info.Name}, info.Params[1:]...) + c.logCaller(2) + c.logString(fmt.Sprintf("%s(%s):", funcName, strings.Join(names, ", "))) + c.logString(fmt.Sprintf("Wrong number of parameters for %s: want %d, got %d", info.Name, len(names), len(params)+1)) + c.logNewLine() + c.Fail() + return false + } + + // Copy since it may be mutated by Check. + names := append([]string{}, info.Params...) + + // Do the actual check. + result, error := checker.Check(params, names) + if !result || error != "" { + c.logCaller(2) + for i := 0; i != len(params); i++ { + c.logValue(names[i], params[i]) + } + if comment != nil { + c.logString(comment.CheckCommentString()) + } + if error != "" { + c.logString(error) + } + c.logNewLine() + c.Fail() + return false + } + return true +} diff --git a/vendor/gopkg.in/check.v1/helpers_test.go b/vendor/gopkg.in/check.v1/helpers_test.go new file mode 100644 index 0000000..4baa656 --- /dev/null +++ b/vendor/gopkg.in/check.v1/helpers_test.go @@ -0,0 +1,519 @@ +// These tests verify the inner workings of the helper methods associated +// with check.T. + +package check_test + +import ( + "gopkg.in/check.v1" + "os" + "reflect" + "runtime" + "sync" +) + +var helpersS = check.Suite(&HelpersS{}) + +type HelpersS struct{} + +func (s *HelpersS) TestCountSuite(c *check.C) { + suitesRun += 1 +} + +// ----------------------------------------------------------------------- +// Fake checker and bug info to verify the behavior of Assert() and Check(). + +type MyChecker struct { + info *check.CheckerInfo + params []interface{} + names []string + result bool + error string +} + +func (checker *MyChecker) Info() *check.CheckerInfo { + if checker.info == nil { + return &check.CheckerInfo{Name: "MyChecker", Params: []string{"myobtained", "myexpected"}} + } + return checker.info +} + +func (checker *MyChecker) Check(params []interface{}, names []string) (bool, string) { + rparams := checker.params + rnames := checker.names + checker.params = append([]interface{}{}, params...) + checker.names = append([]string{}, names...) + if rparams != nil { + copy(params, rparams) + } + if rnames != nil { + copy(names, rnames) + } + return checker.result, checker.error +} + +type myCommentType string + +func (c myCommentType) CheckCommentString() string { + return string(c) +} + +func myComment(s string) myCommentType { + return myCommentType(s) +} + +// ----------------------------------------------------------------------- +// Ensure a real checker actually works fine. 
+ +func (s *HelpersS) TestCheckerInterface(c *check.C) { + testHelperSuccess(c, "Check(1, Equals, 1)", true, func() interface{} { + return c.Check(1, check.Equals, 1) + }) +} + +// ----------------------------------------------------------------------- +// Tests for Check(), mostly the same as for Assert() following these. + +func (s *HelpersS) TestCheckSucceedWithExpected(c *check.C) { + checker := &MyChecker{result: true} + testHelperSuccess(c, "Check(1, checker, 2)", true, func() interface{} { + return c.Check(1, checker, 2) + }) + if !reflect.DeepEqual(checker.params, []interface{}{1, 2}) { + c.Fatalf("Bad params for check: %#v", checker.params) + } +} + +func (s *HelpersS) TestCheckSucceedWithoutExpected(c *check.C) { + checker := &MyChecker{result: true, info: &check.CheckerInfo{Params: []string{"myvalue"}}} + testHelperSuccess(c, "Check(1, checker)", true, func() interface{} { + return c.Check(1, checker) + }) + if !reflect.DeepEqual(checker.params, []interface{}{1}) { + c.Fatalf("Bad params for check: %#v", checker.params) + } +} + +func (s *HelpersS) TestCheckFailWithExpected(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker, 2\\)\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n\n" + testHelperFailure(c, "Check(1, checker, 2)", false, false, log, + func() interface{} { + return c.Check(1, checker, 2) + }) +} + +func (s *HelpersS) TestCheckFailWithExpectedAndComment(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker, 2, myComment\\(\"Hello world!\"\\)\\)\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n" + + "\\.+ Hello world!\n\n" + testHelperFailure(c, "Check(1, checker, 2, msg)", false, false, log, + func() interface{} { + return c.Check(1, checker, 2, myComment("Hello world!")) + }) +} + +func (s *HelpersS) TestCheckFailWithExpectedAndStaticComment(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " // Nice leading comment\\.\n" + + " return c\\.Check\\(1, checker, 2\\) // Hello there\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n\n" + testHelperFailure(c, "Check(1, checker, 2, msg)", false, false, log, + func() interface{} { + // Nice leading comment. 
+ return c.Check(1, checker, 2) // Hello there + }) +} + +func (s *HelpersS) TestCheckFailWithoutExpected(c *check.C) { + checker := &MyChecker{result: false, info: &check.CheckerInfo{Params: []string{"myvalue"}}} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker\\)\n" + + "\\.+ myvalue int = 1\n\n" + testHelperFailure(c, "Check(1, checker)", false, false, log, + func() interface{} { + return c.Check(1, checker) + }) +} + +func (s *HelpersS) TestCheckFailWithoutExpectedAndMessage(c *check.C) { + checker := &MyChecker{result: false, info: &check.CheckerInfo{Params: []string{"myvalue"}}} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker, myComment\\(\"Hello world!\"\\)\\)\n" + + "\\.+ myvalue int = 1\n" + + "\\.+ Hello world!\n\n" + testHelperFailure(c, "Check(1, checker, msg)", false, false, log, + func() interface{} { + return c.Check(1, checker, myComment("Hello world!")) + }) +} + +func (s *HelpersS) TestCheckWithMissingExpected(c *check.C) { + checker := &MyChecker{result: true} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker\\)\n" + + "\\.+ Check\\(myobtained, MyChecker, myexpected\\):\n" + + "\\.+ Wrong number of parameters for MyChecker: " + + "want 3, got 2\n\n" + testHelperFailure(c, "Check(1, checker, !?)", false, false, log, + func() interface{} { + return c.Check(1, checker) + }) +} + +func (s *HelpersS) TestCheckWithTooManyExpected(c *check.C) { + checker := &MyChecker{result: true} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker, 2, 3\\)\n" + + "\\.+ Check\\(myobtained, MyChecker, myexpected\\):\n" + + "\\.+ Wrong number of parameters for MyChecker: " + + "want 3, got 4\n\n" + testHelperFailure(c, "Check(1, checker, 2, 3)", false, false, log, + func() interface{} { + return c.Check(1, checker, 2, 3) + }) +} + +func (s *HelpersS) TestCheckWithError(c *check.C) { + checker := &MyChecker{result: false, error: "Some not so cool data provided!"} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker, 2\\)\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n" + + "\\.+ Some not so cool data provided!\n\n" + testHelperFailure(c, "Check(1, checker, 2)", false, false, log, + func() interface{} { + return c.Check(1, checker, 2) + }) +} + +func (s *HelpersS) TestCheckWithNilChecker(c *check.C) { + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, nil\\)\n" + + "\\.+ Check\\(obtained, nil!\\?, \\.\\.\\.\\):\n" + + "\\.+ Oops\\.\\. you've provided a nil checker!\n\n" + testHelperFailure(c, "Check(obtained, nil)", false, false, log, + func() interface{} { + return c.Check(1, nil) + }) +} + +func (s *HelpersS) TestCheckWithParamsAndNamesMutation(c *check.C) { + checker := &MyChecker{result: false, params: []interface{}{3, 4}, names: []string{"newobtained", "newexpected"}} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " return c\\.Check\\(1, checker, 2\\)\n" + + "\\.+ newobtained int = 3\n" + + "\\.+ newexpected int = 4\n\n" + testHelperFailure(c, "Check(1, checker, 2) with mutation", false, false, log, + func() interface{} { + return c.Check(1, checker, 2) + }) +} + +// ----------------------------------------------------------------------- +// Tests for Assert(), mostly the same as for Check() above. 
+ +func (s *HelpersS) TestAssertSucceedWithExpected(c *check.C) { + checker := &MyChecker{result: true} + testHelperSuccess(c, "Assert(1, checker, 2)", nil, func() interface{} { + c.Assert(1, checker, 2) + return nil + }) + if !reflect.DeepEqual(checker.params, []interface{}{1, 2}) { + c.Fatalf("Bad params for check: %#v", checker.params) + } +} + +func (s *HelpersS) TestAssertSucceedWithoutExpected(c *check.C) { + checker := &MyChecker{result: true, info: &check.CheckerInfo{Params: []string{"myvalue"}}} + testHelperSuccess(c, "Assert(1, checker)", nil, func() interface{} { + c.Assert(1, checker) + return nil + }) + if !reflect.DeepEqual(checker.params, []interface{}{1}) { + c.Fatalf("Bad params for check: %#v", checker.params) + } +} + +func (s *HelpersS) TestAssertFailWithExpected(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, checker, 2\\)\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n\n" + testHelperFailure(c, "Assert(1, checker, 2)", nil, true, log, + func() interface{} { + c.Assert(1, checker, 2) + return nil + }) +} + +func (s *HelpersS) TestAssertFailWithExpectedAndMessage(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, checker, 2, myComment\\(\"Hello world!\"\\)\\)\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n" + + "\\.+ Hello world!\n\n" + testHelperFailure(c, "Assert(1, checker, 2, msg)", nil, true, log, + func() interface{} { + c.Assert(1, checker, 2, myComment("Hello world!")) + return nil + }) +} + +func (s *HelpersS) TestAssertFailWithoutExpected(c *check.C) { + checker := &MyChecker{result: false, info: &check.CheckerInfo{Params: []string{"myvalue"}}} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, checker\\)\n" + + "\\.+ myvalue int = 1\n\n" + testHelperFailure(c, "Assert(1, checker)", nil, true, log, + func() interface{} { + c.Assert(1, checker) + return nil + }) +} + +func (s *HelpersS) TestAssertFailWithoutExpectedAndMessage(c *check.C) { + checker := &MyChecker{result: false, info: &check.CheckerInfo{Params: []string{"myvalue"}}} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, checker, myComment\\(\"Hello world!\"\\)\\)\n" + + "\\.+ myvalue int = 1\n" + + "\\.+ Hello world!\n\n" + testHelperFailure(c, "Assert(1, checker, msg)", nil, true, log, + func() interface{} { + c.Assert(1, checker, myComment("Hello world!")) + return nil + }) +} + +func (s *HelpersS) TestAssertWithMissingExpected(c *check.C) { + checker := &MyChecker{result: true} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, checker\\)\n" + + "\\.+ Assert\\(myobtained, MyChecker, myexpected\\):\n" + + "\\.+ Wrong number of parameters for MyChecker: " + + "want 3, got 2\n\n" + testHelperFailure(c, "Assert(1, checker, !?)", nil, true, log, + func() interface{} { + c.Assert(1, checker) + return nil + }) +} + +func (s *HelpersS) TestAssertWithError(c *check.C) { + checker := &MyChecker{result: false, error: "Some not so cool data provided!"} + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, checker, 2\\)\n" + + "\\.+ myobtained int = 1\n" + + "\\.+ myexpected int = 2\n" + + "\\.+ Some not so cool data provided!\n\n" + testHelperFailure(c, "Assert(1, checker, 2)", nil, true, log, + func() interface{} { 
+ c.Assert(1, checker, 2) + return nil + }) +} + +func (s *HelpersS) TestAssertWithNilChecker(c *check.C) { + log := "(?s)helpers_test\\.go:[0-9]+:.*\nhelpers_test\\.go:[0-9]+:\n" + + " c\\.Assert\\(1, nil\\)\n" + + "\\.+ Assert\\(obtained, nil!\\?, \\.\\.\\.\\):\n" + + "\\.+ Oops\\.\\. you've provided a nil checker!\n\n" + testHelperFailure(c, "Assert(obtained, nil)", nil, true, log, + func() interface{} { + c.Assert(1, nil) + return nil + }) +} + +// ----------------------------------------------------------------------- +// Ensure that values logged work properly in some interesting cases. + +func (s *HelpersS) TestValueLoggingWithArrays(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test.go:[0-9]+:.*\nhelpers_test.go:[0-9]+:\n" + + " return c\\.Check\\(\\[\\]byte{1, 2}, checker, \\[\\]byte{1, 3}\\)\n" + + "\\.+ myobtained \\[\\]uint8 = \\[\\]byte{0x1, 0x2}\n" + + "\\.+ myexpected \\[\\]uint8 = \\[\\]byte{0x1, 0x3}\n\n" + testHelperFailure(c, "Check([]byte{1}, chk, []byte{3})", false, false, log, + func() interface{} { + return c.Check([]byte{1, 2}, checker, []byte{1, 3}) + }) +} + +func (s *HelpersS) TestValueLoggingWithMultiLine(c *check.C) { + checker := &MyChecker{result: false} + log := "(?s)helpers_test.go:[0-9]+:.*\nhelpers_test.go:[0-9]+:\n" + + " return c\\.Check\\(\"a\\\\nb\\\\n\", checker, \"a\\\\nb\\\\nc\"\\)\n" + + "\\.+ myobtained string = \"\" \\+\n" + + "\\.+ \"a\\\\n\" \\+\n" + + "\\.+ \"b\\\\n\"\n" + + "\\.+ myexpected string = \"\" \\+\n" + + "\\.+ \"a\\\\n\" \\+\n" + + "\\.+ \"b\\\\n\" \\+\n" + + "\\.+ \"c\"\n\n" + testHelperFailure(c, `Check("a\nb\n", chk, "a\nb\nc")`, false, false, log, + func() interface{} { + return c.Check("a\nb\n", checker, "a\nb\nc") + }) +} + +func (s *HelpersS) TestValueLoggingWithMultiLineException(c *check.C) { + // If the newline is at the end of the string, don't log as multi-line. + checker := &MyChecker{result: false} + log := "(?s)helpers_test.go:[0-9]+:.*\nhelpers_test.go:[0-9]+:\n" + + " return c\\.Check\\(\"a b\\\\n\", checker, \"a\\\\nb\"\\)\n" + + "\\.+ myobtained string = \"a b\\\\n\"\n" + + "\\.+ myexpected string = \"\" \\+\n" + + "\\.+ \"a\\\\n\" \\+\n" + + "\\.+ \"b\"\n\n" + testHelperFailure(c, `Check("a b\n", chk, "a\nb")`, false, false, log, + func() interface{} { + return c.Check("a b\n", checker, "a\nb") + }) +} + +// ----------------------------------------------------------------------- +// MakeDir() tests. 
+
+type MkDirHelper struct {
+	path1  string
+	path2  string
+	isDir1 bool
+	isDir2 bool
+	isDir3 bool
+	isDir4 bool
+}
+
+func (s *MkDirHelper) SetUpSuite(c *check.C) {
+	s.path1 = c.MkDir()
+	s.isDir1 = isDir(s.path1)
+}
+
+func (s *MkDirHelper) Test(c *check.C) {
+	s.path2 = c.MkDir()
+	s.isDir2 = isDir(s.path2)
+}
+
+func (s *MkDirHelper) TearDownSuite(c *check.C) {
+	s.isDir3 = isDir(s.path1)
+	s.isDir4 = isDir(s.path2)
+}
+
+func (s *HelpersS) TestMkDir(c *check.C) {
+	helper := MkDirHelper{}
+	output := String{}
+	check.Run(&helper, &check.RunConf{Output: &output})
+	c.Assert(output.value, check.Equals, "")
+	c.Check(helper.isDir1, check.Equals, true)
+	c.Check(helper.isDir2, check.Equals, true)
+	c.Check(helper.isDir3, check.Equals, true)
+	c.Check(helper.isDir4, check.Equals, true)
+	c.Check(helper.path1, check.Not(check.Equals),
+		helper.path2)
+	c.Check(isDir(helper.path1), check.Equals, false)
+	c.Check(isDir(helper.path2), check.Equals, false)
+}
+
+func isDir(path string) bool {
+	if stat, err := os.Stat(path); err == nil {
+		return stat.IsDir()
+	}
+	return false
+}
+
+// Concurrent logging should not corrupt the underlying buffer.
+// Use go test -race to detect the race in this test.
+func (s *HelpersS) TestConcurrentLogging(c *check.C) {
+	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(runtime.NumCPU()))
+	var start, stop sync.WaitGroup
+	start.Add(1)
+	for i, n := 0, runtime.NumCPU()*2; i < n; i++ {
+		stop.Add(1)
+		go func(i int) {
+			start.Wait()
+			for j := 0; j < 30; j++ {
+				c.Logf("Worker %d: line %d", i, j)
+			}
+			stop.Done()
+		}(i)
+	}
+	start.Done()
+	stop.Wait()
+}
+
+// -----------------------------------------------------------------------
+// Test the TestName function
+
+type TestNameHelper struct {
+	name1 string
+	name2 string
+	name3 string
+	name4 string
+	name5 string
+}
+
+func (s *TestNameHelper) SetUpSuite(c *check.C) { s.name1 = c.TestName() }
+func (s *TestNameHelper) SetUpTest(c *check.C) { s.name2 = c.TestName() }
+func (s *TestNameHelper) Test(c *check.C) { s.name3 = c.TestName() }
+func (s *TestNameHelper) TearDownTest(c *check.C) { s.name4 = c.TestName() }
+func (s *TestNameHelper) TearDownSuite(c *check.C) { s.name5 = c.TestName() }
+
+func (s *HelpersS) TestTestName(c *check.C) {
+	helper := TestNameHelper{}
+	output := String{}
+	check.Run(&helper, &check.RunConf{Output: &output})
+	c.Check(helper.name1, check.Equals, "")
+	c.Check(helper.name2, check.Equals, "TestNameHelper.Test")
+	c.Check(helper.name3, check.Equals, "TestNameHelper.Test")
+	c.Check(helper.name4, check.Equals, "TestNameHelper.Test")
+	c.Check(helper.name5, check.Equals, "")
+}
+
+// -----------------------------------------------------------------------
+// A couple of helper functions to test helper functions.
:-) + +func testHelperSuccess(c *check.C, name string, expectedResult interface{}, closure func() interface{}) { + var result interface{} + defer (func() { + if err := recover(); err != nil { + panic(err) + } + checkState(c, result, + &expectedState{ + name: name, + result: expectedResult, + failed: false, + log: "", + }) + })() + result = closure() +} + +func testHelperFailure(c *check.C, name string, expectedResult interface{}, shouldStop bool, log string, closure func() interface{}) { + var result interface{} + defer (func() { + if err := recover(); err != nil { + panic(err) + } + checkState(c, result, + &expectedState{ + name: name, + result: expectedResult, + failed: true, + log: log, + }) + })() + result = closure() + if shouldStop { + c.Logf("%s didn't stop when it should", name) + } +} diff --git a/vendor/gopkg.in/check.v1/printer.go b/vendor/gopkg.in/check.v1/printer.go new file mode 100644 index 0000000..e0f7557 --- /dev/null +++ b/vendor/gopkg.in/check.v1/printer.go @@ -0,0 +1,168 @@ +package check + +import ( + "bytes" + "go/ast" + "go/parser" + "go/printer" + "go/token" + "os" +) + +func indent(s, with string) (r string) { + eol := true + for i := 0; i != len(s); i++ { + c := s[i] + switch { + case eol && c == '\n' || c == '\r': + case c == '\n' || c == '\r': + eol = true + case eol: + eol = false + s = s[:i] + with + s[i:] + i += len(with) + } + } + return s +} + +func printLine(filename string, line int) (string, error) { + fset := token.NewFileSet() + file, err := os.Open(filename) + if err != nil { + return "", err + } + fnode, err := parser.ParseFile(fset, filename, file, parser.ParseComments) + if err != nil { + return "", err + } + config := &printer.Config{Mode: printer.UseSpaces, Tabwidth: 4} + lp := &linePrinter{fset: fset, fnode: fnode, line: line, config: config} + ast.Walk(lp, fnode) + result := lp.output.Bytes() + // Comments leave \n at the end. + n := len(result) + for n > 0 && result[n-1] == '\n' { + n-- + } + return string(result[:n]), nil +} + +type linePrinter struct { + config *printer.Config + fset *token.FileSet + fnode *ast.File + line int + output bytes.Buffer + stmt ast.Stmt +} + +func (lp *linePrinter) emit() bool { + if lp.stmt != nil { + lp.trim(lp.stmt) + lp.printWithComments(lp.stmt) + lp.stmt = nil + return true + } + return false +} + +func (lp *linePrinter) printWithComments(n ast.Node) { + nfirst := lp.fset.Position(n.Pos()).Line + nlast := lp.fset.Position(n.End()).Line + for _, g := range lp.fnode.Comments { + cfirst := lp.fset.Position(g.Pos()).Line + clast := lp.fset.Position(g.End()).Line + if clast == nfirst-1 && lp.fset.Position(n.Pos()).Column == lp.fset.Position(g.Pos()).Column { + for _, c := range g.List { + lp.output.WriteString(c.Text) + lp.output.WriteByte('\n') + } + } + if cfirst >= nfirst && cfirst <= nlast && n.End() <= g.List[0].Slash { + // The printer will not include the comment if it starts past + // the node itself. Trick it into printing by overlapping the + // slash with the end of the statement. + g.List[0].Slash = n.End() - 1 + } + } + node := &printer.CommentedNode{n, lp.fnode.Comments} + lp.config.Fprint(&lp.output, lp.fset, node) +} + +func (lp *linePrinter) Visit(n ast.Node) (w ast.Visitor) { + if n == nil { + if lp.output.Len() == 0 { + lp.emit() + } + return nil + } + first := lp.fset.Position(n.Pos()).Line + last := lp.fset.Position(n.End()).Line + if first <= lp.line && last >= lp.line { + // Print the innermost statement containing the line. 
+ if stmt, ok := n.(ast.Stmt); ok { + if _, ok := n.(*ast.BlockStmt); !ok { + lp.stmt = stmt + } + } + if first == lp.line && lp.emit() { + return nil + } + return lp + } + return nil +} + +func (lp *linePrinter) trim(n ast.Node) bool { + stmt, ok := n.(ast.Stmt) + if !ok { + return true + } + line := lp.fset.Position(n.Pos()).Line + if line != lp.line { + return false + } + switch stmt := stmt.(type) { + case *ast.IfStmt: + stmt.Body = lp.trimBlock(stmt.Body) + case *ast.SwitchStmt: + stmt.Body = lp.trimBlock(stmt.Body) + case *ast.TypeSwitchStmt: + stmt.Body = lp.trimBlock(stmt.Body) + case *ast.CaseClause: + stmt.Body = lp.trimList(stmt.Body) + case *ast.CommClause: + stmt.Body = lp.trimList(stmt.Body) + case *ast.BlockStmt: + stmt.List = lp.trimList(stmt.List) + } + return true +} + +func (lp *linePrinter) trimBlock(stmt *ast.BlockStmt) *ast.BlockStmt { + if !lp.trim(stmt) { + return lp.emptyBlock(stmt) + } + stmt.Rbrace = stmt.Lbrace + return stmt +} + +func (lp *linePrinter) trimList(stmts []ast.Stmt) []ast.Stmt { + for i := 0; i != len(stmts); i++ { + if !lp.trim(stmts[i]) { + stmts[i] = lp.emptyStmt(stmts[i]) + break + } + } + return stmts +} + +func (lp *linePrinter) emptyStmt(n ast.Node) *ast.ExprStmt { + return &ast.ExprStmt{&ast.Ellipsis{n.Pos(), nil}} +} + +func (lp *linePrinter) emptyBlock(n ast.Node) *ast.BlockStmt { + p := n.Pos() + return &ast.BlockStmt{p, []ast.Stmt{lp.emptyStmt(n)}, p} +} diff --git a/vendor/gopkg.in/check.v1/printer_test.go b/vendor/gopkg.in/check.v1/printer_test.go new file mode 100644 index 0000000..538b2d5 --- /dev/null +++ b/vendor/gopkg.in/check.v1/printer_test.go @@ -0,0 +1,104 @@ +package check_test + +import ( + . "gopkg.in/check.v1" +) + +var _ = Suite(&PrinterS{}) + +type PrinterS struct{} + +func (s *PrinterS) TestCountSuite(c *C) { + suitesRun += 1 +} + +var printTestFuncLine int + +func init() { + printTestFuncLine = getMyLine() + 3 +} + +func printTestFunc() { + println(1) // Comment1 + if 2 == 2 { // Comment2 + println(3) // Comment3 + } + switch 5 { + case 6: println(6) // Comment6 + println(7) + } + switch interface{}(9).(type) {// Comment9 + case int: println(10) + println(11) + } + select { + case <-(chan bool)(nil): println(14) + println(15) + default: println(16) + println(17) + } + println(19, + 20) + _ = func() { println(21) + println(22) + } + println(24, func() { + println(25) + }) + // Leading comment + // with multiple lines. 
+ println(29) // Comment29 +} + +var printLineTests = []struct { + line int + output string +}{ + {1, "println(1) // Comment1"}, + {2, "if 2 == 2 { // Comment2\n ...\n}"}, + {3, "println(3) // Comment3"}, + {5, "switch 5 {\n...\n}"}, + {6, "case 6:\n println(6) // Comment6\n ..."}, + {7, "println(7)"}, + {9, "switch interface{}(9).(type) { // Comment9\n...\n}"}, + {10, "case int:\n println(10)\n ..."}, + {14, "case <-(chan bool)(nil):\n println(14)\n ..."}, + {15, "println(15)"}, + {16, "default:\n println(16)\n ..."}, + {17, "println(17)"}, + {19, "println(19,\n 20)"}, + {20, "println(19,\n 20)"}, + {21, "_ = func() {\n println(21)\n println(22)\n}"}, + {22, "println(22)"}, + {24, "println(24, func() {\n println(25)\n})"}, + {25, "println(25)"}, + {26, "println(24, func() {\n println(25)\n})"}, + {29, "// Leading comment\n// with multiple lines.\nprintln(29) // Comment29"}, +} + +func (s *PrinterS) TestPrintLine(c *C) { + for _, test := range printLineTests { + output, err := PrintLine("printer_test.go", printTestFuncLine+test.line) + c.Assert(err, IsNil) + c.Assert(output, Equals, test.output) + } +} + +var indentTests = []struct { + in, out string +}{ + {"", ""}, + {"\n", "\n"}, + {"a", ">>>a"}, + {"a\n", ">>>a\n"}, + {"a\nb", ">>>a\n>>>b"}, + {" ", ">>> "}, +} + +func (s *PrinterS) TestIndent(c *C) { + for _, test := range indentTests { + out := Indent(test.in, ">>>") + c.Assert(out, Equals, test.out) + } + +} diff --git a/vendor/gopkg.in/check.v1/reporter.go b/vendor/gopkg.in/check.v1/reporter.go new file mode 100644 index 0000000..fb04f76 --- /dev/null +++ b/vendor/gopkg.in/check.v1/reporter.go @@ -0,0 +1,88 @@ +package check + +import ( + "fmt" + "io" + "sync" +) + +// ----------------------------------------------------------------------- +// Output writer manages atomic output writing according to settings. + +type outputWriter struct { + m sync.Mutex + writer io.Writer + wroteCallProblemLast bool + Stream bool + Verbose bool +} + +func newOutputWriter(writer io.Writer, stream, verbose bool) *outputWriter { + return &outputWriter{writer: writer, Stream: stream, Verbose: verbose} +} + +func (ow *outputWriter) Write(content []byte) (n int, err error) { + ow.m.Lock() + n, err = ow.writer.Write(content) + ow.m.Unlock() + return +} + +func (ow *outputWriter) WriteCallStarted(label string, c *C) { + if ow.Stream { + header := renderCallHeader(label, c, "", "\n") + ow.m.Lock() + ow.writer.Write([]byte(header)) + ow.m.Unlock() + } +} + +func (ow *outputWriter) WriteCallProblem(label string, c *C) { + var prefix string + if !ow.Stream { + prefix = "\n-----------------------------------" + + "-----------------------------------\n" + } + header := renderCallHeader(label, c, prefix, "\n\n") + ow.m.Lock() + ow.wroteCallProblemLast = true + ow.writer.Write([]byte(header)) + if !ow.Stream { + c.logb.WriteTo(ow.writer) + } + ow.m.Unlock() +} + +func (ow *outputWriter) WriteCallSuccess(label string, c *C) { + if ow.Stream || (ow.Verbose && c.kind == testKd) { + // TODO Use a buffer here. + var suffix string + if c.reason != "" { + suffix = " (" + c.reason + ")" + } + if c.status() == succeededSt { + suffix += "\t" + c.timerString() + } + suffix += "\n" + if ow.Stream { + suffix += "\n" + } + header := renderCallHeader(label, c, "", suffix) + ow.m.Lock() + // Resist temptation of using line as prefix above due to race. 
+ if !ow.Stream && ow.wroteCallProblemLast { + header = "\n-----------------------------------" + + "-----------------------------------\n" + + header + } + ow.wroteCallProblemLast = false + ow.writer.Write([]byte(header)) + ow.m.Unlock() + } +} + +func renderCallHeader(label string, c *C, prefix, suffix string) string { + pc := c.method.PC() + return fmt.Sprintf("%s%s: %s: %s%s", prefix, label, niceFuncPath(pc), + niceFuncName(pc), suffix) +} diff --git a/vendor/gopkg.in/check.v1/reporter_test.go b/vendor/gopkg.in/check.v1/reporter_test.go new file mode 100644 index 0000000..0b7ed76 --- /dev/null +++ b/vendor/gopkg.in/check.v1/reporter_test.go @@ -0,0 +1,159 @@ +package check_test + +import ( + "fmt" + "path/filepath" + "runtime" + + . "gopkg.in/check.v1" +) + +var _ = Suite(&reporterS{}) + +type reporterS struct { + testFile string +} + +func (s *reporterS) SetUpSuite(c *C) { + _, fileName, _, ok := runtime.Caller(0) + c.Assert(ok, Equals, true) + s.testFile = filepath.Base(fileName) +} + +func (s *reporterS) TestWrite(c *C) { + testString := "test string" + output := String{} + + dummyStream := true + dummyVerbose := true + o := NewOutputWriter(&output, dummyStream, dummyVerbose) + + o.Write([]byte(testString)) + c.Assert(output.value, Equals, testString) +} + +func (s *reporterS) TestWriteCallStartedWithStreamFlag(c *C) { + testLabel := "test started label" + stream := true + output := String{} + + dummyVerbose := true + o := NewOutputWriter(&output, stream, dummyVerbose) + + o.WriteCallStarted(testLabel, c) + expected := fmt.Sprintf("%s: %s:\\d+: %s\n", testLabel, s.testFile, c.TestName()) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallStartedWithoutStreamFlag(c *C) { + stream := false + output := String{} + + dummyLabel := "dummy" + dummyVerbose := true + o := NewOutputWriter(&output, stream, dummyVerbose) + + o.WriteCallStarted(dummyLabel, c) + c.Assert(output.value, Equals, "") +} + +func (s *reporterS) TestWriteCallProblemWithStreamFlag(c *C) { + testLabel := "test problem label" + stream := true + output := String{} + + dummyVerbose := true + o := NewOutputWriter(&output, stream, dummyVerbose) + + o.WriteCallProblem(testLabel, c) + expected := fmt.Sprintf("%s: %s:\\d+: %s\n\n", testLabel, s.testFile, c.TestName()) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallProblemWithoutStreamFlag(c *C) { + testLabel := "test problem label" + stream := false + output := String{} + + dummyVerbose := true + o := NewOutputWriter(&output, stream, dummyVerbose) + + o.WriteCallProblem(testLabel, c) + expected := fmt.Sprintf(""+ + "\n"+ + "----------------------------------------------------------------------\n"+ + "%s: %s:\\d+: %s\n\n", testLabel, s.testFile, c.TestName()) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallProblemWithoutStreamFlagWithLog(c *C) { + testLabel := "test problem label" + testLog := "test log" + stream := false + output := String{} + + dummyVerbose := true + o := NewOutputWriter(&output, stream, dummyVerbose) + + c.Log(testLog) + o.WriteCallProblem(testLabel, c) + expected := fmt.Sprintf(""+ + "\n"+ + "----------------------------------------------------------------------\n"+ + "%s: %s:\\d+: %s\n\n%s\n", testLabel, s.testFile, c.TestName(), testLog) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallSuccessWithStreamFlag(c *C) { + testLabel := "test success label" + stream := true + output := String{} + + dummyVerbose := true + o := 
NewOutputWriter(&output, stream, dummyVerbose) + + o.WriteCallSuccess(testLabel, c) + expected := fmt.Sprintf("%s: %s:\\d+: %s\t\\d\\.\\d+s\n\n", testLabel, s.testFile, c.TestName()) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallSuccessWithStreamFlagAndReason(c *C) { + testLabel := "test success label" + testReason := "test skip reason" + stream := true + output := String{} + + dummyVerbose := true + o := NewOutputWriter(&output, stream, dummyVerbose) + c.FakeSkip(testReason) + + o.WriteCallSuccess(testLabel, c) + expected := fmt.Sprintf("%s: %s:\\d+: %s \\(%s\\)\t\\d\\.\\d+s\n\n", + testLabel, s.testFile, c.TestName(), testReason) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallSuccessWithoutStreamFlagWithVerboseFlag(c *C) { + testLabel := "test success label" + stream := false + verbose := true + output := String{} + + o := NewOutputWriter(&output, stream, verbose) + + o.WriteCallSuccess(testLabel, c) + expected := fmt.Sprintf("%s: %s:\\d+: %s\t\\d\\.\\d+s\n", testLabel, s.testFile, c.TestName()) + c.Assert(output.value, Matches, expected) +} + +func (s *reporterS) TestWriteCallSuccessWithoutStreamFlagWithoutVerboseFlag(c *C) { + testLabel := "test success label" + stream := false + verbose := false + output := String{} + + o := NewOutputWriter(&output, stream, verbose) + + o.WriteCallSuccess(testLabel, c) + c.Assert(output.value, Equals, "") +} diff --git a/vendor/gopkg.in/check.v1/run.go b/vendor/gopkg.in/check.v1/run.go new file mode 100644 index 0000000..da8fd79 --- /dev/null +++ b/vendor/gopkg.in/check.v1/run.go @@ -0,0 +1,175 @@ +package check + +import ( + "bufio" + "flag" + "fmt" + "os" + "testing" + "time" +) + +// ----------------------------------------------------------------------- +// Test suite registry. + +var allSuites []interface{} + +// Suite registers the given value as a test suite to be run. Any methods +// starting with the Test prefix in the given value will be considered as +// a test method. +func Suite(suite interface{}) interface{} { + allSuites = append(allSuites, suite) + return suite +} + +// ----------------------------------------------------------------------- +// Public running interface. 
+ +var ( + oldFilterFlag = flag.String("gocheck.f", "", "Regular expression selecting which tests and/or suites to run") + oldVerboseFlag = flag.Bool("gocheck.v", false, "Verbose mode") + oldStreamFlag = flag.Bool("gocheck.vv", false, "Super verbose mode (disables output caching)") + oldBenchFlag = flag.Bool("gocheck.b", false, "Run benchmarks") + oldBenchTime = flag.Duration("gocheck.btime", 1*time.Second, "approximate run time for each benchmark") + oldListFlag = flag.Bool("gocheck.list", false, "List the names of all tests that will be run") + oldWorkFlag = flag.Bool("gocheck.work", false, "Display and do not remove the test working directory") + + newFilterFlag = flag.String("check.f", "", "Regular expression selecting which tests and/or suites to run") + newVerboseFlag = flag.Bool("check.v", false, "Verbose mode") + newStreamFlag = flag.Bool("check.vv", false, "Super verbose mode (disables output caching)") + newBenchFlag = flag.Bool("check.b", false, "Run benchmarks") + newBenchTime = flag.Duration("check.btime", 1*time.Second, "approximate run time for each benchmark") + newBenchMem = flag.Bool("check.bmem", false, "Report memory benchmarks") + newListFlag = flag.Bool("check.list", false, "List the names of all tests that will be run") + newWorkFlag = flag.Bool("check.work", false, "Display and do not remove the test working directory") +) + +// TestingT runs all test suites registered with the Suite function, +// printing results to stdout, and reporting any failures back to +// the "testing" package. +func TestingT(testingT *testing.T) { + benchTime := *newBenchTime + if benchTime == 1*time.Second { + benchTime = *oldBenchTime + } + conf := &RunConf{ + Filter: *oldFilterFlag + *newFilterFlag, + Verbose: *oldVerboseFlag || *newVerboseFlag, + Stream: *oldStreamFlag || *newStreamFlag, + Benchmark: *oldBenchFlag || *newBenchFlag, + BenchmarkTime: benchTime, + BenchmarkMem: *newBenchMem, + KeepWorkDir: *oldWorkFlag || *newWorkFlag, + } + if *oldListFlag || *newListFlag { + w := bufio.NewWriter(os.Stdout) + for _, name := range ListAll(conf) { + fmt.Fprintln(w, name) + } + w.Flush() + return + } + result := RunAll(conf) + println(result.String()) + if !result.Passed() { + testingT.Fail() + } +} + +// RunAll runs all test suites registered with the Suite function, using the +// provided run configuration. +func RunAll(runConf *RunConf) *Result { + result := Result{} + for _, suite := range allSuites { + result.Add(Run(suite, runConf)) + } + return &result +} + +// Run runs the provided test suite using the provided run configuration. +func Run(suite interface{}, runConf *RunConf) *Result { + runner := newSuiteRunner(suite, runConf) + return runner.run() +} + +// ListAll returns the names of all the test functions registered with the +// Suite function that will be run with the provided run configuration. +func ListAll(runConf *RunConf) []string { + var names []string + for _, suite := range allSuites { + names = append(names, List(suite, runConf)...) + } + return names +} + +// List returns the names of the test functions in the given +// suite that will be run with the provided run configuration. +func List(suite interface{}, runConf *RunConf) []string { + var names []string + runner := newSuiteRunner(suite, runConf) + for _, t := range runner.tests { + names = append(names, t.String()) + } + return names +} + +// ----------------------------------------------------------------------- +// Result methods. 
+ +func (r *Result) Add(other *Result) { + r.Succeeded += other.Succeeded + r.Skipped += other.Skipped + r.Failed += other.Failed + r.Panicked += other.Panicked + r.FixturePanicked += other.FixturePanicked + r.ExpectedFailures += other.ExpectedFailures + r.Missed += other.Missed + if r.WorkDir != "" && other.WorkDir != "" { + r.WorkDir += ":" + other.WorkDir + } else if other.WorkDir != "" { + r.WorkDir = other.WorkDir + } +} + +func (r *Result) Passed() bool { + return (r.Failed == 0 && r.Panicked == 0 && + r.FixturePanicked == 0 && r.Missed == 0 && + r.RunError == nil) +} + +func (r *Result) String() string { + if r.RunError != nil { + return "ERROR: " + r.RunError.Error() + } + + var value string + if r.Failed == 0 && r.Panicked == 0 && r.FixturePanicked == 0 && + r.Missed == 0 { + value = "OK: " + } else { + value = "OOPS: " + } + value += fmt.Sprintf("%d passed", r.Succeeded) + if r.Skipped != 0 { + value += fmt.Sprintf(", %d skipped", r.Skipped) + } + if r.ExpectedFailures != 0 { + value += fmt.Sprintf(", %d expected failures", r.ExpectedFailures) + } + if r.Failed != 0 { + value += fmt.Sprintf(", %d FAILED", r.Failed) + } + if r.Panicked != 0 { + value += fmt.Sprintf(", %d PANICKED", r.Panicked) + } + if r.FixturePanicked != 0 { + value += fmt.Sprintf(", %d FIXTURE-PANICKED", r.FixturePanicked) + } + if r.Missed != 0 { + value += fmt.Sprintf(", %d MISSED", r.Missed) + } + if r.WorkDir != "" { + value += "\nWORK=" + r.WorkDir + } + return value +} diff --git a/vendor/gopkg.in/check.v1/run_test.go b/vendor/gopkg.in/check.v1/run_test.go new file mode 100644 index 0000000..f41fffc --- /dev/null +++ b/vendor/gopkg.in/check.v1/run_test.go @@ -0,0 +1,419 @@ +// These tests verify the test running logic. + +package check_test + +import ( + "errors" + . "gopkg.in/check.v1" + "os" + "sync" +) + +var runnerS = Suite(&RunS{}) + +type RunS struct{} + +func (s *RunS) TestCountSuite(c *C) { + suitesRun += 1 +} + +// ----------------------------------------------------------------------- +// Tests ensuring result counting works properly. 
+ +func (s *RunS) TestSuccess(c *C) { + output := String{} + result := Run(&SuccessHelper{}, &RunConf{Output: &output}) + c.Check(result.Succeeded, Equals, 1) + c.Check(result.Failed, Equals, 0) + c.Check(result.Skipped, Equals, 0) + c.Check(result.Panicked, Equals, 0) + c.Check(result.FixturePanicked, Equals, 0) + c.Check(result.Missed, Equals, 0) + c.Check(result.RunError, IsNil) +} + +func (s *RunS) TestFailure(c *C) { + output := String{} + result := Run(&FailHelper{}, &RunConf{Output: &output}) + c.Check(result.Succeeded, Equals, 0) + c.Check(result.Failed, Equals, 1) + c.Check(result.Skipped, Equals, 0) + c.Check(result.Panicked, Equals, 0) + c.Check(result.FixturePanicked, Equals, 0) + c.Check(result.Missed, Equals, 0) + c.Check(result.RunError, IsNil) +} + +func (s *RunS) TestFixture(c *C) { + output := String{} + result := Run(&FixtureHelper{}, &RunConf{Output: &output}) + c.Check(result.Succeeded, Equals, 2) + c.Check(result.Failed, Equals, 0) + c.Check(result.Skipped, Equals, 0) + c.Check(result.Panicked, Equals, 0) + c.Check(result.FixturePanicked, Equals, 0) + c.Check(result.Missed, Equals, 0) + c.Check(result.RunError, IsNil) +} + +func (s *RunS) TestPanicOnTest(c *C) { + output := String{} + helper := &FixtureHelper{panicOn: "Test1"} + result := Run(helper, &RunConf{Output: &output}) + c.Check(result.Succeeded, Equals, 1) + c.Check(result.Failed, Equals, 0) + c.Check(result.Skipped, Equals, 0) + c.Check(result.Panicked, Equals, 1) + c.Check(result.FixturePanicked, Equals, 0) + c.Check(result.Missed, Equals, 0) + c.Check(result.RunError, IsNil) +} + +func (s *RunS) TestPanicOnSetUpTest(c *C) { + output := String{} + helper := &FixtureHelper{panicOn: "SetUpTest"} + result := Run(helper, &RunConf{Output: &output}) + c.Check(result.Succeeded, Equals, 0) + c.Check(result.Failed, Equals, 0) + c.Check(result.Skipped, Equals, 0) + c.Check(result.Panicked, Equals, 0) + c.Check(result.FixturePanicked, Equals, 1) + c.Check(result.Missed, Equals, 2) + c.Check(result.RunError, IsNil) +} + +func (s *RunS) TestPanicOnSetUpSuite(c *C) { + output := String{} + helper := &FixtureHelper{panicOn: "SetUpSuite"} + result := Run(helper, &RunConf{Output: &output}) + c.Check(result.Succeeded, Equals, 0) + c.Check(result.Failed, Equals, 0) + c.Check(result.Skipped, Equals, 0) + c.Check(result.Panicked, Equals, 0) + c.Check(result.FixturePanicked, Equals, 1) + c.Check(result.Missed, Equals, 2) + c.Check(result.RunError, IsNil) +} + +// ----------------------------------------------------------------------- +// Check result aggregation. + +func (s *RunS) TestAdd(c *C) { + result := &Result{ + Succeeded: 1, + Skipped: 2, + Failed: 3, + Panicked: 4, + FixturePanicked: 5, + Missed: 6, + ExpectedFailures: 7, + } + result.Add(&Result{ + Succeeded: 10, + Skipped: 20, + Failed: 30, + Panicked: 40, + FixturePanicked: 50, + Missed: 60, + ExpectedFailures: 70, + }) + c.Check(result.Succeeded, Equals, 11) + c.Check(result.Skipped, Equals, 22) + c.Check(result.Failed, Equals, 33) + c.Check(result.Panicked, Equals, 44) + c.Check(result.FixturePanicked, Equals, 55) + c.Check(result.Missed, Equals, 66) + c.Check(result.ExpectedFailures, Equals, 77) + c.Check(result.RunError, IsNil) +} + +// ----------------------------------------------------------------------- +// Check the Passed() method. 
+ +func (s *RunS) TestPassed(c *C) { + c.Assert((&Result{}).Passed(), Equals, true) + c.Assert((&Result{Succeeded: 1}).Passed(), Equals, true) + c.Assert((&Result{Skipped: 1}).Passed(), Equals, true) + c.Assert((&Result{Failed: 1}).Passed(), Equals, false) + c.Assert((&Result{Panicked: 1}).Passed(), Equals, false) + c.Assert((&Result{FixturePanicked: 1}).Passed(), Equals, false) + c.Assert((&Result{Missed: 1}).Passed(), Equals, false) + c.Assert((&Result{RunError: errors.New("!")}).Passed(), Equals, false) +} + +// ----------------------------------------------------------------------- +// Check that result printing is working correctly. + +func (s *RunS) TestPrintSuccess(c *C) { + result := &Result{Succeeded: 5} + c.Check(result.String(), Equals, "OK: 5 passed") +} + +func (s *RunS) TestPrintFailure(c *C) { + result := &Result{Failed: 5} + c.Check(result.String(), Equals, "OOPS: 0 passed, 5 FAILED") +} + +func (s *RunS) TestPrintSkipped(c *C) { + result := &Result{Skipped: 5} + c.Check(result.String(), Equals, "OK: 0 passed, 5 skipped") +} + +func (s *RunS) TestPrintExpectedFailures(c *C) { + result := &Result{ExpectedFailures: 5} + c.Check(result.String(), Equals, "OK: 0 passed, 5 expected failures") +} + +func (s *RunS) TestPrintPanicked(c *C) { + result := &Result{Panicked: 5} + c.Check(result.String(), Equals, "OOPS: 0 passed, 5 PANICKED") +} + +func (s *RunS) TestPrintFixturePanicked(c *C) { + result := &Result{FixturePanicked: 5} + c.Check(result.String(), Equals, "OOPS: 0 passed, 5 FIXTURE-PANICKED") +} + +func (s *RunS) TestPrintMissed(c *C) { + result := &Result{Missed: 5} + c.Check(result.String(), Equals, "OOPS: 0 passed, 5 MISSED") +} + +func (s *RunS) TestPrintAll(c *C) { + result := &Result{Succeeded: 1, Skipped: 2, ExpectedFailures: 3, + Panicked: 4, FixturePanicked: 5, Missed: 6} + c.Check(result.String(), Equals, + "OOPS: 1 passed, 2 skipped, 3 expected failures, 4 PANICKED, "+ + "5 FIXTURE-PANICKED, 6 MISSED") +} + +func (s *RunS) TestPrintRunError(c *C) { + result := &Result{Succeeded: 1, Failed: 1, + RunError: errors.New("Kaboom!")} + c.Check(result.String(), Equals, "ERROR: Kaboom!") +} + +// ----------------------------------------------------------------------- +// Verify that the method pattern flag works correctly. 
+ +func (s *RunS) TestFilterTestName(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: "Test[91]"} + Run(&helper, &runConf) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 5) +} + +func (s *RunS) TestFilterTestNameWithAll(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: ".*"} + Run(&helper, &runConf) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "SetUpTest") + c.Check(helper.calls[5], Equals, "Test2") + c.Check(helper.calls[6], Equals, "TearDownTest") + c.Check(helper.calls[7], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 8) +} + +func (s *RunS) TestFilterSuiteName(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: "FixtureHelper"} + Run(&helper, &runConf) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test1") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "SetUpTest") + c.Check(helper.calls[5], Equals, "Test2") + c.Check(helper.calls[6], Equals, "TearDownTest") + c.Check(helper.calls[7], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 8) +} + +func (s *RunS) TestFilterSuiteNameAndTestName(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: "FixtureHelper\\.Test2"} + Run(&helper, &runConf) + c.Check(helper.calls[0], Equals, "SetUpSuite") + c.Check(helper.calls[1], Equals, "SetUpTest") + c.Check(helper.calls[2], Equals, "Test2") + c.Check(helper.calls[3], Equals, "TearDownTest") + c.Check(helper.calls[4], Equals, "TearDownSuite") + c.Check(len(helper.calls), Equals, 5) +} + +func (s *RunS) TestFilterAllOut(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: "NotFound"} + Run(&helper, &runConf) + c.Check(len(helper.calls), Equals, 0) +} + +func (s *RunS) TestRequirePartialMatch(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: "est"} + Run(&helper, &runConf) + c.Check(len(helper.calls), Equals, 8) +} + +func (s *RunS) TestFilterError(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Filter: "]["} + result := Run(&helper, &runConf) + c.Check(result.String(), Equals, + "ERROR: Bad filter expression: error parsing regexp: missing closing ]: `[`") + c.Check(len(helper.calls), Equals, 0) +} + +// ----------------------------------------------------------------------- +// Verify that List works correctly. 
+ +func (s *RunS) TestListFiltered(c *C) { + names := List(&FixtureHelper{}, &RunConf{Filter: "1"}) + c.Assert(names, DeepEquals, []string{ + "FixtureHelper.Test1", + }) +} + +func (s *RunS) TestList(c *C) { + names := List(&FixtureHelper{}, &RunConf{}) + c.Assert(names, DeepEquals, []string{ + "FixtureHelper.Test1", + "FixtureHelper.Test2", + }) +} + +// ----------------------------------------------------------------------- +// Verify that verbose mode prints tests which pass as well. + +func (s *RunS) TestVerboseMode(c *C) { + helper := FixtureHelper{} + output := String{} + runConf := RunConf{Output: &output, Verbose: true} + Run(&helper, &runConf) + + expected := "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Test1\t *[.0-9]+s\n" + + "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Test2\t *[.0-9]+s\n" + + c.Assert(output.value, Matches, expected) +} + +func (s *RunS) TestVerboseModeWithFailBeforePass(c *C) { + helper := FixtureHelper{panicOn: "Test1"} + output := String{} + runConf := RunConf{Output: &output, Verbose: true} + Run(&helper, &runConf) + + expected := "(?s).*PANIC.*\n-+\n" + // Should have an extra line. + "PASS: check_test\\.go:[0-9]+: FixtureHelper\\.Test2\t *[.0-9]+s\n" + + c.Assert(output.value, Matches, expected) +} + +// ----------------------------------------------------------------------- +// Verify the stream output mode. In this mode there's no output caching. + +type StreamHelper struct { + l2 sync.Mutex + l3 sync.Mutex +} + +func (s *StreamHelper) SetUpSuite(c *C) { + c.Log("0") +} + +func (s *StreamHelper) Test1(c *C) { + c.Log("1") + s.l2.Lock() + s.l3.Lock() + go func() { + s.l2.Lock() // Wait for "2". + c.Log("3") + s.l3.Unlock() + }() +} + +func (s *StreamHelper) Test2(c *C) { + c.Log("2") + s.l2.Unlock() + s.l3.Lock() // Wait for "3". + c.Fail() + c.Log("4") +} + +func (s *RunS) TestStreamMode(c *C) { + helper := &StreamHelper{} + output := String{} + runConf := RunConf{Output: &output, Stream: true} + Run(helper, &runConf) + + expected := "START: run_test\\.go:[0-9]+: StreamHelper\\.SetUpSuite\n0\n" + + "PASS: run_test\\.go:[0-9]+: StreamHelper\\.SetUpSuite\t *[.0-9]+s\n\n" + + "START: run_test\\.go:[0-9]+: StreamHelper\\.Test1\n1\n" + + "PASS: run_test\\.go:[0-9]+: StreamHelper\\.Test1\t *[.0-9]+s\n\n" + + "START: run_test\\.go:[0-9]+: StreamHelper\\.Test2\n2\n3\n4\n" + + "FAIL: run_test\\.go:[0-9]+: StreamHelper\\.Test2\n\n" + + c.Assert(output.value, Matches, expected) +} + +type StreamMissHelper struct{} + +func (s *StreamMissHelper) SetUpSuite(c *C) { + c.Log("0") + c.Fail() +} + +func (s *StreamMissHelper) Test1(c *C) { + c.Log("1") +} + +func (s *RunS) TestStreamModeWithMiss(c *C) { + helper := &StreamMissHelper{} + output := String{} + runConf := RunConf{Output: &output, Stream: true} + Run(helper, &runConf) + + expected := "START: run_test\\.go:[0-9]+: StreamMissHelper\\.SetUpSuite\n0\n" + + "FAIL: run_test\\.go:[0-9]+: StreamMissHelper\\.SetUpSuite\n\n" + + "START: run_test\\.go:[0-9]+: StreamMissHelper\\.Test1\n" + + "MISS: run_test\\.go:[0-9]+: StreamMissHelper\\.Test1\n\n" + + c.Assert(output.value, Matches, expected) +} + +// ----------------------------------------------------------------------- +// Verify that that the keep work dir request indeed does so. 
+ +type WorkDirSuite struct {} + +func (s *WorkDirSuite) Test(c *C) { + c.MkDir() +} + +func (s *RunS) TestKeepWorkDir(c *C) { + output := String{} + runConf := RunConf{Output: &output, Verbose: true, KeepWorkDir: true} + result := Run(&WorkDirSuite{}, &runConf) + + c.Assert(result.String(), Matches, ".*\nWORK=" + result.WorkDir) + + stat, err := os.Stat(result.WorkDir) + c.Assert(err, IsNil) + c.Assert(stat.IsDir(), Equals, true) +} diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE b/vendor/gopkg.in/yaml.v1/LICENSE similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE rename to vendor/gopkg.in/yaml.v1/LICENSE diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE.libyaml b/vendor/gopkg.in/yaml.v1/LICENSE.libyaml similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/LICENSE.libyaml rename to vendor/gopkg.in/yaml.v1/LICENSE.libyaml diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/README.md b/vendor/gopkg.in/yaml.v1/README.md similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/README.md rename to vendor/gopkg.in/yaml.v1/README.md diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/apic.go b/vendor/gopkg.in/yaml.v1/apic.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/apic.go rename to vendor/gopkg.in/yaml.v1/apic.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/decode.go b/vendor/gopkg.in/yaml.v1/decode.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/decode.go rename to vendor/gopkg.in/yaml.v1/decode.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go b/vendor/gopkg.in/yaml.v1/decode_test.go similarity index 99% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go rename to vendor/gopkg.in/yaml.v1/decode_test.go index 332ec87..ef3d37f 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/decode_test.go +++ b/vendor/gopkg.in/yaml.v1/decode_test.go @@ -2,7 +2,7 @@ package yaml_test import ( . 
"gopkg.in/check.v1" - "github.com/remind101/deploy/Godeps/_workspace/src/gopkg.in/yaml.v1" + "gopkg.in/yaml.v1" "math" "reflect" "strings" diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/emitterc.go b/vendor/gopkg.in/yaml.v1/emitterc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/emitterc.go rename to vendor/gopkg.in/yaml.v1/emitterc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/encode.go b/vendor/gopkg.in/yaml.v1/encode.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/encode.go rename to vendor/gopkg.in/yaml.v1/encode.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go b/vendor/gopkg.in/yaml.v1/encode_test.go similarity index 99% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go rename to vendor/gopkg.in/yaml.v1/encode_test.go index 37d13a0..c9febc2 100644 --- a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/encode_test.go +++ b/vendor/gopkg.in/yaml.v1/encode_test.go @@ -8,7 +8,7 @@ import ( "time" . "gopkg.in/check.v1" - "github.com/remind101/deploy/Godeps/_workspace/src/gopkg.in/yaml.v1" + "gopkg.in/yaml.v1" ) var marshalIntTest = 123 diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/parserc.go b/vendor/gopkg.in/yaml.v1/parserc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/parserc.go rename to vendor/gopkg.in/yaml.v1/parserc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/readerc.go b/vendor/gopkg.in/yaml.v1/readerc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/readerc.go rename to vendor/gopkg.in/yaml.v1/readerc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/resolve.go b/vendor/gopkg.in/yaml.v1/resolve.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/resolve.go rename to vendor/gopkg.in/yaml.v1/resolve.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/scannerc.go b/vendor/gopkg.in/yaml.v1/scannerc.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/scannerc.go rename to vendor/gopkg.in/yaml.v1/scannerc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/sorter.go b/vendor/gopkg.in/yaml.v1/sorter.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/sorter.go rename to vendor/gopkg.in/yaml.v1/sorter.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/suite_test.go b/vendor/gopkg.in/yaml.v1/suite_test.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/suite_test.go rename to vendor/gopkg.in/yaml.v1/suite_test.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/writerc.go b/vendor/gopkg.in/yaml.v1/writerc.go similarity index 100% rename from 
Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/writerc.go rename to vendor/gopkg.in/yaml.v1/writerc.go diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yaml.go b/vendor/gopkg.in/yaml.v1/yaml.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yaml.go rename to vendor/gopkg.in/yaml.v1/yaml.go diff --git a/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go b/vendor/gopkg.in/yaml.v1/yamlh.go similarity index 99% rename from Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go rename to vendor/gopkg.in/yaml.v1/yamlh.go index 4b020b1..d60a6b6 100644 --- a/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlh.go +++ b/vendor/gopkg.in/yaml.v1/yamlh.go @@ -296,7 +296,7 @@ const ( // Not in original libyaml. yaml_BINARY_TAG = "tag:yaml.org,2002:binary" - yaml_MERGE_TAG = "tag:yaml.org,2002:merge" + yaml_MERGE_TAG = "tag:yaml.org,2002:merge" yaml_DEFAULT_SCALAR_TAG = yaml_STR_TAG // The default scalar tag is !!str. yaml_DEFAULT_SEQUENCE_TAG = yaml_SEQ_TAG // The default sequence tag is !!seq. diff --git a/Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlprivateh.go b/vendor/gopkg.in/yaml.v1/yamlprivateh.go similarity index 100% rename from Godeps/_workspace/src/github.com/github/hub/Godeps/_workspace/src/gopkg.in/yaml.v1/yamlprivateh.go rename to vendor/gopkg.in/yaml.v1/yamlprivateh.go diff --git a/vendor/vendor.json b/vendor/vendor.json new file mode 100644 index 0000000..9a85f2f --- /dev/null +++ b/vendor/vendor.json @@ -0,0 +1,176 @@ +{ + "comment": "", + "ignore": "", + "package": [ + { + "comment": "null-15", + "path": "bitbucket.org/kardianos/osext", + "revision": "44140c5fc69ecf1102c5ef451d73cd98ef59b178" + }, + { + "path": "code.google.com/p/go-netrc/netrc", + "revision": "0da3cb9c37e0", + "revisionTime": "2013-10-13T13:28:47-04:00" + }, + { + "comment": "v0.1.0-9-g3883ac1", + "path": "github.com/BurntSushi/toml", + "revision": "3883ac1ce943878302255f538fce319d23226223" + }, + { + "comment": "release.r60-6-ge17e998", + "path": "github.com/bmizerany/assert", + "revision": "e17e99893cb6509f428e1728281c2ad60a6b31e3" + }, + { + "comment": "1.2.0-95-g9b2bd2b", + "path": "github.com/codegangsta/cli", + "revision": "9b2bd2b3489748d4d0a204fa4eb2ee9e89e0ebc6" + }, + { + "path": "github.com/fhs/go-netrc/netrc", + "revision": "4422b68c9c934b03e8e53ef18c8c8714542def7e" + }, + { + "path": "github.com/github/hub/Godeps/_workspace/src/github.com/bmizerany/assert", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/Godeps/_workspace/src/github.com/kballard/go-shellquote", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/Godeps/_workspace/src/github.com/kr/pretty", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/Godeps/_workspace/src/github.com/kr/text", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-colorable", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/Godeps/_workspace/src/github.com/mattn/go-isatty", + 
"revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/cmd", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/fixtures", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "comment": "v2.2.0-21-gfb118b0", + "path": "github.com/github/hub/git", + "revision": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" + }, + { + "comment": "v2.2.0-21-gfb118b0", + "path": "github.com/github/hub/github", + "revision": "fb118b02033c3e56b74c9a4b7e26d1c5c63b1d7f" + }, + { + "path": "github.com/github/hub/ui", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/github/hub/utils", + "revision": "35c76bf91dc3d699173599464b9b41cb877ee440", + "revisionTime": "2015-12-08T10:26:02+01:00" + }, + { + "path": "github.com/google/go-github/github", + "revision": "7ea4ee6d222607c11ea86e99a6f6723beeae785d" + }, + { + "path": "github.com/google/go-querystring/query", + "revision": "ec0a78e0f4db229b7897be36596a8944230b857a" + }, + { + "path": "github.com/howeyc/gopass", + "revision": "62ab5a80502a82291f265e6980d72310b8f480d5" + }, + { + "path": "github.com/inconshreveable/go-update", + "revision": "221d034a558b4c21b0624b2a450c076913854a57" + }, + { + "path": "github.com/inconshreveable/go-update/download", + "revision": "221d034a558b4c21b0624b2a450c076913854a57" + }, + { + "path": "github.com/jingweno/go-sawyer", + "revision": "1999ae5763d678f3ce1112cf1fda7c7e9cf2aadf" + }, + { + "path": "github.com/jingweno/go-sawyer/hypermedia", + "revision": "1999ae5763d678f3ce1112cf1fda7c7e9cf2aadf" + }, + { + "path": "github.com/jingweno/go-sawyer/mediaheader", + "revision": "1999ae5763d678f3ce1112cf1fda7c7e9cf2aadf" + }, + { + "path": "github.com/jingweno/go-sawyer/mediatype", + "revision": "1999ae5763d678f3ce1112cf1fda7c7e9cf2aadf" + }, + { + "comment": "0.1-14-g0a85813", + "path": "github.com/jtacoma/uritemplates", + "revision": "0a85813ecac22e3cbe916ab9480b33f2f4a06b2e" + }, + { + "path": "github.com/kr/binarydist", + "revision": "9955b0ab8708602d411341e55fffd7e0700f86bd" + }, + { + "comment": "go.weekly.2011-12-22-24-gf31442d", + "path": "github.com/kr/pretty", + "revision": "f31442d60e51465c69811e2107ae978868dbea5c" + }, + { + "path": "github.com/kr/text", + "revision": "6807e777504f54ad073ecef66747de158294b639" + }, + { + "path": "github.com/mattn/go-isatty", + "revision": "6152ce208cfa13d58f065348a3312b4160fb98d1" + }, + { + "comment": "v0.4.0-97-g6909930", + "path": "github.com/octokit/go-octokit/octokit", + "revision": "69099306b45af55301f9328f52d48338274f8d7d" + }, + { + "path": "github.com/ogier/pflag", + "revision": "e4f7d00f344b0954fa3791a8527d10ba7334eceb" + }, + { + "comment": "null-236", + "path": "golang.org/x/crypto/ssh/terminal", + "revision": "69e2a90ed92d03812364aeb947b7068dc42e561e" + }, + { + "path": "gopkg.in/check.v1", + "revision": "4f90aeace3a26ad7021961c297b22c42160c7b25", + "revisionTime": "2016-01-05T14:49:36-02:00" + }, + { + "path": "gopkg.in/yaml.v1", + "revision": "9f9df34309c04878acc86042b16630b0f696e1de" + } + ] +} From fd9c99138c9e181dd6f2b201dc9e1968edef6713 Mon Sep 17 00:00:00 2001 From: Benjamin Guillet Date: Wed, 13 Jan 2016 17:00:37 -0800 Subject: [PATCH 2/5] Update makefile. 
--- Makefile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Makefile b/Makefile index e9af799..d447099 100644 --- a/Makefile +++ b/Makefile @@ -1,10 +1,10 @@ .PHONY: cmd cmd: - godep go build -o build/deploy ./cmd/deploy + go build -o build/deploy ./cmd/deploy test: - godep go test -race ./... + go test -race $(shell go list ./... | grep -v /vendor/) release: ./scripts/release $(VERSION) From 1fffc7fe274775eae776d4e7a6721b0d254f43a1 Mon Sep 17 00:00:00 2001 From: Benjamin Guillet Date: Wed, 13 Jan 2016 17:00:42 -0800 Subject: [PATCH 3/5] Update readme. --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index c1636b0..76f2797 100644 --- a/README.md +++ b/README.md @@ -9,7 +9,7 @@ A small Go program for creating **[GitHub Deployments](https://developer.github. You can grab the latest release **[here](https://github.com/remind101/deploy/releases)** -Or if you have a working Go 1.4 environment: +Or if you have a working Go 1.5 environment and the `GO15VENDOREXPERIMENT` variable enabled: ``` go get -u github.com/remind101/deploy/cmd/deploy From 62eef3beb2a35fc253341d2483a134874ae34c80 Mon Sep 17 00:00:00 2001 From: Benjamin Guillet Date: Wed, 13 Jan 2016 17:06:54 -0800 Subject: [PATCH 4/5] Get rid of Travis. --- .travis.yml | 7 ------- README.md | 1 - 2 files changed, 8 deletions(-) delete mode 100644 .travis.yml diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 27bd227..0000000 --- a/.travis.yml +++ /dev/null @@ -1,7 +0,0 @@ -language: go - -go: - - 1.4 - -script: - - go test -race ./... diff --git a/README.md b/README.md index 76f2797..4830b7b 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,5 @@ # GitHub Deployments CLI -[![Build Status](https://travis-ci.org/remind101/deploy.svg?branch=master)](https://travis-ci.org/remind101/deploy) [![Circle CI](https://circleci.com/gh/remind101/deploy.svg?style=svg)](https://circleci.com/gh/remind101/deploy) A small Go program for creating **[GitHub Deployments](https://developer.github.com/v3/repos/deployments/)**. From 2209e162ca593b39e2e648551dd0d410c2f530ed Mon Sep 17 00:00:00 2001 From: Benjamin Guillet Date: Wed, 13 Jan 2016 17:07:08 -0800 Subject: [PATCH 5/5] Set up CircleCI for Go1.5 vendoring. --- Makefile | 3 +++ circle.yml | 20 ++++++++++++++++++++ 2 files changed, 23 insertions(+) create mode 100644 circle.yml diff --git a/Makefile b/Makefile index d447099..c56718c 100644 --- a/Makefile +++ b/Makefile @@ -3,6 +3,9 @@ cmd: go build -o build/deploy ./cmd/deploy +vet: + go vet $(shell go list ./... | grep -v /vendor/) + test: go test -race $(shell go list ./... | grep -v /vendor/) diff --git a/circle.yml b/circle.yml new file mode 100644 index 0000000..d63913a --- /dev/null +++ b/circle.yml @@ -0,0 +1,20 @@ +machine: + timezone: America/Los_Angeles + environment: + GO15VENDOREXPERIMENT: 1 + +checkout: + post: + - rm -rf ~/.go_workspace/src/github.com/remind101 + - mkdir -p ~/.go_workspace/src/github.com/remind101 + - cp -R ~/deploy ~/.go_workspace/src/github.com/remind101/deploy + +dependencies: + override: + - cd ~/.go_workspace/src/github.com/remind101/deploy + - go install -a -race std + - go install github.com/remind101/deploy/... + +test: + override: + - cd ~/.go_workspace/src/github.com/remind101/deploy && make test && make vet
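
Note: the vendored gopkg.in/check.v1 package above is wired into `go test` through the `Suite` and `TestingT` functions shown in run.go. The following is a minimal, illustrative sketch of how a consuming package typically registers a suite; it is not part of this patch series, and the names `mypackage_test`, `MySuite`, and `TestSomething` are placeholders.

```
package mypackage_test

import (
	"testing"

	. "gopkg.in/check.v1"
)

// Hook check.v1 into the standard "go test" runner; TestingT runs every
// suite registered via Suite and reports failures back to *testing.T.
func Test(t *testing.T) { TestingT(t) }

// MySuite is a placeholder suite; any method starting with "Test" is
// treated as a test method.
type MySuite struct{}

var _ = Suite(&MySuite{})

func (s *MySuite) TestSomething(c *C) {
	c.Assert(1+1, Equals, 2)
}
```

Suites can also be narrowed at the command line with the `-check.f` filter flag defined in run.go (for example `go test -check.f 'MySuite.TestSomething'`), or run programmatically with `Run(&MySuite{}, &RunConf{...})` as the tests in run_test.go do.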