diff --git a/Makefile b/Makefile index ce03adb0e..3a7e128d1 100644 --- a/Makefile +++ b/Makefile @@ -73,3 +73,15 @@ integration: build $(GOBIN)/commander $(MAKE) integration-cleanup; \ exit $$ret .PHONY: integration + +build-doc: + rm ./docs/auth0_*.md + go run ./cmd/build_doc + mv ./docs/auth0.md ./docs/index.md +.PHONY: build-doc + +# Start the doc site locally for testing purposes only +# requires https://jekyllrb.com/docs/installation/ +start-doc: build-doc + @cd docs && bundle exec jekyll serve +.PHONY: start-doc diff --git a/cmd/build_doc/main.go b/cmd/build_doc/main.go new file mode 100644 index 000000000..a902a5659 --- /dev/null +++ b/cmd/build_doc/main.go @@ -0,0 +1,20 @@ +package main + +import ( + "github.com/auth0/auth0-cli/internal/cli" + "github.com/joeshaw/envdecode" +) + +func main() { + var cfg struct { + Path string `env:"AUTH0_CLI_DOCS_PATH,default=./docs/"` + } + if err := envdecode.StrictDecode(&cfg); err != nil { + panic(err) + } + + err := cli.BuildDoc(cfg.Path) + if err != nil { + panic(err) + } +} diff --git a/docs/.gitignore b/docs/.gitignore new file mode 100644 index 000000000..f40fbd8ba --- /dev/null +++ b/docs/.gitignore @@ -0,0 +1,5 @@ +_site +.sass-cache +.jekyll-cache +.jekyll-metadata +vendor diff --git a/docs/404.html b/docs/404.html new file mode 100644 index 000000000..086a5c9ea --- /dev/null +++ b/docs/404.html @@ -0,0 +1,25 @@ +--- +permalink: /404.html +layout: default +--- + + + +
+<div class="container">
+  <h1>404</h1>
+
+  <p><strong>Page not found :(</strong></p>
+
+  <p>The requested page could not be found.</p>
+</div>
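The entry point in cmd/build_doc/main.go delegates to cli.BuildDoc, whose implementation is not included in this diff. Below is a minimal sketch of what such a generator typically looks like with cobra's doc package; rootCmd is a hypothetical stand-in for the real auth0 root command, while the front-matter prepender and the /auth0-cli/ link handler simply mirror the generated pages shown in this patch (the Makefile later renames auth0.md to index.md):

```go
// Illustrative sketch only: not the actual internal/cli implementation.
// rootCmd below is a hypothetical placeholder for the real auth0 root command.
package cli

import (
	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

var rootCmd = &cobra.Command{Use: "auth0", Short: "Supercharge your development workflow."}

// BuildDoc writes one markdown page per command into path.
func BuildDoc(path string) error {
	prepender := func(filename string) string {
		// Every generated page in docs/ starts with this Jekyll front matter.
		return "---\nlayout: default\n---\n"
	}
	linkHandler := func(name string) string {
		// SEE ALSO links to the root command point at the site index,
		// since the Makefile renames docs/auth0.md to docs/index.md.
		if name == "auth0.md" {
			return "/auth0-cli/"
		}
		return name
	}
	return doc.GenMarkdownTreeCustom(rootCmd, path, prepender, linkHandler)
}
```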
diff --git a/docs/Gemfile b/docs/Gemfile new file mode 100644 index 000000000..ef1e3d14d --- /dev/null +++ b/docs/Gemfile @@ -0,0 +1,34 @@ +source "https://rubygems.org" +# Hello! This is where you manage which Jekyll version is used to run. +# When you want to use a different version, change it below, save the +# file and run `bundle install`. Run Jekyll with `bundle exec`, like so: +# +# bundle exec jekyll serve +# +# This will help ensure the proper Jekyll version is running. +# Happy Jekylling! +#gem "jekyll", "~> 4.2.0" +# This is the default theme for new Jekyll sites. You may change this to anything you like. +gem "minima", "~> 2.5" +# If you want to use GitHub Pages, remove the "gem "jekyll"" above and +# uncomment the line below. To upgrade, run `bundle update github-pages`. +# gem "github-pages", group: :jekyll_plugins +# If you have any plugins, put them here! +group :jekyll_plugins do + gem "jekyll-feed", "~> 0.12" +end + +# Windows and JRuby does not include zoneinfo files, so bundle the tzinfo-data gem +# and associated library. +platforms :mingw, :x64_mingw, :mswin, :jruby do + gem "tzinfo", "~> 1.2" + gem "tzinfo-data" +end + +gem "github-pages", "~> 215", group: :jekyll_plugins + + + + + +gem "webrick", "~> 1.7" diff --git a/docs/Gemfile.lock b/docs/Gemfile.lock new file mode 100644 index 000000000..48e78bebe --- /dev/null +++ b/docs/Gemfile.lock @@ -0,0 +1,277 @@ +GEM + remote: https://rubygems.org/ + specs: + activesupport (6.0.4) + concurrent-ruby (~> 1.0, >= 1.0.2) + i18n (>= 0.7, < 2) + minitest (~> 5.1) + tzinfo (~> 1.1) + zeitwerk (~> 2.2, >= 2.2.2) + addressable (2.7.0) + public_suffix (>= 2.0.2, < 5.0) + coffee-script (2.4.1) + coffee-script-source + execjs + coffee-script-source (1.11.1) + colorator (1.1.0) + commonmarker (0.17.13) + ruby-enum (~> 0.5) + concurrent-ruby (1.1.9) + dnsruby (1.61.7) + simpleidn (~> 0.1) + em-websocket (0.5.2) + eventmachine (>= 0.12.9) + http_parser.rb (~> 0.6.0) + ethon (0.14.0) + ffi (>= 1.15.0) + eventmachine (1.2.7) + execjs (2.8.1) + faraday (1.4.3) + faraday-em_http (~> 1.0) + faraday-em_synchrony (~> 1.0) + faraday-excon (~> 1.1) + faraday-net_http (~> 1.0) + faraday-net_http_persistent (~> 1.1) + multipart-post (>= 1.2, < 3) + ruby2_keywords (>= 0.0.4) + faraday-em_http (1.0.0) + faraday-em_synchrony (1.0.0) + faraday-excon (1.1.0) + faraday-net_http (1.0.1) + faraday-net_http_persistent (1.1.0) + ffi (1.15.3) + forwardable-extended (2.6.0) + gemoji (3.0.1) + github-pages (215) + github-pages-health-check (= 1.17.2) + jekyll (= 3.9.0) + jekyll-avatar (= 0.7.0) + jekyll-coffeescript (= 1.1.1) + jekyll-commonmark-ghpages (= 0.1.6) + jekyll-default-layout (= 0.1.4) + jekyll-feed (= 0.15.1) + jekyll-gist (= 1.5.0) + jekyll-github-metadata (= 2.13.0) + jekyll-mentions (= 1.6.0) + jekyll-optional-front-matter (= 0.3.2) + jekyll-paginate (= 1.1.0) + jekyll-readme-index (= 0.3.0) + jekyll-redirect-from (= 0.16.0) + jekyll-relative-links (= 0.6.1) + jekyll-remote-theme (= 0.4.3) + jekyll-sass-converter (= 1.5.2) + jekyll-seo-tag (= 2.7.1) + jekyll-sitemap (= 1.4.0) + jekyll-swiss (= 1.0.0) + jekyll-theme-architect (= 0.1.1) + jekyll-theme-cayman (= 0.1.1) + jekyll-theme-dinky (= 0.1.1) + jekyll-theme-hacker (= 0.1.2) + jekyll-theme-leap-day (= 0.1.1) + jekyll-theme-merlot (= 0.1.1) + jekyll-theme-midnight (= 0.1.1) + jekyll-theme-minimal (= 0.1.1) + jekyll-theme-modernist (= 0.1.1) + jekyll-theme-primer (= 0.5.4) + jekyll-theme-slate (= 0.1.1) + jekyll-theme-tactile (= 0.1.1) + jekyll-theme-time-machine (= 0.1.1) + 
jekyll-titles-from-headings (= 0.5.3) + jemoji (= 0.12.0) + kramdown (= 2.3.1) + kramdown-parser-gfm (= 1.1.0) + liquid (= 4.0.3) + mercenary (~> 0.3) + minima (= 2.5.1) + nokogiri (>= 1.10.4, < 2.0) + rouge (= 3.26.0) + terminal-table (~> 1.4) + github-pages-health-check (1.17.2) + addressable (~> 2.3) + dnsruby (~> 1.60) + octokit (~> 4.0) + public_suffix (>= 2.0.2, < 5.0) + typhoeus (~> 1.3) + html-pipeline (2.14.0) + activesupport (>= 2) + nokogiri (>= 1.4) + http_parser.rb (0.6.0) + i18n (0.9.5) + concurrent-ruby (~> 1.0) + jekyll (3.9.0) + addressable (~> 2.4) + colorator (~> 1.0) + em-websocket (~> 0.5) + i18n (~> 0.7) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 2.0) + kramdown (>= 1.17, < 3) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 4) + safe_yaml (~> 1.0) + jekyll-avatar (0.7.0) + jekyll (>= 3.0, < 5.0) + jekyll-coffeescript (1.1.1) + coffee-script (~> 2.2) + coffee-script-source (~> 1.11.1) + jekyll-commonmark (1.3.1) + commonmarker (~> 0.14) + jekyll (>= 3.7, < 5.0) + jekyll-commonmark-ghpages (0.1.6) + commonmarker (~> 0.17.6) + jekyll-commonmark (~> 1.2) + rouge (>= 2.0, < 4.0) + jekyll-default-layout (0.1.4) + jekyll (~> 3.0) + jekyll-feed (0.15.1) + jekyll (>= 3.7, < 5.0) + jekyll-gist (1.5.0) + octokit (~> 4.2) + jekyll-github-metadata (2.13.0) + jekyll (>= 3.4, < 5.0) + octokit (~> 4.0, != 4.4.0) + jekyll-mentions (1.6.0) + html-pipeline (~> 2.3) + jekyll (>= 3.7, < 5.0) + jekyll-optional-front-matter (0.3.2) + jekyll (>= 3.0, < 5.0) + jekyll-paginate (1.1.0) + jekyll-readme-index (0.3.0) + jekyll (>= 3.0, < 5.0) + jekyll-redirect-from (0.16.0) + jekyll (>= 3.3, < 5.0) + jekyll-relative-links (0.6.1) + jekyll (>= 3.3, < 5.0) + jekyll-remote-theme (0.4.3) + addressable (~> 2.0) + jekyll (>= 3.5, < 5.0) + jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0) + rubyzip (>= 1.3.0, < 3.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-seo-tag (2.7.1) + jekyll (>= 3.8, < 5.0) + jekyll-sitemap (1.4.0) + jekyll (>= 3.7, < 5.0) + jekyll-swiss (1.0.0) + jekyll-theme-architect (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-cayman (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-dinky (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-hacker (0.1.2) + jekyll (> 3.5, < 5.0) + jekyll-seo-tag (~> 2.0) + jekyll-theme-leap-day (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-merlot (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-midnight (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-minimal (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-modernist (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-primer (0.5.4) + jekyll (> 3.5, < 5.0) + jekyll-github-metadata (~> 2.9) + jekyll-seo-tag (~> 2.0) + jekyll-theme-slate (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-tactile (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-theme-time-machine (0.1.1) + jekyll (~> 3.5) + jekyll-seo-tag (~> 2.0) + jekyll-titles-from-headings (0.5.3) + jekyll (>= 3.3, < 5.0) + jekyll-watch (2.2.1) + listen (~> 3.0) + jemoji (0.12.0) + gemoji (~> 3.0) + html-pipeline (~> 2.2) + jekyll (>= 3.0, < 5.0) + kramdown (2.3.1) + rexml + kramdown-parser-gfm (1.1.0) + kramdown (~> 2.0) + liquid (4.0.3) + listen (3.5.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + mercenary (0.3.6) + minima (2.5.1) + jekyll (>= 3.5, < 5.0) + jekyll-feed (~> 0.9) + jekyll-seo-tag (~> 2.1) + 
minitest (5.14.4) + multipart-post (2.1.1) + nokogiri (1.11.7-x86_64-darwin) + racc (~> 1.4) + octokit (4.21.0) + faraday (>= 0.9) + sawyer (~> 0.8.0, >= 0.5.3) + pathutil (0.16.2) + forwardable-extended (~> 2.6) + public_suffix (4.0.6) + racc (1.5.2) + rb-fsevent (0.11.0) + rb-inotify (0.10.1) + ffi (~> 1.0) + rexml (3.2.5) + rouge (3.26.0) + ruby-enum (0.9.0) + i18n + ruby2_keywords (0.0.4) + rubyzip (2.3.0) + safe_yaml (1.0.5) + sass (3.7.4) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + sawyer (0.8.2) + addressable (>= 2.3.5) + faraday (> 0.8, < 2.0) + simpleidn (0.2.1) + unf (~> 0.1.4) + terminal-table (1.8.0) + unicode-display_width (~> 1.1, >= 1.1.1) + thread_safe (0.3.6) + typhoeus (1.4.0) + ethon (>= 0.9.0) + tzinfo (1.2.9) + thread_safe (~> 0.1) + unf (0.1.4) + unf_ext + unf_ext (0.0.7.7) + unicode-display_width (1.7.0) + webrick (1.7.0) + zeitwerk (2.4.2) + +PLATFORMS + x86_64-darwin-20 + +DEPENDENCIES + github-pages (~> 215) + jekyll-feed (~> 0.12) + minima (~> 2.5) + tzinfo (~> 1.2) + tzinfo-data + webrick (~> 1.7) + +BUNDLED WITH + 2.2.21 diff --git a/docs/_config.yml b/docs/_config.yml new file mode 100644 index 000000000..2a057ad1c --- /dev/null +++ b/docs/_config.yml @@ -0,0 +1,17 @@ +# Jekyll configuration file +# see: https://jekyllrb.com/docs/configuration/ + +title: Auth0 CLI +description: >- + auth0 is the command line to supercharge your development workflow. +baseurl: "/auth0-cli" +url: "" # the base hostname & protocol for your site, e.g. http://example.com +twitter_username: auth0 +github_username: auth0 + +# Build settings +theme: minima + +header_pages: + - about.markdown + diff --git a/docs/about.markdown b/docs/about.markdown new file mode 100644 index 000000000..0e92e45f0 --- /dev/null +++ b/docs/about.markdown @@ -0,0 +1,7 @@ +--- +layout: page +title: About +permalink: /about/ +--- + +Build, test, troubleshoot and manage your integration with [Auth0](http://auth0.com/) directly from your terminal. diff --git a/docs/auth0_actions.md b/docs/auth0_actions.md new file mode 100644 index 000000000..56f72b4a0 --- /dev/null +++ b/docs/auth0_actions.md @@ -0,0 +1,38 @@ +--- +layout: default +--- +## auth0 actions + +Manage resources for actions + +### Synopsis + +Manage resources for actions. + +### Options + +``` + -h, --help help for actions +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0](/auth0-cli/) - Supercharge your development workflow. +* [auth0 actions create](auth0_actions_create.md) - Create a new action +* [auth0 actions delete](auth0_actions_delete.md) - Delete an action +* [auth0 actions list](auth0_actions_list.md) - List your actions +* [auth0 actions open](auth0_actions_open.md) - Open action details page in the Auth0 Dashboard +* [auth0 actions show](auth0_actions_show.md) - Show an action +* [auth0 actions update](auth0_actions_update.md) - Update an action + diff --git a/docs/auth0_actions_create.md b/docs/auth0_actions_create.md new file mode 100644 index 000000000..dd802e7ea --- /dev/null +++ b/docs/auth0_actions_create.md @@ -0,0 +1,51 @@ +--- +layout: default +--- +## auth0 actions create + +Create a new action + +### Synopsis + +Create a new action. 
+ +``` +auth0 actions create [flags] +``` + +### Examples + +``` +auth0 actions create +auth0 actions create --name myaction +auth0 actions create --n myaction --trigger post-login +auth0 actions create --n myaction -t post-login -d "lodash=4.0.0" -d "uuid=8.0.0" +auth0 actions create --n myaction -t post-login -d "lodash=4.0.0" -s "API_KEY=value" -s "SECRET=value +``` + +### Options + +``` + -c, --code string Code content for the action. + -d, --dependency stringToString Third party npm module, and it version, that the action depends on. (default []) + -h, --help help for create + -n, --name string Name of the action. + -s, --secret stringToString Secret to be used in the action. (default []) + -t, --trigger string Trigger of the action. At this time, an action can only target a single trigger at a time. +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 actions](auth0_actions.md) - Manage resources for actions + diff --git a/docs/auth0_actions_delete.md b/docs/auth0_actions_delete.md new file mode 100644 index 000000000..6c457f118 --- /dev/null +++ b/docs/auth0_actions_delete.md @@ -0,0 +1,43 @@ +--- +layout: default +--- +## auth0 actions delete + +Delete an action + +### Synopsis + +Delete an action. + +``` +auth0 actions delete [flags] +``` + +### Examples + +``` +auth0 actions delete +auth0 actions delete +``` + +### Options + +``` + -h, --help help for delete +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 actions](auth0_actions.md) - Manage resources for actions + diff --git a/docs/auth0_actions_list.md b/docs/auth0_actions_list.md new file mode 100644 index 000000000..f68e302ea --- /dev/null +++ b/docs/auth0_actions_list.md @@ -0,0 +1,44 @@ +--- +layout: default +--- +## auth0 actions list + +List your actions + +### Synopsis + +List your existing actions. To create one try: +auth0 actions create + +``` +auth0 actions list [flags] +``` + +### Examples + +``` +auth0 actions list +auth0 actions ls +``` + +### Options + +``` + -h, --help help for list +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 actions](auth0_actions.md) - Manage resources for actions + diff --git a/docs/auth0_actions_open.md b/docs/auth0_actions_open.md new file mode 100644 index 000000000..99e152367 --- /dev/null +++ b/docs/auth0_actions_open.md @@ -0,0 +1,42 @@ +--- +layout: default +--- +## auth0 actions open + +Open action details page in the Auth0 Dashboard + +### Synopsis + +Open action details page in the Auth0 Dashboard. + +``` +auth0 actions open [flags] +``` + +### Examples + +``` +auth0 actions open +``` + +### Options + +``` + -h, --help help for open +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. 
+ --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 actions](auth0_actions.md) - Manage resources for actions + diff --git a/docs/auth0_actions_show.md b/docs/auth0_actions_show.md new file mode 100644 index 000000000..790b9bb07 --- /dev/null +++ b/docs/auth0_actions_show.md @@ -0,0 +1,43 @@ +--- +layout: default +--- +## auth0 actions show + +Show an action + +### Synopsis + +Show an action. + +``` +auth0 actions show [flags] +``` + +### Examples + +``` +auth0 actions show +auth0 actions show +``` + +### Options + +``` + -h, --help help for show +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 actions](auth0_actions.md) - Manage resources for actions + diff --git a/docs/auth0_actions_update.md b/docs/auth0_actions_update.md new file mode 100644 index 000000000..4de2e5b9d --- /dev/null +++ b/docs/auth0_actions_update.md @@ -0,0 +1,51 @@ +--- +layout: default +--- +## auth0 actions update + +Update an action + +### Synopsis + +Update an action. + +``` +auth0 actions update [flags] +``` + +### Examples + +``` +auth0 actions update +auth0 actions update --name myaction +auth0 actions update --n myaction --trigger post-login +auth0 actions update --n myaction -t post-login -d "lodash=4.0.0" -d "uuid=8.0.0" +auth0 actions update --n myaction -t post-login -d "lodash=4.0.0" -s "API_KEY=value" -s "SECRET=value +``` + +### Options + +``` + -c, --code string Code content for the action. + -d, --dependency stringToString Third party npm module, and it version, that the action depends on. (default []) + -h, --help help for update + -n, --name string Name of the action. + -s, --secret stringToString Secret to be used in the action. (default []) + -t, --trigger string Trigger of the action. At this time, an action can only target a single trigger at a time. +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 actions](auth0_actions.md) - Manage resources for actions + diff --git a/docs/auth0_apis.md b/docs/auth0_apis.md index c5088ad5b..195b4f598 100644 --- a/docs/auth0_apis.md +++ b/docs/auth0_apis.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 apis +Manage resources for APIs + +### Synopsis + Manage resources for APIs. -### Flags +### Options ``` -h, --help help for apis ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,11 +28,12 @@ Manage resources for APIs. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. 
* [auth0 apis create](auth0_apis_create.md) - Create a new API * [auth0 apis delete](auth0_apis_delete.md) - Delete an API * [auth0 apis list](auth0_apis_list.md) - List your APIs -* [auth0 apis open](auth0_apis_open.md) - Open API settings page in Auth0 Manage +* [auth0 apis open](auth0_apis_open.md) - Open API settings page in the Auth0 Dashboard * [auth0 apis scopes](auth0_apis_scopes.md) - Manage resources for API scopes * [auth0 apis show](auth0_apis_show.md) - Show an API * [auth0 apis update](auth0_apis_update.md) - Update an API + diff --git a/docs/auth0_apis_create.md b/docs/auth0_apis_create.md index 2ee18bf13..9c70825e2 100644 --- a/docs/auth0_apis_create.md +++ b/docs/auth0_apis_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apis create +Create a new API + +### Synopsis + Create a new API. ``` @@ -16,7 +23,7 @@ auth0 apis create -n myapi --token-expiration 6100 auth0 apis create -n myapi -e 6100 --offline-access=true ``` -### Flags +### Options ``` -h, --help help for create @@ -27,7 +34,7 @@ auth0 apis create -n myapi -e 6100 --offline-access=true -l, --token-lifetime int The amount of time in seconds that the token will be valid after being issued. Default value is 86400 seconds (1 day). ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -41,3 +48,4 @@ auth0 apis create -n myapi -e 6100 --offline-access=true ### SEE ALSO * [auth0 apis](auth0_apis.md) - Manage resources for APIs + diff --git a/docs/auth0_apis_delete.md b/docs/auth0_apis_delete.md index 685e935d8..cae49a2a9 100644 --- a/docs/auth0_apis_delete.md +++ b/docs/auth0_apis_delete.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apis delete +Delete an API + +### Synopsis + Delete an API. ``` @@ -13,13 +20,13 @@ auth0 apis delete auth0 apis delete ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 apis delete ### SEE ALSO * [auth0 apis](auth0_apis.md) - Manage resources for APIs + diff --git a/docs/auth0_apis_list.md b/docs/auth0_apis_list.md index 93ac38b4e..c042a7e97 100644 --- a/docs/auth0_apis_list.md +++ b/docs/auth0_apis_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apis list +List your APIs + +### Synopsis + List your existing APIs. To create one try: auth0 apis create @@ -14,13 +21,13 @@ auth0 apis list auth0 apis ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 apis ls ### SEE ALSO * [auth0 apis](auth0_apis.md) - Manage resources for APIs + diff --git a/docs/auth0_apis_open.md b/docs/auth0_apis_open.md index 75108a74d..59aa52727 100644 --- a/docs/auth0_apis_open.md +++ b/docs/auth0_apis_open.md @@ -1,6 +1,13 @@ +--- +layout: default +--- ## auth0 apis open -Open API settings page in Auth0 Manage. +Open API settings page in the Auth0 Dashboard + +### Synopsis + +Open API settings page in the Auth0 Dashboard. ``` auth0 apis open [flags] @@ -13,13 +20,13 @@ auth0 apis open auth0 apis open ``` -### Flags +### Options ``` -h, --help help for open ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -33,3 +40,4 @@ auth0 apis open ### SEE ALSO * [auth0 apis](auth0_apis.md) - Manage resources for APIs + diff --git a/docs/auth0_apis_scopes.md b/docs/auth0_apis_scopes.md index f0a8b49fa..c6ac2a0be 100644 --- a/docs/auth0_apis_scopes.md +++ b/docs/auth0_apis_scopes.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 apis scopes +Manage resources for API scopes + +### Synopsis + Manage resources for API scopes. -### Flags +### Options ``` -h, --help help for scopes ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -23,3 +30,4 @@ Manage resources for API scopes. * [auth0 apis](auth0_apis.md) - Manage resources for APIs * [auth0 apis scopes list](auth0_apis_scopes_list.md) - List the scopes of an API + diff --git a/docs/auth0_apis_scopes_list.md b/docs/auth0_apis_scopes_list.md index de7fb110d..fa25705b0 100644 --- a/docs/auth0_apis_scopes_list.md +++ b/docs/auth0_apis_scopes_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apis scopes list +List the scopes of an API + +### Synopsis + List the scopes of an API. ``` @@ -13,13 +20,13 @@ auth0 apis scopes list auth0 apis scopes ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 apis scopes ls ### SEE ALSO * [auth0 apis scopes](auth0_apis_scopes.md) - Manage resources for API scopes + diff --git a/docs/auth0_apis_show.md b/docs/auth0_apis_show.md index b6070db6c..a827230c0 100644 --- a/docs/auth0_apis_show.md +++ b/docs/auth0_apis_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apis show +Show an API + +### Synopsis + Show an API. ``` @@ -13,13 +20,13 @@ auth0 apis show auth0 apis show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 apis show ### SEE ALSO * [auth0 apis](auth0_apis.md) - Manage resources for APIs + diff --git a/docs/auth0_apis_update.md b/docs/auth0_apis_update.md index 4afb6b6dc..e5da57a60 100644 --- a/docs/auth0_apis_update.md +++ b/docs/auth0_apis_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apis update +Update an API + +### Synopsis + Update an API. ``` @@ -16,7 +23,7 @@ auth0 apis update -n myapi --token-expiration 6100 auth0 apis update -n myapi -e 6100 --offline-access=true ``` -### Flags +### Options ``` -h, --help help for update @@ -26,7 +33,7 @@ auth0 apis update -n myapi -e 6100 --offline-access=true -l, --token-lifetime int The amount of time in seconds that the token will be valid after being issued. Default value is 86400 seconds (1 day). ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -40,3 +47,4 @@ auth0 apis update -n myapi -e 6100 --offline-access=true ### SEE ALSO * [auth0 apis](auth0_apis.md) - Manage resources for APIs + diff --git a/docs/auth0_apps.md b/docs/auth0_apps.md index 86c69483b..3fc5e88a8 100644 --- a/docs/auth0_apps.md +++ b/docs/auth0_apps.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 apps +Manage resources for applications + +### Synopsis + Manage resources for applications. -### Flags +### Options ``` -h, --help help for apps ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -21,11 +28,12 @@ Manage resources for applications. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 apps create](auth0_apps_create.md) - Create a new application * [auth0 apps delete](auth0_apps_delete.md) - Delete an application * [auth0 apps list](auth0_apps_list.md) - List your applications -* [auth0 apps open](auth0_apps_open.md) - Open application settings page in Auth0 Manage +* [auth0 apps open](auth0_apps_open.md) - Open application settings page in the Auth0 Dashboard * [auth0 apps show](auth0_apps_show.md) - Show an application * [auth0 apps update](auth0_apps_update.md) - Update an application * [auth0 apps use](auth0_apps_use.md) - Choose a default application for the Auth0 CLI + diff --git a/docs/auth0_apps_create.md b/docs/auth0_apps_create.md index f38bcd3ef..c5592e43b 100644 --- a/docs/auth0_apps_create.md +++ b/docs/auth0_apps_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apps create +Create a new application + +### Synopsis + Create a new application. ``` @@ -15,7 +22,7 @@ auth0 apps create -n myapp --type [native|spa|regular|m2m] auth0 apps create -n myapp -t [native|spa|regular|m2m] --description ``` -### Flags +### Options ``` -a, --auth-method string Defines the requested authentication method for the token endpoint. Possible values are 'None' (public application without a client secret), 'Post' (application uses HTTP POST parameters) or 'Basic' (application uses HTTP Basic). @@ -35,7 +42,7 @@ auth0 apps create -n myapp -t [native|spa|regular|m2m] --description ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 apps delete ### SEE ALSO * [auth0 apps](auth0_apps.md) - Manage resources for applications + diff --git a/docs/auth0_apps_list.md b/docs/auth0_apps_list.md index 9ea67f04c..92569e3d2 100644 --- a/docs/auth0_apps_list.md +++ b/docs/auth0_apps_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apps list +List your applications + +### Synopsis + List your existing applications. To create one try: auth0 apps create @@ -14,14 +21,14 @@ auth0 apps list auth0 apps ls ``` -### Flags +### Options ``` -h, --help help for list -r, --reveal Display the Client Secret as part of the command output. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -35,3 +42,4 @@ auth0 apps ls ### SEE ALSO * [auth0 apps](auth0_apps.md) - Manage resources for applications + diff --git a/docs/auth0_apps_open.md b/docs/auth0_apps_open.md index b029d6ae3..b90b65039 100644 --- a/docs/auth0_apps_open.md +++ b/docs/auth0_apps_open.md @@ -1,6 +1,13 @@ +--- +layout: default +--- ## auth0 apps open -Open application settings page in Auth0 Manage. +Open application settings page in the Auth0 Dashboard + +### Synopsis + +Open application settings page in the Auth0 Dashboard. ``` auth0 apps open [flags] @@ -12,13 +19,13 @@ auth0 apps open [flags] auth0 apps open ``` -### Flags +### Options ``` -h, --help help for open ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -32,3 +39,4 @@ auth0 apps open ### SEE ALSO * [auth0 apps](auth0_apps.md) - Manage resources for applications + diff --git a/docs/auth0_apps_show.md b/docs/auth0_apps_show.md index b6a632b6b..534d7af8d 100644 --- a/docs/auth0_apps_show.md +++ b/docs/auth0_apps_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apps show +Show an application + +### Synopsis + Show an application. ``` @@ -13,14 +20,14 @@ auth0 apps show auth0 apps show ``` -### Flags +### Options ``` -h, --help help for show -r, --reveal Display the Client Secret as part of the command output. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 apps show ### SEE ALSO * [auth0 apps](auth0_apps.md) - Manage resources for applications + diff --git a/docs/auth0_apps_update.md b/docs/auth0_apps_update.md index 3e1b9e85a..824267297 100644 --- a/docs/auth0_apps_update.md +++ b/docs/auth0_apps_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apps update +Update an application + +### Synopsis + Update an application. ``` @@ -14,7 +21,7 @@ auth0 apps update --name myapp auth0 apps update -n myapp --type [native|spa|regular|m2m] ``` -### Flags +### Options ``` -a, --auth-method string Defines the requested authentication method for the token endpoint. Possible values are 'None' (public application without a client secret), 'Post' (application uses HTTP POST parameters) or 'Basic' (application uses HTTP Basic). @@ -34,7 +41,7 @@ auth0 apps update -n myapp --type [native|spa|regular|m2m] -w, --web-origins strings Comma-separated list of allowed origins for use with Cross-Origin Authentication, Device Flow, and web message response mode. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -48,3 +55,4 @@ auth0 apps update -n myapp --type [native|spa|regular|m2m] ### SEE ALSO * [auth0 apps](auth0_apps.md) - Manage resources for applications + diff --git a/docs/auth0_apps_use.md b/docs/auth0_apps_use.md index 8a3661e1c..35c1c16c6 100644 --- a/docs/auth0_apps_use.md +++ b/docs/auth0_apps_use.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 apps use +Choose a default application for the Auth0 CLI + +### Synopsis + Specify your preferred application for interaction with the Auth0 CLI. ``` @@ -12,14 +19,14 @@ auth0 apps use [flags] auth0 apps use ``` -### Flags +### Options ``` -h, --help help for use -n, --none Specify none of your apps. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 apps use ### SEE ALSO * [auth0 apps](auth0_apps.md) - Manage resources for applications + diff --git a/docs/auth0_branding.md b/docs/auth0_branding.md index a74977b6d..833382539 100644 --- a/docs/auth0_branding.md +++ b/docs/auth0_branding.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 branding +Manage branding options + +### Synopsis + Manage branding options. -### Flags +### Options ``` -h, --help help for branding ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,8 +28,9 @@ Manage branding options. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. 
* [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains * [auth0 branding show](auth0_branding_show.md) - Display the custom branding settings for Universal Login * [auth0 branding templates](auth0_branding_templates.md) - Manage custom page templates * [auth0 branding update](auth0_branding_update.md) - Update the custom branding settings for Universal Login + diff --git a/docs/auth0_branding_domains.md b/docs/auth0_branding_domains.md index e9b46c9fb..8ff44e67c 100644 --- a/docs/auth0_branding_domains.md +++ b/docs/auth0_branding_domains.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 branding domains +Manage custom domains + +### Synopsis + Manage custom domains. -### Flags +### Options ``` -h, --help help for domains ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -26,4 +33,6 @@ Manage custom domains. * [auth0 branding domains delete](auth0_branding_domains_delete.md) - Delete a custom domain * [auth0 branding domains list](auth0_branding_domains_list.md) - List your custom domains * [auth0 branding domains show](auth0_branding_domains_show.md) - Show a custom domain +* [auth0 branding domains update](auth0_branding_domains_update.md) - Update a custom domain * [auth0 branding domains verify](auth0_branding_domains_verify.md) - Verify a custom domain + diff --git a/docs/auth0_branding_domains_create.md b/docs/auth0_branding_domains_create.md index 32477720f..b8d63c0c4 100644 --- a/docs/auth0_branding_domains_create.md +++ b/docs/auth0_branding_domains_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding domains create +Create a custom domain + +### Synopsis + Create a custom domain. ``` @@ -13,16 +20,18 @@ auth0 branding domains create auth0 branding domains create ``` -### Flags +### Options ``` -d, --domain string Domain name. -h, --help help for create + -i, --ip-header string The HTTP header to fetch the client's IP address. + -p, --policy string The TLS version policy. Can be either 'compatible' or 'recommended'. -t, --type string Custom domain provisioning type. Must be 'auth0' for Auth0-managed certs or 'self' for self-managed certs. -v, --verification string Custom domain verification method. Must be 'txt'. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -36,3 +45,4 @@ auth0 branding domains create ### SEE ALSO * [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains + diff --git a/docs/auth0_branding_domains_delete.md b/docs/auth0_branding_domains_delete.md index 1242ef8a7..8a8b3e987 100644 --- a/docs/auth0_branding_domains_delete.md +++ b/docs/auth0_branding_domains_delete.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding domains delete +Delete a custom domain + +### Synopsis + Delete a custom domain. ``` @@ -13,13 +20,13 @@ auth0 branding domains delete auth0 branding domains delete ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -33,3 +40,4 @@ auth0 branding domains delete ### SEE ALSO * [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains + diff --git a/docs/auth0_branding_domains_list.md b/docs/auth0_branding_domains_list.md index 63168255c..29d58adda 100644 --- a/docs/auth0_branding_domains_list.md +++ b/docs/auth0_branding_domains_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding domains list +List your custom domains + +### Synopsis + List your existing custom domains. To create one try: auth0 branding domains create @@ -14,13 +21,13 @@ auth0 branding domains list auth0 branding domains ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 branding domains ls ### SEE ALSO * [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains + diff --git a/docs/auth0_branding_domains_show.md b/docs/auth0_branding_domains_show.md index cd617b5e9..9c4a8b4d6 100644 --- a/docs/auth0_branding_domains_show.md +++ b/docs/auth0_branding_domains_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding domains show +Show a custom domain + +### Synopsis + Show a custom domain. ``` @@ -13,13 +20,13 @@ auth0 branding domains show auth0 branding domains show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 branding domains show ### SEE ALSO * [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains + diff --git a/docs/auth0_branding_domains_update.md b/docs/auth0_branding_domains_update.md new file mode 100644 index 000000000..cfc32afb1 --- /dev/null +++ b/docs/auth0_branding_domains_update.md @@ -0,0 +1,46 @@ +--- +layout: default +--- +## auth0 branding domains update + +Update a custom domain + +### Synopsis + +Update a custom domain. + +``` +auth0 branding domains update [flags] +``` + +### Examples + +``` +auth0 branding domains update +auth0 branding domains update --policy compatible +auth0 branding domains update -p compatible --ip-header "cf-connecting-ip" +``` + +### Options + +``` + -h, --help help for update + -i, --ip-header string The HTTP header to fetch the client's IP address. + -p, --policy string The TLS version policy. Can be either 'compatible' or 'recommended'. +``` + +### Options inherited from parent commands + +``` + --debug Enable debug mode. + --force Skip confirmation. + --format string Command output format. Options: json. + --no-color Disable colors. + --no-input Disable interactivity. + --tenant string Specific tenant to use. +``` + +### SEE ALSO + +* [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains + diff --git a/docs/auth0_branding_domains_verify.md b/docs/auth0_branding_domains_verify.md index 9e837f6e7..ba836f270 100644 --- a/docs/auth0_branding_domains_verify.md +++ b/docs/auth0_branding_domains_verify.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding domains verify +Verify a custom domain + +### Synopsis + Verify a custom domain. ``` @@ -13,13 +20,13 @@ auth0 branding domains verify auth0 branding domains verify ``` -### Flags +### Options ``` -h, --help help for verify ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -33,3 +40,4 @@ auth0 branding domains verify ### SEE ALSO * [auth0 branding domains](auth0_branding_domains.md) - Manage custom domains + diff --git a/docs/auth0_branding_show.md b/docs/auth0_branding_show.md index 51150fe7a..9a746c175 100644 --- a/docs/auth0_branding_show.md +++ b/docs/auth0_branding_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding show +Display the custom branding settings for Universal Login + +### Synopsis + Display the custom branding settings for Universal Login. ``` @@ -12,13 +19,13 @@ auth0 branding show [flags] auth0 branding show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 branding show ### SEE ALSO * [auth0 branding](auth0_branding.md) - Manage branding options + diff --git a/docs/auth0_branding_templates.md b/docs/auth0_branding_templates.md index bf9c3992c..9054aa191 100644 --- a/docs/auth0_branding_templates.md +++ b/docs/auth0_branding_templates.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 branding templates +Manage custom page templates + +### Synopsis + Manage custom page templates. This requires at least one custom domain to be configured for the tenant. -### Flags +### Options ``` -h, --help help for templates ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -24,3 +31,4 @@ Manage custom page templates. This requires at least one custom domain to be con * [auth0 branding](auth0_branding.md) - Manage branding options * [auth0 branding templates show](auth0_branding_templates_show.md) - Display the custom template for Universal Login * [auth0 branding templates update](auth0_branding_templates_update.md) - Update the custom template for Universal Login + diff --git a/docs/auth0_branding_templates_show.md b/docs/auth0_branding_templates_show.md index 4cea7ddec..709a62b1a 100644 --- a/docs/auth0_branding_templates_show.md +++ b/docs/auth0_branding_templates_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding templates show +Display the custom template for Universal Login + +### Synopsis + Display the custom template for Universal Login. ``` @@ -12,13 +19,13 @@ auth0 branding templates show [flags] auth0 branding templates show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 branding templates show ### SEE ALSO * [auth0 branding templates](auth0_branding_templates.md) - Manage custom page templates + diff --git a/docs/auth0_branding_templates_update.md b/docs/auth0_branding_templates_update.md index 6c2fd2ed1..55a40c567 100644 --- a/docs/auth0_branding_templates_update.md +++ b/docs/auth0_branding_templates_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding templates update +Update the custom template for Universal Login + +### Synopsis + Update the custom template for Universal Login. ``` @@ -12,13 +19,13 @@ auth0 branding templates update [flags] auth0 branding templates update ``` -### Flags +### Options ``` -h, --help help for update ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -32,3 +39,4 @@ auth0 branding templates update ### SEE ALSO * [auth0 branding templates](auth0_branding_templates.md) - Manage custom page templates + diff --git a/docs/auth0_branding_update.md b/docs/auth0_branding_update.md index fbb11aa18..0ea985d18 100644 --- a/docs/auth0_branding_update.md +++ b/docs/auth0_branding_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 branding update +Update the custom branding settings for Universal Login + +### Synopsis + Update the custom branding settings for Universal Login. ``` @@ -14,7 +21,7 @@ auth0 branding update --accent '#B24592' --background '#F2DDEC' auth0 branding update -a '#B24592' -b '#F2DDEC --logo 'https://example.com/logo.png ``` -### Flags +### Options ``` -a, --accent string Accent color. @@ -25,7 +32,7 @@ auth0 branding update -a '#B24592' -b '#F2DDEC --logo 'https://example.com/logo. -l, --logo string URL for the logo. Must use HTTPS. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -39,3 +46,4 @@ auth0 branding update -a '#B24592' -b '#F2DDEC --logo 'https://example.com/logo. ### SEE ALSO * [auth0 branding](auth0_branding.md) - Manage branding options + diff --git a/docs/auth0_completion.md b/docs/auth0_completion.md index da1263fef..0f55fa0ff 100644 --- a/docs/auth0_completion.md +++ b/docs/auth0_completion.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 completion +Setup autocomplete features for this CLI on your terminal + +### Synopsis + completion [bash|zsh|fish|powershell] To load completions: @@ -46,13 +53,13 @@ PS> auth0 completion powershell > auth0.ps1 auth0 completion ``` -### Flags +### Options ``` -h, --help help for completion ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -65,4 +72,5 @@ auth0 completion ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. + diff --git a/docs/auth0_ips.md b/docs/auth0_ips.md index 607b5c09a..27377bf95 100644 --- a/docs/auth0_ips.md +++ b/docs/auth0_ips.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 ips +Manage blocked IP addresses + +### Synopsis + Manage blocked IP addresses. -### Flags +### Options ``` -h, --help help for ips ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,6 +28,7 @@ Manage blocked IP addresses. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 ips check](auth0_ips_check.md) - Check IP address * [auth0 ips unblock](auth0_ips_unblock.md) - Unblock IP address + diff --git a/docs/auth0_ips_check.md b/docs/auth0_ips_check.md index 58f4781f9..348687ad8 100644 --- a/docs/auth0_ips_check.md +++ b/docs/auth0_ips_check.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 ips check +Check IP address + +### Synopsis + Check whether a given IP address is blocked. ``` @@ -12,13 +19,13 @@ auth0 ips check [flags] auth0 ips check ``` -### Flags +### Options ``` -h, --help help for check ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -32,3 +39,4 @@ auth0 ips check ### SEE ALSO * [auth0 ips](auth0_ips.md) - Manage blocked IP addresses + diff --git a/docs/auth0_ips_unblock.md b/docs/auth0_ips_unblock.md index 6481375a1..84677ed75 100644 --- a/docs/auth0_ips_unblock.md +++ b/docs/auth0_ips_unblock.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 ips unblock +Unblock IP address + +### Synopsis + Unblock an IP address which is currently blocked. ``` @@ -12,13 +19,13 @@ auth0 ips unblock [flags] auth0 ips unblock ``` -### Flags +### Options ``` -h, --help help for unblock ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 ips unblock ### SEE ALSO * [auth0 ips](auth0_ips.md) - Manage blocked IP addresses + diff --git a/docs/auth0_login.md b/docs/auth0_login.md index ff212a6c2..7b6d6021b 100644 --- a/docs/auth0_login.md +++ b/docs/auth0_login.md @@ -1,18 +1,25 @@ +--- +layout: default +--- ## auth0 login +Authenticate the Auth0 CLI + +### Synopsis + Sign in to your Auth0 account and authorize the CLI to access the Management API. ``` auth0 login [flags] ``` -### Flags +### Options ``` -h, --help help for login ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -25,4 +32,5 @@ auth0 login [flags] ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. + diff --git a/docs/auth0_logout.md b/docs/auth0_logout.md index 8e806a734..cf134028d 100644 --- a/docs/auth0_logout.md +++ b/docs/auth0_logout.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logout +Log out of a tenant's session + +### Synopsis + Log out of a tenant's session. ``` @@ -12,13 +19,13 @@ auth0 logout [flags] auth0 logout ``` -### Flags +### Options ``` -h, --help help for logout ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -31,4 +38,5 @@ auth0 logout ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. + diff --git a/docs/auth0_logs.md b/docs/auth0_logs.md index 4d8124ba6..558ed1567 100644 --- a/docs/auth0_logs.md +++ b/docs/auth0_logs.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 logs +View tenant logs + +### Synopsis + View tenant logs. -### Flags +### Options ``` -h, --help help for logs ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,7 +28,8 @@ View tenant logs. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 logs list](auth0_logs_list.md) - Show the application logs * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams * [auth0 logs tail](auth0_logs_tail.md) - Tail the tenant logs + diff --git a/docs/auth0_logs_list.md b/docs/auth0_logs_list.md index d2d1755cf..5fac06b55 100644 --- a/docs/auth0_logs_list.md +++ b/docs/auth0_logs_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs list +Show the application logs + +### Synopsis + Show the tenant logs allowing to filter using Lucene query syntax. ``` @@ -19,7 +26,7 @@ auth0 logs list --filter "type:f" # See the full list of type codes at https://a auth0 logs ls -n 100 ``` -### Flags +### Options ``` -f, --filter string Filter in Lucene query syntax. 
See https://auth0.com/docs/logs/log-search-query-syntax for more details. @@ -27,7 +34,7 @@ auth0 logs ls -n 100 -n, --number int Number of log entries to show. (default 100) ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -41,3 +48,4 @@ auth0 logs ls -n 100 ### SEE ALSO * [auth0 logs](auth0_logs.md) - View tenant logs + diff --git a/docs/auth0_logs_streams.md b/docs/auth0_logs_streams.md index abd1e0802..1b44d6c9a 100644 --- a/docs/auth0_logs_streams.md +++ b/docs/auth0_logs_streams.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 logs streams +Manage resources for log streams + +### Synopsis + manage resources for log streams. -### Flags +### Options ``` -h, --help help for streams ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -25,6 +32,7 @@ manage resources for log streams. * [auth0 logs streams create](auth0_logs_streams_create.md) - Create a new log stream * [auth0 logs streams delete](auth0_logs_streams_delete.md) - Delete a log stream * [auth0 logs streams list](auth0_logs_streams_list.md) - List all log streams -* [auth0 logs streams open](auth0_logs_streams_open.md) - Open log stream settings page in Auth0 Manage +* [auth0 logs streams open](auth0_logs_streams_open.md) - Open log stream settings page in the Auth0 Dashboard * [auth0 logs streams show](auth0_logs_streams_show.md) - Show a log stream by Id * [auth0 logs streams update](auth0_logs_streams_update.md) - Update a log stream + diff --git a/docs/auth0_logs_streams_create.md b/docs/auth0_logs_streams_create.md index ee29d5421..eeb363f0c 100644 --- a/docs/auth0_logs_streams_create.md +++ b/docs/auth0_logs_streams_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs streams create +Create a new log stream + +### Synopsis + Create a new log stream. ``` @@ -16,7 +23,7 @@ auth0 logs streams create -n myeventbridge -t eventbridge --eventbridge-id 99999 auth0 logs streams create -n test-splunk -t splunk --splunk-domain demo.splunk.com --splunk-token 12a34ab5-c6d7-8901-23ef-456b7c89d0c1 --splunk-port 8080 --splunk-secure=true ``` -### Flags +### Options ``` --datadog-id string The region in which datadog dashboard is created. @@ -41,7 +48,7 @@ auth0 logs streams create -n test-splunk -t splunk --splunk-domain demo.splunk.c -t, --type string Type of the log stream. Possible values: http, eventbridge, eventgrid, datadog, splunk, sumo. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -55,3 +62,4 @@ auth0 logs streams create -n test-splunk -t splunk --splunk-domain demo.splunk.c ### SEE ALSO * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams + diff --git a/docs/auth0_logs_streams_delete.md b/docs/auth0_logs_streams_delete.md index 0eecde447..fd317e3f4 100644 --- a/docs/auth0_logs_streams_delete.md +++ b/docs/auth0_logs_streams_delete.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs streams delete +Delete a log stream + +### Synopsis + Delete a log stream. ``` @@ -13,13 +20,13 @@ auth0 logs streams delete auth0 logs streams delete ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -33,3 +40,4 @@ auth0 logs streams delete ### SEE ALSO * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams + diff --git a/docs/auth0_logs_streams_list.md b/docs/auth0_logs_streams_list.md index 9f56732db..79bc99b8a 100644 --- a/docs/auth0_logs_streams_list.md +++ b/docs/auth0_logs_streams_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs streams list +List all log streams + +### Synopsis + List your existing log streams. To create one try: auth0 logs streams create @@ -14,13 +21,13 @@ auth0 logs streams list auth0 logs streams ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 logs streams ls ### SEE ALSO * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams + diff --git a/docs/auth0_logs_streams_open.md b/docs/auth0_logs_streams_open.md index 5691a8fc9..f96fcd83a 100644 --- a/docs/auth0_logs_streams_open.md +++ b/docs/auth0_logs_streams_open.md @@ -1,6 +1,13 @@ +--- +layout: default +--- ## auth0 logs streams open -Open log stream settings page in Auth0 Manage. +Open log stream settings page in the Auth0 Dashboard + +### Synopsis + +Open log stream settings page in the Auth0 Dashboard. ``` auth0 logs streams open [flags] @@ -12,13 +19,13 @@ auth0 logs streams open [flags] auth0 logs streams open ``` -### Flags +### Options ``` -h, --help help for open ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 logs streams open ### SEE ALSO * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams + diff --git a/docs/auth0_logs_streams_show.md b/docs/auth0_logs_streams_show.md index 09c617054..3c1d632a5 100644 --- a/docs/auth0_logs_streams_show.md +++ b/docs/auth0_logs_streams_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs streams show +Show a log stream by Id + +### Synopsis + Show a log stream by Id. ``` @@ -13,13 +20,13 @@ auth0 logs streams show auth0 logs streams show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 logs streams show ### SEE ALSO * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams + diff --git a/docs/auth0_logs_streams_update.md b/docs/auth0_logs_streams_update.md index dbadba32b..0a15b1345 100644 --- a/docs/auth0_logs_streams_update.md +++ b/docs/auth0_logs_streams_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs streams update +Update a log stream + +### Synopsis + Update a log stream. ``` @@ -17,7 +24,7 @@ auth0 logs streams update -n mydatadog -t datadog --datadog-key 9999999 --d auth0 logs streams update -n myeventbridge -t eventbridge ``` -### Flags +### Options ``` --datadog-id string The region in which datadog dashboard is created. @@ -37,7 +44,7 @@ auth0 logs streams update -n myeventbridge -t eventbridge -t, --type string Type of the log stream. Possible values: http, eventbridge, eventgrid, datadog, splunk, sumo. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -51,3 +58,4 @@ auth0 logs streams update -n myeventbridge -t eventbridge ### SEE ALSO * [auth0 logs streams](auth0_logs_streams.md) - Manage resources for log streams + diff --git a/docs/auth0_logs_tail.md b/docs/auth0_logs_tail.md index 296b3fffa..d3d4d26c6 100644 --- a/docs/auth0_logs_tail.md +++ b/docs/auth0_logs_tail.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 logs tail +Tail the tenant logs + +### Synopsis + Tail the tenant logs allowing to filter using Lucene query syntax. ``` @@ -19,7 +26,7 @@ auth0 logs tail --filter "type:f" # See the full list of type codes at https://a auth0 logs tail -n 100 ``` -### Flags +### Options ``` -f, --filter string Filter in Lucene query syntax. See https://auth0.com/docs/logs/log-search-query-syntax for more details. @@ -27,7 +34,7 @@ auth0 logs tail -n 100 -n, --number int Number of log entries to show. (default 100) ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -41,3 +48,4 @@ auth0 logs tail -n 100 ### SEE ALSO * [auth0 logs](auth0_logs.md) - View tenant logs + diff --git a/docs/auth0_quickstarts.md b/docs/auth0_quickstarts.md index 6054d3282..9b8bf09ab 100644 --- a/docs/auth0_quickstarts.md +++ b/docs/auth0_quickstarts.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 quickstarts +Quickstart support for getting bootstrapped + +### Synopsis + Quickstart support for getting bootstrapped. -### Flags +### Options ``` -h, --help help for quickstarts ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,6 +28,7 @@ Quickstart support for getting bootstrapped. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 quickstarts download](auth0_quickstarts_download.md) - Download a Quickstart sample app for a specific tech stack * [auth0 quickstarts list](auth0_quickstarts_list.md) - List the available Quickstarts + diff --git a/docs/auth0_quickstarts_download.md b/docs/auth0_quickstarts_download.md index cec3e4aa7..e770a2f33 100644 --- a/docs/auth0_quickstarts_download.md +++ b/docs/auth0_quickstarts_download.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 quickstarts download +Download a Quickstart sample app for a specific tech stack + +### Synopsis + Download a Quickstart sample app for a specific tech stack. ``` @@ -13,14 +20,14 @@ auth0 quickstarts download --stack auth0 qs download --stack ``` -### Flags +### Options ``` -h, --help help for download -s, --stack string Tech/Language of the quickstart sample to download. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 qs download --stack ### SEE ALSO * [auth0 quickstarts](auth0_quickstarts.md) - Quickstart support for getting bootstrapped + diff --git a/docs/auth0_quickstarts_list.md b/docs/auth0_quickstarts_list.md index 31293e08b..0b5d52a2d 100644 --- a/docs/auth0_quickstarts_list.md +++ b/docs/auth0_quickstarts_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 quickstarts list +List the available Quickstarts + +### Synopsis + List the available Quickstarts. ``` @@ -15,13 +22,13 @@ auth0 qs list auth0 qs ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -35,3 +42,4 @@ auth0 qs ls ### SEE ALSO * [auth0 quickstarts](auth0_quickstarts.md) - Quickstart support for getting bootstrapped + diff --git a/docs/auth0_roles.md b/docs/auth0_roles.md index f1b0ce513..a42d48c64 100644 --- a/docs/auth0_roles.md +++ b/docs/auth0_roles.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 roles +Manage resources for roles + +### Synopsis + Manage resources for roles. -### Flags +### Options ``` -h, --help help for roles ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,9 +28,10 @@ Manage resources for roles. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 roles create](auth0_roles_create.md) - Create a new role * [auth0 roles delete](auth0_roles_delete.md) - Delete a role * [auth0 roles list](auth0_roles_list.md) - List your roles * [auth0 roles show](auth0_roles_show.md) - Show a role * [auth0 roles update](auth0_roles_update.md) - Update a role + diff --git a/docs/auth0_roles_create.md b/docs/auth0_roles_create.md index d4b97cd4f..91970b29c 100644 --- a/docs/auth0_roles_create.md +++ b/docs/auth0_roles_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 roles create +Create a new role + +### Synopsis + Create a new role. ``` @@ -14,7 +21,7 @@ auth0 roles create --name myrole auth0 roles create -n myrole --description "awesome role" ``` -### Flags +### Options ``` -d, --description string Description of the role. @@ -22,7 +29,7 @@ auth0 roles create -n myrole --description "awesome role" -n, --name string Name of the role. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -36,3 +43,4 @@ auth0 roles create -n myrole --description "awesome role" ### SEE ALSO * [auth0 roles](auth0_roles.md) - Manage resources for roles + diff --git a/docs/auth0_roles_delete.md b/docs/auth0_roles_delete.md index a36b44b59..2819abc76 100644 --- a/docs/auth0_roles_delete.md +++ b/docs/auth0_roles_delete.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 roles delete +Delete a role + +### Synopsis + Delete a role. ``` @@ -13,13 +20,13 @@ auth0 roles delete auth0 roles delete ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 roles delete ### SEE ALSO * [auth0 roles](auth0_roles.md) - Manage resources for roles + diff --git a/docs/auth0_roles_list.md b/docs/auth0_roles_list.md index 575a25afa..50bffadf8 100644 --- a/docs/auth0_roles_list.md +++ b/docs/auth0_roles_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 roles list +List your roles + +### Synopsis + List your existing roles. To create one try: auth0 roles create @@ -14,13 +21,13 @@ auth0 roles list auth0 roles ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 roles ls ### SEE ALSO * [auth0 roles](auth0_roles.md) - Manage resources for roles + diff --git a/docs/auth0_roles_show.md b/docs/auth0_roles_show.md index fff56a913..64b20b032 100644 --- a/docs/auth0_roles_show.md +++ b/docs/auth0_roles_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 roles show +Show a role + +### Synopsis + Show a role. 
``` @@ -13,13 +20,13 @@ auth0 roles show auth0 roles show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 roles show ### SEE ALSO * [auth0 roles](auth0_roles.md) - Manage resources for roles + diff --git a/docs/auth0_roles_update.md b/docs/auth0_roles_update.md index 2cff8a6e7..18697bae4 100644 --- a/docs/auth0_roles_update.md +++ b/docs/auth0_roles_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 roles update +Update a role + +### Synopsis + Update a role. ``` @@ -14,7 +21,7 @@ auth0 roles update --name myrole auth0 roles update -n myrole --description "awesome role" ``` -### Flags +### Options ``` -d, --description string Description of the role. @@ -22,7 +29,7 @@ auth0 roles update -n myrole --description "awesome role" -n, --name string Name of the role. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -36,3 +43,4 @@ auth0 roles update -n myrole --description "awesome role" ### SEE ALSO * [auth0 roles](auth0_roles.md) - Manage resources for roles + diff --git a/docs/auth0_rules.md b/docs/auth0_rules.md index 128389694..b16da524d 100644 --- a/docs/auth0_rules.md +++ b/docs/auth0_rules.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 rules +Manage resources for rules + +### Synopsis + Manage resources for rules. -### Flags +### Options ``` -h, --help help for rules ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,7 +28,7 @@ Manage resources for rules. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 rules create](auth0_rules_create.md) - Create a new rule * [auth0 rules delete](auth0_rules_delete.md) - Delete a rule * [auth0 rules disable](auth0_rules_disable.md) - Disable a rule @@ -29,3 +36,4 @@ Manage resources for rules. * [auth0 rules list](auth0_rules_list.md) - List your rules * [auth0 rules show](auth0_rules_show.md) - Show a rule * [auth0 rules update](auth0_rules_update.md) - Update a rule + diff --git a/docs/auth0_rules_create.md b/docs/auth0_rules_create.md index d7477005d..4abbbc7b3 100644 --- a/docs/auth0_rules_create.md +++ b/docs/auth0_rules_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules create +Create a new rule + +### Synopsis + Create a new rule. ``` @@ -15,7 +22,7 @@ auth0 rules create -n "My Rule" --template "Empty rule" auth0 rules create -n "My Rule" -t "Empty rule" --enabled=false ``` -### Flags +### Options ``` -e, --enabled Enable (or disable) a rule. (default true) @@ -24,7 +31,7 @@ auth0 rules create -n "My Rule" -t "Empty rule" --enabled=false -t, --template string Template to use for the rule. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -38,3 +45,4 @@ auth0 rules create -n "My Rule" -t "Empty rule" --enabled=false ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_rules_delete.md b/docs/auth0_rules_delete.md index 5c4a2ac6d..d9dc7ef27 100644 --- a/docs/auth0_rules_delete.md +++ b/docs/auth0_rules_delete.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules delete +Delete a rule + +### Synopsis + Delete a rule. 
``` @@ -10,16 +17,16 @@ auth0 rules delete [flags] ``` auth0 rules delete -auth0 rules delete +auth0 rules delete ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 rules delete ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_rules_disable.md b/docs/auth0_rules_disable.md index e17545941..b37a0e4cf 100644 --- a/docs/auth0_rules_disable.md +++ b/docs/auth0_rules_disable.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules disable +Disable a rule + +### Synopsis + Disable a rule. ``` @@ -9,16 +16,16 @@ auth0 rules disable [flags] ### Examples ``` -auth0 rules disable +auth0 rules disable ``` -### Flags +### Options ``` -h, --help help for disable ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 rules disable ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_rules_enable.md b/docs/auth0_rules_enable.md index 51f3e39be..d95670de4 100644 --- a/docs/auth0_rules_enable.md +++ b/docs/auth0_rules_enable.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules enable +Enable a rule + +### Synopsis + Enable a rule. ``` @@ -9,16 +16,16 @@ auth0 rules enable [flags] ### Examples ``` -auth0 rules enable +auth0 rules enable ``` -### Flags +### Options ``` -h, --help help for enable ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 rules enable ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_rules_list.md b/docs/auth0_rules_list.md index bb78cbdbb..32e540c78 100644 --- a/docs/auth0_rules_list.md +++ b/docs/auth0_rules_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules list +List your rules + +### Synopsis + List your existing rules. To create one try: auth0 rules create @@ -14,13 +21,13 @@ auth0 rules list auth0 rules ls ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -34,3 +41,4 @@ auth0 rules ls ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_rules_show.md b/docs/auth0_rules_show.md index 4fed76b04..df982beb5 100644 --- a/docs/auth0_rules_show.md +++ b/docs/auth0_rules_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules show +Show a rule + +### Synopsis + Show a rule. ``` @@ -13,13 +20,13 @@ auth0 rules show auth0 rules show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 rules show ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_rules_update.md b/docs/auth0_rules_update.md index 545e1991f..51f5a8e43 100644 --- a/docs/auth0_rules_update.md +++ b/docs/auth0_rules_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 rules update +Update a rule + +### Synopsis + Update a rule. 
``` @@ -9,12 +16,12 @@ auth0 rules update [flags] ### Examples ``` -auth0 rules update -auth0 rules update --name "My Updated Rule" -auth0 rules update -n "My Updated Rule" --enabled=false +auth0 rules update +auth0 rules update --name "My Updated Rule" +auth0 rules update -n "My Updated Rule" --enabled=false ``` -### Flags +### Options ``` -e, --enabled Enable (or disable) a rule. (default true) @@ -22,7 +29,7 @@ auth0 rules update -n "My Updated Rule" --enabled=false -n, --name string Name of the rule. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -36,3 +43,4 @@ auth0 rules update -n "My Updated Rule" --enabled=false ### SEE ALSO * [auth0 rules](auth0_rules.md) - Manage resources for rules + diff --git a/docs/auth0_tenants.md b/docs/auth0_tenants.md index b7f13a5d4..4fd385089 100644 --- a/docs/auth0_tenants.md +++ b/docs/auth0_tenants.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 tenants +Manage configured tenants + +### Synopsis + Manage configured tenants. -### Flags +### Options ``` -h, --help help for tenants ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,7 +28,8 @@ Manage configured tenants. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 tenants list](auth0_tenants_list.md) - List your tenants -* [auth0 tenants open](auth0_tenants_open.md) - Open tenant settings page in Auth0 Manage +* [auth0 tenants open](auth0_tenants_open.md) - Open tenant settings page in the Auth0 Dashboard * [auth0 tenants use](auth0_tenants_use.md) - Set the active tenant + diff --git a/docs/auth0_tenants_list.md b/docs/auth0_tenants_list.md index b8ae7ec09..9e1c1f345 100644 --- a/docs/auth0_tenants_list.md +++ b/docs/auth0_tenants_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 tenants list +List your tenants + +### Synopsis + List your tenants. ``` @@ -12,13 +19,13 @@ auth0 tenants list [flags] auth0 tenants list ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 tenants list ### SEE ALSO * [auth0 tenants](auth0_tenants.md) - Manage configured tenants + diff --git a/docs/auth0_tenants_open.md b/docs/auth0_tenants_open.md index be0de7359..dd3b52832 100644 --- a/docs/auth0_tenants_open.md +++ b/docs/auth0_tenants_open.md @@ -1,6 +1,13 @@ +--- +layout: default +--- ## auth0 tenants open -Open tenant settings page in Auth0 Manage. +Open tenant settings page in the Auth0 Dashboard + +### Synopsis + +Open tenant settings page in the Auth0 Dashboard. ``` auth0 tenants open [flags] @@ -12,13 +19,13 @@ auth0 tenants open [flags] auth0 tenants open ``` -### Flags +### Options ``` -h, --help help for open ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 tenants open ### SEE ALSO * [auth0 tenants](auth0_tenants.md) - Manage configured tenants + diff --git a/docs/auth0_tenants_use.md b/docs/auth0_tenants_use.md index 5831f8d9d..a0a445d8a 100644 --- a/docs/auth0_tenants_use.md +++ b/docs/auth0_tenants_use.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 tenants use +Set the active tenant + +### Synopsis + Set the active tenant. 
``` @@ -12,13 +19,13 @@ auth0 tenants use [flags] auth0 tenants use ``` -### Flags +### Options ``` -h, --help help for use ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 tenants use ### SEE ALSO * [auth0 tenants](auth0_tenants.md) - Manage configured tenants + diff --git a/docs/auth0_test.md b/docs/auth0_test.md index dc18ebfd6..df24f8684 100644 --- a/docs/auth0_test.md +++ b/docs/auth0_test.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 test +Try your Universal Login box or get a token + +### Synopsis + Try your Universal Login box or get a token. -### Flags +### Options ``` -h, --help help for test ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,6 +28,7 @@ Try your Universal Login box or get a token. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 test login](auth0_test_login.md) - Try out your Universal Login box * [auth0 test token](auth0_test_token.md) - Fetch a token for the given application and API + diff --git a/docs/auth0_test_login.md b/docs/auth0_test_login.md index 6a51e31e8..ecb3707f1 100644 --- a/docs/auth0_test_login.md +++ b/docs/auth0_test_login.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 test login +Try out your Universal Login box + +### Synopsis + Launch a browser to try out your Universal Login box. ``` @@ -14,7 +21,7 @@ auth0 test login auth0 test login --connection ``` -### Flags +### Options ``` -a, --audience string The unique identifier of the target API you want to access. @@ -24,7 +31,7 @@ auth0 test login --connection -s, --scopes strings The list of scopes you want to use. (default [openid,profile]) ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -38,3 +45,4 @@ auth0 test login --connection ### SEE ALSO * [auth0 test](auth0_test.md) - Try your Universal Login box or get a token + diff --git a/docs/auth0_test_token.md b/docs/auth0_test_token.md index 976540d72..edf20b4e6 100644 --- a/docs/auth0_test_token.md +++ b/docs/auth0_test_token.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 test token +Fetch a token for the given application and API + +### Synopsis + Fetch an access token for the given application. If --client-id is not provided, the default client "CLI Login Testing" will be used (and created if not exists). Specify the API you want this token for with --audience (API Identifer). Additionally, you can also specify the --scope to use. @@ -15,7 +22,7 @@ auth0 test token auth0 test token --client-id --audience --scopes ``` -### Flags +### Options ``` -a, --audience string The unique identifier of the target API you want to access. @@ -24,7 +31,7 @@ auth0 test token --client-id --audience --scopes -s, --scopes strings The list of scopes you want to use. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. 
@@ -38,3 +45,4 @@ auth0 test token --client-id --audience --scopes ### SEE ALSO * [auth0 test](auth0_test.md) - Try your Universal Login box or get a token + diff --git a/docs/auth0_users.md b/docs/auth0_users.md index 676adb2c5..7b9413f0d 100644 --- a/docs/auth0_users.md +++ b/docs/auth0_users.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 users +Manage resources for users + +### Synopsis + Manage resources for users. -### Flags +### Options ``` -h, --help help for users ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -21,12 +28,13 @@ Manage resources for users. ### SEE ALSO -* [auth0](auth0.md) - Supercharge your development workflow. +* [auth0](/auth0-cli/) - Supercharge your development workflow. * [auth0 users blocks](auth0_users_blocks.md) - Manage brute-force protection user blocks * [auth0 users create](auth0_users_create.md) - Create a new user * [auth0 users delete](auth0_users_delete.md) - Delete a user -* [auth0 users open](auth0_users_open.md) - Open user details page in Auth0 Manage +* [auth0 users open](auth0_users_open.md) - Open user details page in the Auth0 Dashboard * [auth0 users search](auth0_users_search.md) - Search for users * [auth0 users show](auth0_users_show.md) - Show an existing user * [auth0 users unblock](auth0_users_unblock.md) - Remove brute-force protection blocks for a given user * [auth0 users update](auth0_users_update.md) - Update a user + diff --git a/docs/auth0_users_blocks.md b/docs/auth0_users_blocks.md index ac15c470e..d3952de7d 100644 --- a/docs/auth0_users_blocks.md +++ b/docs/auth0_users_blocks.md @@ -1,14 +1,21 @@ +--- +layout: default +--- ## auth0 users blocks +Manage brute-force protection user blocks + +### Synopsis + Manage brute-force protection user blocks. -### Flags +### Options ``` -h, --help help for blocks ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -23,3 +30,4 @@ Manage brute-force protection user blocks. * [auth0 users](auth0_users.md) - Manage resources for users * [auth0 users blocks list](auth0_users_blocks_list.md) - List brute-force protection blocks for a given user + diff --git a/docs/auth0_users_blocks_list.md b/docs/auth0_users_blocks_list.md index 5068e0adc..080123ca1 100644 --- a/docs/auth0_users_blocks_list.md +++ b/docs/auth0_users_blocks_list.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users blocks list +List brute-force protection blocks for a given user + +### Synopsis + List brute-force protection blocks for a given user. ``` @@ -12,13 +19,13 @@ auth0 users blocks list [flags] auth0 users blocks list ``` -### Flags +### Options ``` -h, --help help for list ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 users blocks list ### SEE ALSO * [auth0 users blocks](auth0_users_blocks.md) - Manage brute-force protection user blocks + diff --git a/docs/auth0_users_create.md b/docs/auth0_users_create.md index dc2e94ab8..c58f60c3f 100644 --- a/docs/auth0_users_create.md +++ b/docs/auth0_users_create.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users create +Create a new user + +### Synopsis + Create a new user. 
``` @@ -15,7 +22,7 @@ auth0 users create -n "John Doe" --email john@example.com auth0 users create -n "John Doe" --e john@example.com --connection "Username-Password-Authentication" ``` -### Flags +### Options ``` -c, --connection string Name of the connection this user should be created in. @@ -26,7 +33,7 @@ auth0 users create -n "John Doe" --e john@example.com --connection "Username-Pas -u, --username string The user's username. Only valid if the connection requires a username. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -40,3 +47,4 @@ auth0 users create -n "John Doe" --e john@example.com --connection "Username-Pas ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0_users_delete.md b/docs/auth0_users_delete.md index 4b91a9a05..cb4805a06 100644 --- a/docs/auth0_users_delete.md +++ b/docs/auth0_users_delete.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users delete +Delete a user + +### Synopsis + Delete a user. ``` @@ -13,13 +20,13 @@ auth0 users delete auth0 users delete ``` -### Flags +### Options ``` -h, --help help for delete ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 users delete ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0_users_open.md b/docs/auth0_users_open.md index f8ff97d05..2f834b4b3 100644 --- a/docs/auth0_users_open.md +++ b/docs/auth0_users_open.md @@ -1,6 +1,13 @@ +--- +layout: default +--- ## auth0 users open -Open user details page in Auth0 Manage. +Open user details page in the Auth0 Dashboard + +### Synopsis + +Open user details page in the Auth0 Dashboard. ``` auth0 users open [flags] @@ -13,13 +20,13 @@ auth0 users open auth0 users open "auth0|xxxxxxxxxx" ``` -### Flags +### Options ``` -h, --help help for open ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 users open "auth0|xxxxxxxxxx" ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0_users_search.md b/docs/auth0_users_search.md index 35aed5275..beb18955c 100644 --- a/docs/auth0_users_search.md +++ b/docs/auth0_users_search.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users search +Search for users + +### Synopsis + Search for users. To create one try: auth0 users create @@ -16,7 +23,7 @@ auth0 users search -q name --sort "name:1" auth0 users search -q name -s "name:1" ``` -### Flags +### Options ``` -h, --help help for search @@ -24,7 +31,7 @@ auth0 users search -q name -s "name:1" -s, --sort string Field to sort by. Use 'field:order' where 'order' is '1' for ascending and '-1' for descending. e.g. 'created_at:1'. ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -38,3 +45,4 @@ auth0 users search -q name -s "name:1" ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0_users_show.md b/docs/auth0_users_show.md index 1d74e3c61..5bf1d92a5 100644 --- a/docs/auth0_users_show.md +++ b/docs/auth0_users_show.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users show +Show an existing user + +### Synopsis + Show an existing user. 
``` @@ -13,13 +20,13 @@ auth0 users show auth0 users show ``` -### Flags +### Options ``` -h, --help help for show ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -33,3 +40,4 @@ auth0 users show ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0_users_unblock.md b/docs/auth0_users_unblock.md index 31af1dade..6a410a0cc 100644 --- a/docs/auth0_users_unblock.md +++ b/docs/auth0_users_unblock.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users unblock +Remove brute-force protection blocks for a given user + +### Synopsis + Remove brute-force protection blocks for a given user. ``` @@ -12,13 +19,13 @@ auth0 users unblock [flags] auth0 users unblock ``` -### Flags +### Options ``` -h, --help help for unblock ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -32,3 +39,4 @@ auth0 users unblock ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0_users_update.md b/docs/auth0_users_update.md index 5519a84aa..b7058bac9 100644 --- a/docs/auth0_users_update.md +++ b/docs/auth0_users_update.md @@ -1,5 +1,12 @@ +--- +layout: default +--- ## auth0 users update +Update a user + +### Synopsis + Update a user. ``` @@ -15,7 +22,7 @@ auth0 users update --name John Doe auth0 users update -n John Doe --email john.doe@example.com ``` -### Flags +### Options ``` -c, --connection string Name of the connection this user should be created in. @@ -25,7 +32,7 @@ auth0 users update -n John Doe --email john.doe@example.com -p, --password string Initial password for this user (mandatory for non-SMS connections). ``` -### Flags inherited from parent commands +### Options inherited from parent commands ``` --debug Enable debug mode. @@ -39,3 +46,4 @@ auth0 users update -n John Doe --email john.doe@example.com ### SEE ALSO * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/docs/auth0.md b/docs/index.md similarity index 93% rename from docs/auth0.md rename to docs/index.md index 65ec660b4..7c8a2619b 100644 --- a/docs/auth0.md +++ b/docs/index.md @@ -1,13 +1,11 @@ +--- +layout: home +--- ## auth0 Supercharge your development workflow. - -``` -auth0 [flags] -``` - -### Flags +### Options ``` --debug Enable debug mode. @@ -17,11 +15,11 @@ auth0 [flags] --no-color Disable colors. --no-input Disable interactivity. --tenant string Specific tenant to use. 
- -v, --version version for auth0 ``` ### SEE ALSO +* [auth0 actions](auth0_actions.md) - Manage resources for actions * [auth0 apis](auth0_apis.md) - Manage resources for APIs * [auth0 apps](auth0_apps.md) - Manage resources for applications * [auth0 branding](auth0_branding.md) - Manage branding options @@ -36,3 +34,4 @@ auth0 [flags] * [auth0 tenants](auth0_tenants.md) - Manage configured tenants * [auth0 test](auth0_test.md) - Try your Universal Login box or get a token * [auth0 users](auth0_users.md) - Manage resources for users + diff --git a/go.mod b/go.mod index 7a139823c..e97adcd3f 100644 --- a/go.mod +++ b/go.mod @@ -16,6 +16,7 @@ require ( github.com/google/go-cmp v0.5.5 github.com/google/uuid v1.2.0 github.com/guiguan/caster v0.0.0-20191104051807-3736c4464f38 + github.com/joeshaw/envdecode v0.0.0-20200121155833-099f1fc765bd github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 github.com/klauspost/compress v1.11.9 // indirect github.com/klauspost/pgzip v1.2.5 // indirect diff --git a/go.sum b/go.sum index f856ce593..116d10fb3 100644 --- a/go.sum +++ b/go.sum @@ -96,7 +96,9 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4= github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA= +github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk= github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE= +github.com/cpuguy83/go-md2man/v2 v2.0.0 h1:EoUDS0afbrsXAZ9YQ9jdu/mZ2sXgT1/2yyNng4PGlyM= github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= github.com/danieljoos/wincred v1.1.0 h1:3RNcEpBg4IhIChZdFRSdlQt1QjCp1sMAPIrOnm7Yf8g= github.com/danieljoos/wincred v1.1.0/go.mod h1:XYlo+eRTsVA9aHGp7NGjFkPla4m+DCL7hqDjlFjiygg= @@ -262,6 +264,8 @@ github.com/iris-contrib/go.uuid v2.0.0+incompatible/go.mod h1:iz2lgM/1UnEf1kP0L/ github.com/iris-contrib/jade v1.1.3/go.mod h1:H/geBymxJhShH5kecoiOCSssPX7QWYH7UaeZTSWddIk= github.com/iris-contrib/pongo2 v0.0.1/go.mod h1:Ssh+00+3GAZqSQb30AvBRNxBx7rf0GqwkjqxNd0u65g= github.com/iris-contrib/schema v0.0.1/go.mod h1:urYA3uvUNG1TIIjOSCzHr9/LmbQo8LrOcOqfqxa4hXw= +github.com/joeshaw/envdecode v0.0.0-20200121155833-099f1fc765bd h1:nIzoSW6OhhppWLm4yqBwZsKJlAayUu5FGozhrF3ETSM= +github.com/joeshaw/envdecode v0.0.0-20200121155833-099f1fc765bd/go.mod h1:MEQrHur0g8VplbLOv5vXmDzacSaH9Z7XhcgsSh1xciU= github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc= github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg= github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo= @@ -413,7 +417,9 @@ github.com/rivo/uniseg v0.2.0 h1:S1pD9weZBuJdFmowNwbpi7BJ8TNftyUImj/0WQi72jY= github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/go-internal v1.3.0/go.mod 
h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= +github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo= github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g= +github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q= github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= github.com/ryanuber/columnize v2.1.0+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts= @@ -421,6 +427,7 @@ github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtm github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc= github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo= github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM= diff --git a/internal/cli/build_doc.go b/internal/cli/build_doc.go new file mode 100644 index 000000000..b3f44d10e --- /dev/null +++ b/internal/cli/build_doc.go @@ -0,0 +1,53 @@ +package cli + +import ( + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/cobra/doc" +) + +func BuildDoc(path string) error { + cli := &cli{} + + rootCmd := &cobra.Command{ + Use: "auth0", + Short: rootShort, + DisableAutoGenTag: true, + } + + rootCmd.SetUsageTemplate(namespaceUsageTemplate()) + addPersistentFlags(rootCmd, cli) + addSubcommands(rootCmd, cli) + + err := doc.GenMarkdownTreeCustom(rootCmd, + path, + func(fileName string) string { + // prepend to the generated markdown + if strings.HasSuffix(fileName, "auth0.md") { + return `--- +layout: home +--- +` + } + + return `--- +layout: default +--- +` + }, + func(fileName string) string { + // return same value, we're not changing the internal link + if strings.HasSuffix(fileName, "auth0.md") { + return "/auth0-cli/" + } + + return fileName + }) + + if err != nil { + return err + } + + return nil +} diff --git a/internal/cli/root.go b/internal/cli/root.go index 9166eb4f9..d5675cf15 100644 --- a/internal/cli/root.go +++ b/internal/cli/root.go @@ -15,6 +15,8 @@ import ( "github.com/spf13/cobra" ) +const rootShort = "Supercharge your development workflow." + // Execute is the primary entrypoint of the CLI app. func Execute() { // cfg contains tenant related information, e.g. `travel0-dev`, @@ -27,12 +29,58 @@ func Execute() { tracker: analytics.NewTracker(), } + rootCmd := buildRootCmd(cli) + + rootCmd.SetUsageTemplate(namespaceUsageTemplate()) + addPersistentFlags(rootCmd, cli) + addSubcommands(rootCmd, cli) + + // TODO(cyx): backport this later on using latest auth0/v5. 
+ // rootCmd.AddCommand(actionsCmd(cli)) + // rootCmd.AddCommand(triggersCmd(cli)) + + defer func() { + if v := recover(); v != nil { + err := fmt.Errorf("panic: %v", v) + + // If we're in development mode, we should throw the + // panic for so we have less surprises. For + // non-developers, we'll swallow the panics. + if instrumentation.ReportException(err) { + fmt.Println(panicMessage) + } else { + panic(v) + } + } + }() + + // platform specific terminal initialization: + // this should run for all commands, + // for most of the architectures there's no requirements: + ansi.InitConsole() + + cancelCtx := contextWithCancel() + if err := rootCmd.ExecuteContext(cancelCtx); err != nil { + cli.renderer.Heading("error") + cli.renderer.Errorf(err.Error()) + + instrumentation.ReportException(err) + os.Exit(1) + } + + timeoutCtx, cancel := context.WithTimeout(cancelCtx, 3*time.Second) + // defers are executed in LIFO order + defer cancel() + defer cli.tracker.Wait(timeoutCtx) // No event should be tracked after this has run, or it will panic e.g. in earlier deferred functions +} + +func buildRootCmd(cli *cli) *cobra.Command { rootCmd := &cobra.Command{ Use: "auth0", SilenceUsage: true, SilenceErrors: true, - Short: "Supercharge your development workflow.", - Long: "Supercharge your development workflow.\n" + getLogin(cli), + Short: rootShort, + Long: rootShort + "\n" + getLogin(cli), Version: buildinfo.GetVersionWithCommit(), PersistentPreRunE: func(cmd *cobra.Command, args []string) error { ansi.DisableColors = cli.noColor @@ -47,7 +95,7 @@ func Execute() { // We're tracking the login command in its Run method // so we'll only add this defer if the command is not login defer func() { - if cli.isLoggedIn() { + if cli.tracker != nil && cli.isLoggedIn() { cli.tracker.TrackCommandRun(cmd, cli.config.InstallID) } }() @@ -85,7 +133,10 @@ func Execute() { }, } - rootCmd.SetUsageTemplate(namespaceUsageTemplate()) + return rootCmd +} + +func addPersistentFlags(rootCmd *cobra.Command, cli *cli) { rootCmd.PersistentFlags().StringVar(&cli.tenant, "tenant", cli.config.DefaultTenant, "Specific tenant to use.") @@ -103,6 +154,10 @@ func Execute() { rootCmd.PersistentFlags().BoolVar(&cli.noColor, "no-color", false, "Disable colors.") + +} + +func addSubcommands(rootCmd *cobra.Command, cli *cli) { // order of the comamnds here matters // so add new commands in a place that reflect its relevance or relation with other commands: rootCmd.AddCommand(loginCmd(cli)) @@ -124,43 +179,6 @@ func Execute() { // keep completion at the bottom: rootCmd.AddCommand(completionCmd(cli)) - // TODO(cyx): backport this later on using latest auth0/v5. - // rootCmd.AddCommand(actionsCmd(cli)) - // rootCmd.AddCommand(triggersCmd(cli)) - - defer func() { - if v := recover(); v != nil { - err := fmt.Errorf("panic: %v", v) - - // If we're in development mode, we should throw the - // panic for so we have less surprises. For - // non-developers, we'll swallow the panics. 
- if instrumentation.ReportException(err) { - fmt.Println(panicMessage) - } else { - panic(v) - } - } - }() - - // platform specific terminal initialization: - // this should run for all commands, - // for most of the architectures there's no requirements: - ansi.InitConsole() - - cancelCtx := contextWithCancel() - if err := rootCmd.ExecuteContext(cancelCtx); err != nil { - cli.renderer.Heading("error") - cli.renderer.Errorf(err.Error()) - - instrumentation.ReportException(err) - os.Exit(1) - } - - timeoutCtx, cancel := context.WithTimeout(cancelCtx, 3*time.Second) - // defers are executed in LIFO order - defer cancel() - defer cli.tracker.Wait(timeoutCtx) // No event should be tracked after this has run, or it will panic e.g. in earlier deferred functions } func contextWithCancel() context.Context { diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md b/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md new file mode 100644 index 000000000..1cade6cef --- /dev/null +++ b/vendor/github.com/cpuguy83/go-md2man/v2/LICENSE.md @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Brian Goff + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go new file mode 100644 index 000000000..b48005673 --- /dev/null +++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/md2man.go @@ -0,0 +1,14 @@ +package md2man + +import ( + "github.com/russross/blackfriday/v2" +) + +// Render converts a markdown document into a roff formatted document. +func Render(doc []byte) []byte { + renderer := NewRoffRenderer() + + return blackfriday.Run(doc, + []blackfriday.Option{blackfriday.WithRenderer(renderer), + blackfriday.WithExtensions(renderer.GetExtensions())}...) 
+} diff --git a/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go new file mode 100644 index 000000000..0668a66cf --- /dev/null +++ b/vendor/github.com/cpuguy83/go-md2man/v2/md2man/roff.go @@ -0,0 +1,345 @@ +package md2man + +import ( + "fmt" + "io" + "os" + "strings" + + "github.com/russross/blackfriday/v2" +) + +// roffRenderer implements the blackfriday.Renderer interface for creating +// roff format (manpages) from markdown text +type roffRenderer struct { + extensions blackfriday.Extensions + listCounters []int + firstHeader bool + defineTerm bool + listDepth int +} + +const ( + titleHeader = ".TH " + topLevelHeader = "\n\n.SH " + secondLevelHdr = "\n.SH " + otherHeader = "\n.SS " + crTag = "\n" + emphTag = "\\fI" + emphCloseTag = "\\fP" + strongTag = "\\fB" + strongCloseTag = "\\fP" + breakTag = "\n.br\n" + paraTag = "\n.PP\n" + hruleTag = "\n.ti 0\n\\l'\\n(.lu'\n" + linkTag = "\n\\[la]" + linkCloseTag = "\\[ra]" + codespanTag = "\\fB\\fC" + codespanCloseTag = "\\fR" + codeTag = "\n.PP\n.RS\n\n.nf\n" + codeCloseTag = "\n.fi\n.RE\n" + quoteTag = "\n.PP\n.RS\n" + quoteCloseTag = "\n.RE\n" + listTag = "\n.RS\n" + listCloseTag = "\n.RE\n" + arglistTag = "\n.TP\n" + tableStart = "\n.TS\nallbox;\n" + tableEnd = ".TE\n" + tableCellStart = "T{\n" + tableCellEnd = "\nT}\n" +) + +// NewRoffRenderer creates a new blackfriday Renderer for generating roff documents +// from markdown +func NewRoffRenderer() *roffRenderer { // nolint: golint + var extensions blackfriday.Extensions + + extensions |= blackfriday.NoIntraEmphasis + extensions |= blackfriday.Tables + extensions |= blackfriday.FencedCode + extensions |= blackfriday.SpaceHeadings + extensions |= blackfriday.Footnotes + extensions |= blackfriday.Titleblock + extensions |= blackfriday.DefinitionLists + return &roffRenderer{ + extensions: extensions, + } +} + +// GetExtensions returns the list of extensions used by this renderer implementation +func (r *roffRenderer) GetExtensions() blackfriday.Extensions { + return r.extensions +} + +// RenderHeader handles outputting the header at document start +func (r *roffRenderer) RenderHeader(w io.Writer, ast *blackfriday.Node) { + // disable hyphenation + out(w, ".nh\n") +} + +// RenderFooter handles outputting the footer at the document end; the roff +// renderer has no footer information +func (r *roffRenderer) RenderFooter(w io.Writer, ast *blackfriday.Node) { +} + +// RenderNode is called for each node in a markdown document; based on the node +// type the equivalent roff output is sent to the writer +func (r *roffRenderer) RenderNode(w io.Writer, node *blackfriday.Node, entering bool) blackfriday.WalkStatus { + + var walkAction = blackfriday.GoToNext + + switch node.Type { + case blackfriday.Text: + r.handleText(w, node, entering) + case blackfriday.Softbreak: + out(w, crTag) + case blackfriday.Hardbreak: + out(w, breakTag) + case blackfriday.Emph: + if entering { + out(w, emphTag) + } else { + out(w, emphCloseTag) + } + case blackfriday.Strong: + if entering { + out(w, strongTag) + } else { + out(w, strongCloseTag) + } + case blackfriday.Link: + if !entering { + out(w, linkTag+string(node.LinkData.Destination)+linkCloseTag) + } + case blackfriday.Image: + // ignore images + walkAction = blackfriday.SkipChildren + case blackfriday.Code: + out(w, codespanTag) + escapeSpecialChars(w, node.Literal) + out(w, codespanCloseTag) + case blackfriday.Document: + break + case blackfriday.Paragraph: + // roff .PP 
markers break lists + if r.listDepth > 0 { + return blackfriday.GoToNext + } + if entering { + out(w, paraTag) + } else { + out(w, crTag) + } + case blackfriday.BlockQuote: + if entering { + out(w, quoteTag) + } else { + out(w, quoteCloseTag) + } + case blackfriday.Heading: + r.handleHeading(w, node, entering) + case blackfriday.HorizontalRule: + out(w, hruleTag) + case blackfriday.List: + r.handleList(w, node, entering) + case blackfriday.Item: + r.handleItem(w, node, entering) + case blackfriday.CodeBlock: + out(w, codeTag) + escapeSpecialChars(w, node.Literal) + out(w, codeCloseTag) + case blackfriday.Table: + r.handleTable(w, node, entering) + case blackfriday.TableCell: + r.handleTableCell(w, node, entering) + case blackfriday.TableHead: + case blackfriday.TableBody: + case blackfriday.TableRow: + // no action as cell entries do all the nroff formatting + return blackfriday.GoToNext + default: + fmt.Fprintln(os.Stderr, "WARNING: go-md2man does not handle node type "+node.Type.String()) + } + return walkAction +} + +func (r *roffRenderer) handleText(w io.Writer, node *blackfriday.Node, entering bool) { + var ( + start, end string + ) + // handle special roff table cell text encapsulation + if node.Parent.Type == blackfriday.TableCell { + if len(node.Literal) > 30 { + start = tableCellStart + end = tableCellEnd + } else { + // end rows that aren't terminated by "tableCellEnd" with a cr if end of row + if node.Parent.Next == nil && !node.Parent.IsHeader { + end = crTag + } + } + } + out(w, start) + escapeSpecialChars(w, node.Literal) + out(w, end) +} + +func (r *roffRenderer) handleHeading(w io.Writer, node *blackfriday.Node, entering bool) { + if entering { + switch node.Level { + case 1: + if !r.firstHeader { + out(w, titleHeader) + r.firstHeader = true + break + } + out(w, topLevelHeader) + case 2: + out(w, secondLevelHdr) + default: + out(w, otherHeader) + } + } +} + +func (r *roffRenderer) handleList(w io.Writer, node *blackfriday.Node, entering bool) { + openTag := listTag + closeTag := listCloseTag + if node.ListFlags&blackfriday.ListTypeDefinition != 0 { + // tags for definition lists handled within Item node + openTag = "" + closeTag = "" + } + if entering { + r.listDepth++ + if node.ListFlags&blackfriday.ListTypeOrdered != 0 { + r.listCounters = append(r.listCounters, 1) + } + out(w, openTag) + } else { + if node.ListFlags&blackfriday.ListTypeOrdered != 0 { + r.listCounters = r.listCounters[:len(r.listCounters)-1] + } + out(w, closeTag) + r.listDepth-- + } +} + +func (r *roffRenderer) handleItem(w io.Writer, node *blackfriday.Node, entering bool) { + if entering { + if node.ListFlags&blackfriday.ListTypeOrdered != 0 { + out(w, fmt.Sprintf(".IP \"%3d.\" 5\n", r.listCounters[len(r.listCounters)-1])) + r.listCounters[len(r.listCounters)-1]++ + } else if node.ListFlags&blackfriday.ListTypeDefinition != 0 { + // state machine for handling terms and following definitions + // since blackfriday does not distinguish them properly, nor + // does it seperate them into separate lists as it should + if !r.defineTerm { + out(w, arglistTag) + r.defineTerm = true + } else { + r.defineTerm = false + } + } else { + out(w, ".IP \\(bu 2\n") + } + } else { + out(w, "\n") + } +} + +func (r *roffRenderer) handleTable(w io.Writer, node *blackfriday.Node, entering bool) { + if entering { + out(w, tableStart) + //call walker to count cells (and rows?) 
so format section can be produced + columns := countColumns(node) + out(w, strings.Repeat("l ", columns)+"\n") + out(w, strings.Repeat("l ", columns)+".\n") + } else { + out(w, tableEnd) + } +} + +func (r *roffRenderer) handleTableCell(w io.Writer, node *blackfriday.Node, entering bool) { + var ( + start, end string + ) + if node.IsHeader { + start = codespanTag + end = codespanCloseTag + } + if entering { + if node.Prev != nil && node.Prev.Type == blackfriday.TableCell { + out(w, "\t"+start) + } else { + out(w, start) + } + } else { + // need to carriage return if we are at the end of the header row + if node.IsHeader && node.Next == nil { + end = end + crTag + } + out(w, end) + } +} + +// because roff format requires knowing the column count before outputting any table +// data we need to walk a table tree and count the columns +func countColumns(node *blackfriday.Node) int { + var columns int + + node.Walk(func(node *blackfriday.Node, entering bool) blackfriday.WalkStatus { + switch node.Type { + case blackfriday.TableRow: + if !entering { + return blackfriday.Terminate + } + case blackfriday.TableCell: + if entering { + columns++ + } + default: + } + return blackfriday.GoToNext + }) + return columns +} + +func out(w io.Writer, output string) { + io.WriteString(w, output) // nolint: errcheck +} + +func needsBackslash(c byte) bool { + for _, r := range []byte("-_&\\~") { + if c == r { + return true + } + } + return false +} + +func escapeSpecialChars(w io.Writer, text []byte) { + for i := 0; i < len(text); i++ { + // escape initial apostrophe or period + if len(text) >= 1 && (text[0] == '\'' || text[0] == '.') { + out(w, "\\&") + } + + // directly copy normal characters + org := i + + for i < len(text) && !needsBackslash(text[i]) { + i++ + } + if i > org { + w.Write(text[org:i]) // nolint: errcheck + } + + // escape a character + if i >= len(text) { + break + } + + w.Write([]byte{'\\', text[i]}) // nolint: errcheck + } +} diff --git a/vendor/github.com/joeshaw/envdecode/.gitignore b/vendor/github.com/joeshaw/envdecode/.gitignore new file mode 100644 index 000000000..1faf59edf --- /dev/null +++ b/vendor/github.com/joeshaw/envdecode/.gitignore @@ -0,0 +1,2 @@ + +example/example diff --git a/vendor/github.com/joeshaw/envdecode/.travis.yml b/vendor/github.com/joeshaw/envdecode/.travis.yml new file mode 100644 index 000000000..9b84a963c --- /dev/null +++ b/vendor/github.com/joeshaw/envdecode/.travis.yml @@ -0,0 +1,5 @@ +language: go +go: + - 1.9.x + - master + diff --git a/vendor/github.com/joeshaw/envdecode/LICENSE b/vendor/github.com/joeshaw/envdecode/LICENSE new file mode 100644 index 000000000..5869b24ec --- /dev/null +++ b/vendor/github.com/joeshaw/envdecode/LICENSE @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2014 Joe Shaw + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/vendor/github.com/joeshaw/envdecode/README.md b/vendor/github.com/joeshaw/envdecode/README.md new file mode 100644 index 000000000..c51467e53 --- /dev/null +++ b/vendor/github.com/joeshaw/envdecode/README.md @@ -0,0 +1,89 @@ +# envdecode [![Travis-CI](https://travis-ci.org/joeshaw/envdecode.svg)](https://travis-ci.org/joeshaw/envdecode) [![GoDoc](https://godoc.org/github.com/joeshaw/envdecode?status.svg)](https://godoc.org/github.com/joeshaw/envdecode) + +`envdecode` is a Go package for populating structs from environment +variables. + +`envdecode` uses struct tags to map environment variables to fields, +allowing you you use any names you want for environment variables. +`envdecode` will recurse into nested structs, including pointers to +nested structs, but it will not allocate new pointers to structs. + +## API + +Full API docs are available on +[godoc.org](https://godoc.org/github.com/joeshaw/envdecode). + +Define a struct with `env` struct tags: + +```go +type Config struct { + Hostname string `env:"SERVER_HOSTNAME,default=localhost"` + Port uint16 `env:"SERVER_PORT,default=8080"` + + AWS struct { + ID string `env:"AWS_ACCESS_KEY_ID"` + Secret string `env:"AWS_SECRET_ACCESS_KEY,required"` + SnsTopics []string `env:"AWS_SNS_TOPICS"` + } + + Timeout time.Duration `env:"TIMEOUT,default=1m,strict"` +} +``` + +Fields *must be exported* (i.e. begin with a capital letter) in order +for `envdecode` to work with them. An error will be returned if a +struct with no exported fields is decoded (including one that contains +no `env` tags at all). +Default values may be provided by appending ",default=value" to the +struct tag. Required values may be marked by appending ",required" to the +struct tag. Strict values may be marked by appending ",strict" which will +return an error on Decode if there is an error while parsing. + +Then call `envdecode.Decode`: + +```go +var cfg Config +err := envdecode.Decode(&cfg) +``` + +If you want all fields to act `strict`, you may use `envdecode.StrictDecode`: + +```go +var cfg Config +err := envdecode.StrictDecode(&cfg) +``` + +All parse errors will fail fast and return an error in this mode. + +## Supported types + +* Structs (and pointer to structs) +* Slices of below defined types, separated by semicolon +* `bool` +* `float32`, `float64` +* `int`, `int8`, `int16`, `int32`, `int64` +* `uint`, `uint8`, `uint16`, `uint32`, `uint64` +* `string` +* `time.Duration`, using the [`time.ParseDuration()` format](http://golang.org/pkg/time/#ParseDuration) +* `*url.URL`, using [`url.Parse()`](https://godoc.org/net/url#Parse) +* Types those implement a `Decoder` interface + +## Custom `Decoder` + +If you want a field to be decoded with custom behavior, you may implement the interface `Decoder` for the filed type. 
+ +```go +type Config struct { + IPAddr IP `env:"IP_ADDR"` +} + +type IP net.IP + +// Decode implements the interface `envdecode.Decoder` +func (i *IP) Decode(repl string) error { + *i = net.ParseIP(repl) + return nil +} +``` + +`Decoder` is the interface implemented by an object that can decode an environment variable string representation of itself. diff --git a/vendor/github.com/joeshaw/envdecode/envdecode.go b/vendor/github.com/joeshaw/envdecode/envdecode.go new file mode 100644 index 000000000..c8c0a655f --- /dev/null +++ b/vendor/github.com/joeshaw/envdecode/envdecode.go @@ -0,0 +1,423 @@ +// Package envdecode is a package for populating structs from environment +// variables, using struct tags. +package envdecode + +import ( + "encoding" + "errors" + "fmt" + "log" + "net/url" + "os" + "reflect" + "sort" + "strconv" + "strings" + "time" +) + +// ErrInvalidTarget indicates that the target value passed to +// Decode is invalid. Target must be a non-nil pointer to a struct. +var ErrInvalidTarget = errors.New("target must be non-nil pointer to struct that has at least one exported field with a valid env tag.") +var ErrNoTargetFieldsAreSet = errors.New("none of the target fields were set from environment variables") + +// FailureFunc is called when an error is encountered during a MustDecode +// operation. It prints the error and terminates the process. +// +// This variable can be assigned to another function of the user-programmer's +// design, allowing for graceful recovery of the problem, such as loading +// from a backup configuration file. +var FailureFunc = func(err error) { + log.Fatalf("envdecode: an error was encountered while decoding: %v\n", err) +} + +// Decoder is the interface implemented by an object that can decode an +// environment variable string representation of itself. +type Decoder interface { + Decode(string) error +} + +// Decode environment variables into the provided target. The target +// must be a non-nil pointer to a struct. Fields in the struct must +// be exported, and tagged with an "env" struct tag with a value +// containing the name of the environment variable. An error is +// returned if there are no exported members tagged. +// +// Default values may be provided by appending ",default=value" to the +// struct tag. Required values may be marked by appending ",required" +// to the struct tag. It is an error to provide both "default" and +// "required". Strict values may be marked by appending ",strict" which +// will return an error on Decode if there is an error while parsing. +// If everything must be strict, consider using StrictDecode instead. +// +// All primitive types are supported, including bool, floating point, +// signed and unsigned integers, and string. Boolean and numeric +// types are decoded using the standard strconv Parse functions for +// those types. Structs and pointers to structs are decoded +// recursively. time.Duration is supported via the +// time.ParseDuration() function and *url.URL is supported via the +// url.Parse() function. Slices are supported for all above mentioned +// primitive types. Semicolon is used as delimiter in environment variables. +func Decode(target interface{}) error { + nFields, err := decode(target, false) + if err != nil { + return err + } + + // if we didn't do anything - the user probably did something + // wrong like leave all fields unexported. 
+ if nFields == 0 { + return ErrNoTargetFieldsAreSet + } + + return nil +} + +// StrictDecode is similar to Decode except all fields will have an implicit +// ",strict" on all fields. +func StrictDecode(target interface{}) error { + nFields, err := decode(target, true) + if err != nil { + return err + } + + // if we didn't do anything - the user probably did something + // wrong like leave all fields unexported. + if nFields == 0 { + return ErrInvalidTarget + } + + return nil +} + +func decode(target interface{}, strict bool) (int, error) { + s := reflect.ValueOf(target) + if s.Kind() != reflect.Ptr || s.IsNil() { + return 0, ErrInvalidTarget + } + + s = s.Elem() + if s.Kind() != reflect.Struct { + return 0, ErrInvalidTarget + } + + t := s.Type() + setFieldCount := 0 + for i := 0; i < s.NumField(); i++ { + // Localize the umbrella `strict` value to the specific field. + strict := strict + + f := s.Field(i) + + switch f.Kind() { + case reflect.Ptr: + if f.Elem().Kind() != reflect.Struct { + break + } + + f = f.Elem() + fallthrough + + case reflect.Struct: + if !f.Addr().CanInterface() { + continue + } + + ss := f.Addr().Interface() + _, custom := ss.(Decoder) + if custom { + break + } + + n, err := decode(ss, strict) + if err != nil { + return 0, err + } + setFieldCount += n + } + + if !f.CanSet() { + continue + } + + tag := t.Field(i).Tag.Get("env") + if tag == "" { + continue + } + + parts := strings.Split(tag, ",") + env := os.Getenv(parts[0]) + + required := false + hasDefault := false + defaultValue := "" + + for _, o := range parts[1:] { + if !required { + required = strings.HasPrefix(o, "required") + } + if strings.HasPrefix(o, "default=") { + hasDefault = true + defaultValue = o[8:] + } + if !strict { + strict = strings.HasPrefix(o, "strict") + } + } + + if required && hasDefault { + panic(`envdecode: "default" and "required" may not be specified in the same annotation`) + } + if env == "" && required { + return 0, fmt.Errorf("the environment variable \"%s\" is missing", parts[0]) + } + if env == "" { + env = defaultValue + } + if env == "" { + continue + } + + setFieldCount++ + + unmarshaler, implementsUnmarshaler := f.Addr().Interface().(encoding.TextUnmarshaler) + decoder, implmentsDecoder := f.Addr().Interface().(Decoder) + if implmentsDecoder { + if err := decoder.Decode(env); err != nil { + return 0, err + } + } else if implementsUnmarshaler { + if err := unmarshaler.UnmarshalText([]byte(env)); err != nil { + return 0, err + } + } else if f.Kind() == reflect.Slice { + decodeSlice(&f, env) + } else { + if err := decodePrimitiveType(&f, env); err != nil && strict { + return 0, err + } + } + } + + return setFieldCount, nil +} + +func decodeSlice(f *reflect.Value, env string) { + parts := strings.Split(env, ";") + + values := parts[:0] + for _, x := range parts { + if x != "" { + values = append(values, strings.TrimSpace(x)) + } + } + + valuesCount := len(values) + slice := reflect.MakeSlice(f.Type(), valuesCount, valuesCount) + if valuesCount > 0 { + for i := 0; i < valuesCount; i++ { + e := slice.Index(i) + decodePrimitiveType(&e, values[i]) + } + } + + f.Set(slice) +} + +func decodePrimitiveType(f *reflect.Value, env string) error { + switch f.Kind() { + case reflect.Bool: + v, err := strconv.ParseBool(env) + if err != nil { + return err + } + f.SetBool(v) + + case reflect.Float32, reflect.Float64: + bits := f.Type().Bits() + v, err := strconv.ParseFloat(env, bits) + if err != nil { + return err + } + f.SetFloat(v) + + case reflect.Int, reflect.Int8, reflect.Int16, 
reflect.Int32, reflect.Int64: + if t := f.Type(); t.PkgPath() == "time" && t.Name() == "Duration" { + v, err := time.ParseDuration(env) + if err != nil { + return err + } + f.SetInt(int64(v)) + } else { + bits := f.Type().Bits() + v, err := strconv.ParseInt(env, 0, bits) + if err != nil { + return err + } + f.SetInt(v) + } + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + bits := f.Type().Bits() + v, err := strconv.ParseUint(env, 0, bits) + if err != nil { + return err + } + f.SetUint(v) + + case reflect.String: + f.SetString(env) + + case reflect.Ptr: + if t := f.Type().Elem(); t.Kind() == reflect.Struct && t.PkgPath() == "net/url" && t.Name() == "URL" { + v, err := url.Parse(env) + if err != nil { + return err + } + f.Set(reflect.ValueOf(v)) + } + } + return nil +} + +// MustDecode calls Decode and terminates the process if any errors +// are encountered. +func MustDecode(target interface{}) { + err := Decode(target) + if err != nil { + FailureFunc(err) + } +} + +// MustStrictDecode calls StrictDecode and terminates the process if any errors +// are encountered. +func MustStrictDecode(target interface{}) { + err := StrictDecode(target) + if err != nil { + FailureFunc(err) + } +} + +//// Configuration info for Export + +type ConfigInfo struct { + Field string + EnvVar string + Value string + DefaultValue string + HasDefault bool + Required bool + UsesEnv bool +} + +type ConfigInfoSlice []*ConfigInfo + +func (c ConfigInfoSlice) Less(i, j int) bool { + return c[i].EnvVar < c[j].EnvVar +} +func (c ConfigInfoSlice) Len() int { + return len(c) +} +func (c ConfigInfoSlice) Swap(i, j int) { + c[i], c[j] = c[j], c[i] +} + +// Returns a list of final configuration metadata sorted by envvar name +func Export(target interface{}) ([]*ConfigInfo, error) { + s := reflect.ValueOf(target) + if s.Kind() != reflect.Ptr || s.IsNil() { + return nil, ErrInvalidTarget + } + + cfg := []*ConfigInfo{} + + s = s.Elem() + if s.Kind() != reflect.Struct { + return nil, ErrInvalidTarget + } + + t := s.Type() + for i := 0; i < s.NumField(); i++ { + f := s.Field(i) + fName := t.Field(i).Name + + fElem := f + if f.Kind() == reflect.Ptr { + fElem = f.Elem() + } + if fElem.Kind() == reflect.Struct { + ss := fElem.Addr().Interface() + subCfg, err := Export(ss) + if err != ErrInvalidTarget { + f = fElem + for _, v := range subCfg { + v.Field = fmt.Sprintf("%s.%s", fName, v.Field) + cfg = append(cfg, v) + } + } + } + + tag := t.Field(i).Tag.Get("env") + if tag == "" { + continue + } + + parts := strings.Split(tag, ",") + + ci := &ConfigInfo{ + Field: fName, + EnvVar: parts[0], + UsesEnv: os.Getenv(parts[0]) != "", + } + + for _, o := range parts[1:] { + if strings.HasPrefix(o, "default=") { + ci.HasDefault = true + ci.DefaultValue = o[8:] + } else if strings.HasPrefix(o, "required") { + ci.Required = true + } + } + + if f.Kind() == reflect.Ptr && f.IsNil() { + ci.Value = "" + } else if stringer, ok := f.Interface().(fmt.Stringer); ok { + ci.Value = stringer.String() + } else { + switch f.Kind() { + case reflect.Bool: + ci.Value = strconv.FormatBool(f.Bool()) + + case reflect.Float32, reflect.Float64: + bits := f.Type().Bits() + ci.Value = strconv.FormatFloat(f.Float(), 'f', -1, bits) + + case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: + ci.Value = strconv.FormatInt(f.Int(), 10) + + case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: + ci.Value = strconv.FormatUint(f.Uint(), 10) + + case reflect.String: + ci.Value = f.String() + + 
case reflect.Slice: + ci.Value = fmt.Sprintf("%v", f.Interface()) + + default: + // Unable to determine string format for value + return nil, ErrInvalidTarget + } + } + + cfg = append(cfg, ci) + } + + // No configuration tags found, assume invalid input + if len(cfg) == 0 { + return nil, ErrInvalidTarget + } + + sort.Sort(ConfigInfoSlice(cfg)) + + return cfg, nil +} diff --git a/vendor/github.com/joeshaw/envdecode/go.mod b/vendor/github.com/joeshaw/envdecode/go.mod new file mode 100644 index 000000000..06dc3be54 --- /dev/null +++ b/vendor/github.com/joeshaw/envdecode/go.mod @@ -0,0 +1,3 @@ +module github.com/joeshaw/envdecode + +go 1.12 diff --git a/vendor/github.com/russross/blackfriday/v2/.gitignore b/vendor/github.com/russross/blackfriday/v2/.gitignore new file mode 100644 index 000000000..75623dccc --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/.gitignore @@ -0,0 +1,8 @@ +*.out +*.swp +*.8 +*.6 +_obj +_test* +markdown +tags diff --git a/vendor/github.com/russross/blackfriday/v2/.travis.yml b/vendor/github.com/russross/blackfriday/v2/.travis.yml new file mode 100644 index 000000000..b0b525a5a --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/.travis.yml @@ -0,0 +1,17 @@ +sudo: false +language: go +go: + - "1.10.x" + - "1.11.x" + - tip +matrix: + fast_finish: true + allow_failures: + - go: tip +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go tool vet . + - go test -v ./... diff --git a/vendor/github.com/russross/blackfriday/v2/LICENSE.txt b/vendor/github.com/russross/blackfriday/v2/LICENSE.txt new file mode 100644 index 000000000..2885af360 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/LICENSE.txt @@ -0,0 +1,29 @@ +Blackfriday is distributed under the Simplified BSD License: + +> Copyright © 2011 Russ Ross +> All rights reserved. +> +> Redistribution and use in source and binary forms, with or without +> modification, are permitted provided that the following conditions +> are met: +> +> 1. Redistributions of source code must retain the above copyright +> notice, this list of conditions and the following disclaimer. +> +> 2. Redistributions in binary form must reproduce the above +> copyright notice, this list of conditions and the following +> disclaimer in the documentation and/or other materials provided with +> the distribution. +> +> THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +> "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +> LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS +> FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE +> COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +> INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +> BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; +> LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER +> CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT +> LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN +> ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE +> POSSIBILITY OF SUCH DAMAGE. 
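The envdecode package vendored above is driven entirely by `env` struct tags: a variable name plus optional `default=`, `required`, and `strict` modifiers, with semicolon-delimited values for slice fields. A minimal, self-contained sketch of a consumer — the `ServerConfig` type, its fields, and the `APP_*` variable names are illustrative assumptions, not part of this change:

```go
package main

import (
	"fmt"
	"log"
	"os"
	"time"

	"github.com/joeshaw/envdecode"
)

// ServerConfig is a hypothetical consumer of the tag options documented
// above: a required variable, a defaulted time.Duration, and a
// semicolon-delimited slice.
type ServerConfig struct {
	Domain  string        `env:"APP_DOMAIN,required"`
	Timeout time.Duration `env:"APP_HTTP_TIMEOUT,default=10s"`
	Scopes  []string      `env:"APP_SCOPES,default=read:clients;read:logs"`
}

func main() {
	// Set the required variable so the sketch succeeds when run as-is.
	os.Setenv("APP_DOMAIN", "example.local")

	var cfg ServerConfig
	// StrictDecode behaves like Decode but also returns parse errors
	// for every field, as if each tag carried ",strict".
	if err := envdecode.StrictDecode(&cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg)
}
```

Plain `Decode` follows the same tag rules but only fails on missing `required` variables; per the package documentation, `StrictDecode` additionally surfaces parse errors for every field.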
diff --git a/vendor/github.com/russross/blackfriday/v2/README.md b/vendor/github.com/russross/blackfriday/v2/README.md new file mode 100644 index 000000000..d5a8649bd --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/README.md @@ -0,0 +1,291 @@ +Blackfriday [![Build Status](https://travis-ci.org/russross/blackfriday.svg?branch=master)](https://travis-ci.org/russross/blackfriday) +=========== + +Blackfriday is a [Markdown][1] processor implemented in [Go][2]. It +is paranoid about its input (so you can safely feed it user-supplied +data), it is fast, it supports common extensions (tables, smart +punctuation substitutions, etc.), and it is safe for all utf-8 +(unicode) input. + +HTML output is currently supported, along with Smartypants +extensions. + +It started as a translation from C of [Sundown][3]. + + +Installation +------------ + +Blackfriday is compatible with any modern Go release. With Go 1.7 and git +installed: + + go get gopkg.in/russross/blackfriday.v2 + +will download, compile, and install the package into your `$GOPATH` +directory hierarchy. Alternatively, you can achieve the same if you +import it into a project: + + import "gopkg.in/russross/blackfriday.v2" + +and `go get` without parameters. + + +Versions +-------- + +Currently maintained and recommended version of Blackfriday is `v2`. It's being +developed on its own branch: https://github.com/russross/blackfriday/tree/v2 and the +documentation is available at +https://godoc.org/gopkg.in/russross/blackfriday.v2. + +It is `go get`-able via via [gopkg.in][6] at `gopkg.in/russross/blackfriday.v2`, +but we highly recommend using package management tool like [dep][7] or +[Glide][8] and make use of semantic versioning. With package management you +should import `github.com/russross/blackfriday` and specify that you're using +version 2.0.0. + +Version 2 offers a number of improvements over v1: + +* Cleaned up API +* A separate call to [`Parse`][4], which produces an abstract syntax tree for + the document +* Latest bug fixes +* Flexibility to easily add your own rendering extensions + +Potential drawbacks: + +* Our benchmarks show v2 to be slightly slower than v1. Currently in the + ballpark of around 15%. +* API breakage. If you can't afford modifying your code to adhere to the new API + and don't care too much about the new features, v2 is probably not for you. +* Several bug fixes are trailing behind and still need to be forward-ported to + v2. See issue [#348](https://github.com/russross/blackfriday/issues/348) for + tracking. + +Usage +----- + +For the most sensible markdown processing, it is as simple as getting your input +into a byte slice and calling: + +```go +output := blackfriday.Run(input) +``` + +Your input will be parsed and the output rendered with a set of most popular +extensions enabled. If you want the most basic feature set, corresponding with +the bare Markdown specification, use: + +```go +output := blackfriday.Run(input, blackfriday.WithNoExtensions()) +``` + +### Sanitize untrusted content + +Blackfriday itself does nothing to protect against malicious content. If you are +dealing with user-supplied markdown, we recommend running Blackfriday's output +through HTML sanitizer such as [Bluemonday][5]. + +Here's an example of simple usage of Blackfriday together with Bluemonday: + +```go +import ( + "github.com/microcosm-cc/bluemonday" + "github.com/russross/blackfriday" +) + +// ... 
+unsafe := blackfriday.Run(input) +html := bluemonday.UGCPolicy().SanitizeBytes(unsafe) +``` + +### Custom options + +If you want to customize the set of options, use `blackfriday.WithExtensions`, +`blackfriday.WithRenderer` and `blackfriday.WithRefOverride`. + +You can also check out `blackfriday-tool` for a more complete example +of how to use it. Download and install it using: + + go get github.com/russross/blackfriday-tool + +This is a simple command-line tool that allows you to process a +markdown file using a standalone program. You can also browse the +source directly on github if you are just looking for some example +code: + +* + +Note that if you have not already done so, installing +`blackfriday-tool` will be sufficient to download and install +blackfriday in addition to the tool itself. The tool binary will be +installed in `$GOPATH/bin`. This is a statically-linked binary that +can be copied to wherever you need it without worrying about +dependencies and library versions. + + +Features +-------- + +All features of Sundown are supported, including: + +* **Compatibility**. The Markdown v1.0.3 test suite passes with + the `--tidy` option. Without `--tidy`, the differences are + mostly in whitespace and entity escaping, where blackfriday is + more consistent and cleaner. + +* **Common extensions**, including table support, fenced code + blocks, autolinks, strikethroughs, non-strict emphasis, etc. + +* **Safety**. Blackfriday is paranoid when parsing, making it safe + to feed untrusted user input without fear of bad things + happening. The test suite stress tests this and there are no + known inputs that make it crash. If you find one, please let me + know and send me the input that does it. + + NOTE: "safety" in this context means *runtime safety only*. In order to + protect yourself against JavaScript injection in untrusted content, see + [this example](https://github.com/russross/blackfriday#sanitize-untrusted-content). + +* **Fast processing**. It is fast enough to render on-demand in + most web applications without having to cache the output. + +* **Thread safety**. You can run multiple parsers in different + goroutines without ill effect. There is no dependence on global + shared state. + +* **Minimal dependencies**. Blackfriday only depends on standard + library packages in Go. The source code is pretty + self-contained, so it is easy to add to any project, including + Google App Engine projects. + +* **Standards compliant**. Output successfully validates using the + W3C validation tool for HTML 4.01 and XHTML 1.0 Transitional. + + +Extensions +---------- + +In addition to the standard markdown syntax, this package +implements the following extensions: + +* **Intra-word emphasis supression**. The `_` character is + commonly used inside words when discussing code, so having + markdown interpret it as an emphasis command is usually the + wrong thing. Blackfriday lets you treat all emphasis markers as + normal characters when they occur inside a word. + +* **Tables**. Tables can be created by drawing them in the input + using a simple syntax: + + ``` + Name | Age + --------|------ + Bob | 27 + Alice | 23 + ``` + +* **Fenced code blocks**. In addition to the normal 4-space + indentation to mark code blocks, you can explicitly mark them + and supply a language (to make syntax highlighting simple). 
Just + mark it like this: + + ```go + func getTrue() bool { + return true + } + ``` + + You can use 3 or more backticks to mark the beginning of the + block, and the same number to mark the end of the block. + +* **Definition lists**. A simple definition list is made of a single-line + term followed by a colon and the definition for that term. + + Cat + : Fluffy animal everyone likes + + Internet + : Vector of transmission for pictures of cats + + Terms must be separated from the previous definition by a blank line. + +* **Footnotes**. A marker in the text that will become a superscript number; + a footnote definition that will be placed in a list of footnotes at the + end of the document. A footnote looks like this: + + This is a footnote.[^1] + + [^1]: the footnote text. + +* **Autolinking**. Blackfriday can find URLs that have not been + explicitly marked as links and turn them into links. + +* **Strikethrough**. Use two tildes (`~~`) to mark text that + should be crossed out. + +* **Hard line breaks**. With this extension enabled newlines in the input + translate into line breaks in the output. This extension is off by default. + +* **Smart quotes**. Smartypants-style punctuation substitution is + supported, turning normal double- and single-quote marks into + curly quotes, etc. + +* **LaTeX-style dash parsing** is an additional option, where `--` + is translated into `–`, and `---` is translated into + `—`. This differs from most smartypants processors, which + turn a single hyphen into an ndash and a double hyphen into an + mdash. + +* **Smart fractions**, where anything that looks like a fraction + is translated into suitable HTML (instead of just a few special + cases like most smartypant processors). For example, `4/5` + becomes `45`, which renders as + 45. + + +Other renderers +--------------- + +Blackfriday is structured to allow alternative rendering engines. Here +are a few of note: + +* [github_flavored_markdown](https://godoc.org/github.com/shurcooL/github_flavored_markdown): + provides a GitHub Flavored Markdown renderer with fenced code block + highlighting, clickable heading anchor links. + + It's not customizable, and its goal is to produce HTML output + equivalent to the [GitHub Markdown API endpoint](https://developer.github.com/v3/markdown/#render-a-markdown-document-in-raw-mode), + except the rendering is performed locally. + +* [markdownfmt](https://github.com/shurcooL/markdownfmt): like gofmt, + but for markdown. + +* [LaTeX output](https://github.com/Ambrevar/Blackfriday-LaTeX): + renders output as LaTeX. + +* [Blackfriday-Confluence](https://github.com/kentaro-m/blackfriday-confluence): provides a [Confluence Wiki Markup](https://confluence.atlassian.com/doc/confluence-wiki-markup-251003035.html) renderer. + + +Todo +---- + +* More unit testing +* Improve unicode support. It does not understand all unicode + rules (about what constitutes a letter, a punctuation symbol, + etc.), so it may fail to detect word boundaries correctly in + some instances. It is safe on all utf-8 input. 
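Tying the usage notes above together, here is a short sketch that combines `Run` with `WithExtensions` and `WithRenderer`; the import path assumes the v2 module path used by this vendored copy, and the input document and flag choices are illustrative:

```go
package main

import (
	"fmt"

	blackfriday "github.com/russross/blackfriday/v2"
)

func main() {
	input := []byte("# Title\n\nName  | Age\n------|----\nBob   | 27\nAlice | 23\n")

	// Standalone HTML renderer with the default flag set.
	renderer := blackfriday.NewHTMLRenderer(blackfriday.HTMLRendererParameters{
		Flags: blackfriday.CommonHTMLFlags,
	})

	// Common extensions (tables, fenced code, autolinks, ...) plus
	// hard line breaks, which this README notes is off by default.
	output := blackfriday.Run(input,
		blackfriday.WithExtensions(blackfriday.CommonExtensions|blackfriday.HardLineBreak),
		blackfriday.WithRenderer(renderer),
	)
	fmt.Println(string(output))
}
```

Swapping in `blackfriday.WithNoExtensions()` instead restores the bare Markdown feature set described earlier in this README.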
+ + +License +------- + +[Blackfriday is distributed under the Simplified BSD License](LICENSE.txt) + + + [1]: https://daringfireball.net/projects/markdown/ "Markdown" + [2]: https://golang.org/ "Go Language" + [3]: https://github.com/vmg/sundown "Sundown" + [4]: https://godoc.org/gopkg.in/russross/blackfriday.v2#Parse "Parse func" + [5]: https://github.com/microcosm-cc/bluemonday "Bluemonday" + [6]: https://labix.org/gopkg.in "gopkg.in" diff --git a/vendor/github.com/russross/blackfriday/v2/block.go b/vendor/github.com/russross/blackfriday/v2/block.go new file mode 100644 index 000000000..b8607474e --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/block.go @@ -0,0 +1,1590 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// Functions to parse block-level elements. +// + +package blackfriday + +import ( + "bytes" + "html" + "regexp" + "strings" + + "github.com/shurcooL/sanitized_anchor_name" +) + +const ( + charEntity = "&(?:#x[a-f0-9]{1,8}|#[0-9]{1,8}|[a-z][a-z0-9]{1,31});" + escapable = "[!\"#$%&'()*+,./:;<=>?@[\\\\\\]^_`{|}~-]" +) + +var ( + reBackslashOrAmp = regexp.MustCompile("[\\&]") + reEntityOrEscapedChar = regexp.MustCompile("(?i)\\\\" + escapable + "|" + charEntity) +) + +// Parse block-level data. +// Note: this function and many that it calls assume that +// the input buffer ends with a newline. +func (p *Markdown) block(data []byte) { + // this is called recursively: enforce a maximum depth + if p.nesting >= p.maxNesting { + return + } + p.nesting++ + + // parse out one block-level construct at a time + for len(data) > 0 { + // prefixed heading: + // + // # Heading 1 + // ## Heading 2 + // ... + // ###### Heading 6 + if p.isPrefixHeading(data) { + data = data[p.prefixHeading(data):] + continue + } + + // block of preformatted HTML: + // + //
+	//     <div>
+	//         ...
+	//     </div>
+ if data[0] == '<' { + if i := p.html(data, true); i > 0 { + data = data[i:] + continue + } + } + + // title block + // + // % stuff + // % more stuff + // % even more stuff + if p.extensions&Titleblock != 0 { + if data[0] == '%' { + if i := p.titleBlock(data, true); i > 0 { + data = data[i:] + continue + } + } + } + + // blank lines. note: returns the # of bytes to skip + if i := p.isEmpty(data); i > 0 { + data = data[i:] + continue + } + + // indented code block: + // + // func max(a, b int) int { + // if a > b { + // return a + // } + // return b + // } + if p.codePrefix(data) > 0 { + data = data[p.code(data):] + continue + } + + // fenced code block: + // + // ``` go + // func fact(n int) int { + // if n <= 1 { + // return n + // } + // return n * fact(n-1) + // } + // ``` + if p.extensions&FencedCode != 0 { + if i := p.fencedCodeBlock(data, true); i > 0 { + data = data[i:] + continue + } + } + + // horizontal rule: + // + // ------ + // or + // ****** + // or + // ______ + if p.isHRule(data) { + p.addBlock(HorizontalRule, nil) + var i int + for i = 0; i < len(data) && data[i] != '\n'; i++ { + } + data = data[i:] + continue + } + + // block quote: + // + // > A big quote I found somewhere + // > on the web + if p.quotePrefix(data) > 0 { + data = data[p.quote(data):] + continue + } + + // table: + // + // Name | Age | Phone + // ------|-----|--------- + // Bob | 31 | 555-1234 + // Alice | 27 | 555-4321 + if p.extensions&Tables != 0 { + if i := p.table(data); i > 0 { + data = data[i:] + continue + } + } + + // an itemized/unordered list: + // + // * Item 1 + // * Item 2 + // + // also works with + or - + if p.uliPrefix(data) > 0 { + data = data[p.list(data, 0):] + continue + } + + // a numbered/ordered list: + // + // 1. Item 1 + // 2. Item 2 + if p.oliPrefix(data) > 0 { + data = data[p.list(data, ListTypeOrdered):] + continue + } + + // definition lists: + // + // Term 1 + // : Definition a + // : Definition b + // + // Term 2 + // : Definition c + if p.extensions&DefinitionLists != 0 { + if p.dliPrefix(data) > 0 { + data = data[p.list(data, ListTypeDefinition):] + continue + } + } + + // anything else must look like a normal paragraph + // note: this finds underlined headings, too + data = data[p.paragraph(data):] + } + + p.nesting-- +} + +func (p *Markdown) addBlock(typ NodeType, content []byte) *Node { + p.closeUnmatchedBlocks() + container := p.addChild(typ, 0) + container.content = content + return container +} + +func (p *Markdown) isPrefixHeading(data []byte) bool { + if data[0] != '#' { + return false + } + + if p.extensions&SpaceHeadings != 0 { + level := 0 + for level < 6 && level < len(data) && data[level] == '#' { + level++ + } + if level == len(data) || data[level] != ' ' { + return false + } + } + return true +} + +func (p *Markdown) prefixHeading(data []byte) int { + level := 0 + for level < 6 && level < len(data) && data[level] == '#' { + level++ + } + i := skipChar(data, level, ' ') + end := skipUntilChar(data, i, '\n') + skip := end + id := "" + if p.extensions&HeadingIDs != 0 { + j, k := 0, 0 + // find start/end of heading id + for j = i; j < end-1 && (data[j] != '{' || data[j+1] != '#'); j++ { + } + for k = j + 1; k < end && data[k] != '}'; k++ { + } + // extract heading id iff found + if j < end && k < end { + id = string(data[j+2 : k]) + end = j + skip = k + 1 + for end > 0 && data[end-1] == ' ' { + end-- + } + } + } + for end > 0 && data[end-1] == '#' { + if isBackslashEscaped(data, end-1) { + break + } + end-- + } + for end > 0 && data[end-1] == ' ' { + end-- + 
} + if end > i { + if id == "" && p.extensions&AutoHeadingIDs != 0 { + id = sanitized_anchor_name.Create(string(data[i:end])) + } + block := p.addBlock(Heading, data[i:end]) + block.HeadingID = id + block.Level = level + } + return skip +} + +func (p *Markdown) isUnderlinedHeading(data []byte) int { + // test of level 1 heading + if data[0] == '=' { + i := skipChar(data, 1, '=') + i = skipChar(data, i, ' ') + if i < len(data) && data[i] == '\n' { + return 1 + } + return 0 + } + + // test of level 2 heading + if data[0] == '-' { + i := skipChar(data, 1, '-') + i = skipChar(data, i, ' ') + if i < len(data) && data[i] == '\n' { + return 2 + } + return 0 + } + + return 0 +} + +func (p *Markdown) titleBlock(data []byte, doRender bool) int { + if data[0] != '%' { + return 0 + } + splitData := bytes.Split(data, []byte("\n")) + var i int + for idx, b := range splitData { + if !bytes.HasPrefix(b, []byte("%")) { + i = idx // - 1 + break + } + } + + data = bytes.Join(splitData[0:i], []byte("\n")) + consumed := len(data) + data = bytes.TrimPrefix(data, []byte("% ")) + data = bytes.Replace(data, []byte("\n% "), []byte("\n"), -1) + block := p.addBlock(Heading, data) + block.Level = 1 + block.IsTitleblock = true + + return consumed +} + +func (p *Markdown) html(data []byte, doRender bool) int { + var i, j int + + // identify the opening tag + if data[0] != '<' { + return 0 + } + curtag, tagfound := p.htmlFindTag(data[1:]) + + // handle special cases + if !tagfound { + // check for an HTML comment + if size := p.htmlComment(data, doRender); size > 0 { + return size + } + + // check for an
tag + if size := p.htmlHr(data, doRender); size > 0 { + return size + } + + // no special case recognized + return 0 + } + + // look for an unindented matching closing tag + // followed by a blank line + found := false + /* + closetag := []byte("\n") + j = len(curtag) + 1 + for !found { + // scan for a closing tag at the beginning of a line + if skip := bytes.Index(data[j:], closetag); skip >= 0 { + j += skip + len(closetag) + } else { + break + } + + // see if it is the only thing on the line + if skip := p.isEmpty(data[j:]); skip > 0 { + // see if it is followed by a blank line/eof + j += skip + if j >= len(data) { + found = true + i = j + } else { + if skip := p.isEmpty(data[j:]); skip > 0 { + j += skip + found = true + i = j + } + } + } + } + */ + + // if not found, try a second pass looking for indented match + // but not if tag is "ins" or "del" (following original Markdown.pl) + if !found && curtag != "ins" && curtag != "del" { + i = 1 + for i < len(data) { + i++ + for i < len(data) && !(data[i-1] == '<' && data[i] == '/') { + i++ + } + + if i+2+len(curtag) >= len(data) { + break + } + + j = p.htmlFindEnd(curtag, data[i-1:]) + + if j > 0 { + i += j - 1 + found = true + break + } + } + } + + if !found { + return 0 + } + + // the end of the block has been found + if doRender { + // trim newlines + end := i + for end > 0 && data[end-1] == '\n' { + end-- + } + finalizeHTMLBlock(p.addBlock(HTMLBlock, data[:end])) + } + + return i +} + +func finalizeHTMLBlock(block *Node) { + block.Literal = block.content + block.content = nil +} + +// HTML comment, lax form +func (p *Markdown) htmlComment(data []byte, doRender bool) int { + i := p.inlineHTMLComment(data) + // needs to end with a blank line + if j := p.isEmpty(data[i:]); j > 0 { + size := i + j + if doRender { + // trim trailing newlines + end := size + for end > 0 && data[end-1] == '\n' { + end-- + } + block := p.addBlock(HTMLBlock, data[:end]) + finalizeHTMLBlock(block) + } + return size + } + return 0 +} + +// HR, which is the only self-closing block tag considered +func (p *Markdown) htmlHr(data []byte, doRender bool) int { + if len(data) < 4 { + return 0 + } + if data[0] != '<' || (data[1] != 'h' && data[1] != 'H') || (data[2] != 'r' && data[2] != 'R') { + return 0 + } + if data[3] != ' ' && data[3] != '/' && data[3] != '>' { + // not an
tag after all; at least not a valid one + return 0 + } + i := 3 + for i < len(data) && data[i] != '>' && data[i] != '\n' { + i++ + } + if i < len(data) && data[i] == '>' { + i++ + if j := p.isEmpty(data[i:]); j > 0 { + size := i + j + if doRender { + // trim newlines + end := size + for end > 0 && data[end-1] == '\n' { + end-- + } + finalizeHTMLBlock(p.addBlock(HTMLBlock, data[:end])) + } + return size + } + } + return 0 +} + +func (p *Markdown) htmlFindTag(data []byte) (string, bool) { + i := 0 + for i < len(data) && isalnum(data[i]) { + i++ + } + key := string(data[:i]) + if _, ok := blockTags[key]; ok { + return key, true + } + return "", false +} + +func (p *Markdown) htmlFindEnd(tag string, data []byte) int { + // assume data[0] == '<' && data[1] == '/' already tested + if tag == "hr" { + return 2 + } + // check if tag is a match + closetag := []byte("") + if !bytes.HasPrefix(data, closetag) { + return 0 + } + i := len(closetag) + + // check that the rest of the line is blank + skip := 0 + if skip = p.isEmpty(data[i:]); skip == 0 { + return 0 + } + i += skip + skip = 0 + + if i >= len(data) { + return i + } + + if p.extensions&LaxHTMLBlocks != 0 { + return i + } + if skip = p.isEmpty(data[i:]); skip == 0 { + // following line must be blank + return 0 + } + + return i + skip +} + +func (*Markdown) isEmpty(data []byte) int { + // it is okay to call isEmpty on an empty buffer + if len(data) == 0 { + return 0 + } + + var i int + for i = 0; i < len(data) && data[i] != '\n'; i++ { + if data[i] != ' ' && data[i] != '\t' { + return 0 + } + } + if i < len(data) && data[i] == '\n' { + i++ + } + return i +} + +func (*Markdown) isHRule(data []byte) bool { + i := 0 + + // skip up to three spaces + for i < 3 && data[i] == ' ' { + i++ + } + + // look at the hrule char + if data[i] != '*' && data[i] != '-' && data[i] != '_' { + return false + } + c := data[i] + + // the whole line must be the char or whitespace + n := 0 + for i < len(data) && data[i] != '\n' { + switch { + case data[i] == c: + n++ + case data[i] != ' ': + return false + } + i++ + } + + return n >= 3 +} + +// isFenceLine checks if there's a fence line (e.g., ``` or ``` go) at the beginning of data, +// and returns the end index if so, or 0 otherwise. It also returns the marker found. +// If info is not nil, it gets set to the syntax specified in the fence line. +func isFenceLine(data []byte, info *string, oldmarker string) (end int, marker string) { + i, size := 0, 0 + + // skip up to three spaces + for i < len(data) && i < 3 && data[i] == ' ' { + i++ + } + + // check for the marker characters: ~ or ` + if i >= len(data) { + return 0, "" + } + if data[i] != '~' && data[i] != '`' { + return 0, "" + } + + c := data[i] + + // the whole line must be the same char or whitespace + for i < len(data) && data[i] == c { + size++ + i++ + } + + // the marker char must occur at least 3 times + if size < 3 { + return 0, "" + } + marker = string(data[i-size : i]) + + // if this is the end marker, it must match the beginning marker + if oldmarker != "" && marker != oldmarker { + return 0, "" + } + + // TODO(shurcooL): It's probably a good idea to simplify the 2 code paths here + // into one, always get the info string, and discard it if the caller doesn't care. 
+ if info != nil { + infoLength := 0 + i = skipChar(data, i, ' ') + + if i >= len(data) { + if i == len(data) { + return i, marker + } + return 0, "" + } + + infoStart := i + + if data[i] == '{' { + i++ + infoStart++ + + for i < len(data) && data[i] != '}' && data[i] != '\n' { + infoLength++ + i++ + } + + if i >= len(data) || data[i] != '}' { + return 0, "" + } + + // strip all whitespace at the beginning and the end + // of the {} block + for infoLength > 0 && isspace(data[infoStart]) { + infoStart++ + infoLength-- + } + + for infoLength > 0 && isspace(data[infoStart+infoLength-1]) { + infoLength-- + } + i++ + i = skipChar(data, i, ' ') + } else { + for i < len(data) && !isverticalspace(data[i]) { + infoLength++ + i++ + } + } + + *info = strings.TrimSpace(string(data[infoStart : infoStart+infoLength])) + } + + if i == len(data) { + return i, marker + } + if i > len(data) || data[i] != '\n' { + return 0, "" + } + return i + 1, marker // Take newline into account. +} + +// fencedCodeBlock returns the end index if data contains a fenced code block at the beginning, +// or 0 otherwise. It writes to out if doRender is true, otherwise it has no side effects. +// If doRender is true, a final newline is mandatory to recognize the fenced code block. +func (p *Markdown) fencedCodeBlock(data []byte, doRender bool) int { + var info string + beg, marker := isFenceLine(data, &info, "") + if beg == 0 || beg >= len(data) { + return 0 + } + + var work bytes.Buffer + work.Write([]byte(info)) + work.WriteByte('\n') + + for { + // safe to assume beg < len(data) + + // check for the end of the code block + fenceEnd, _ := isFenceLine(data[beg:], nil, marker) + if fenceEnd != 0 { + beg += fenceEnd + break + } + + // copy the current line + end := skipUntilChar(data, beg, '\n') + 1 + + // did we reach the end of the buffer without a closing marker? 
+ if end >= len(data) { + return 0 + } + + // verbatim copy to the working buffer + if doRender { + work.Write(data[beg:end]) + } + beg = end + } + + if doRender { + block := p.addBlock(CodeBlock, work.Bytes()) // TODO: get rid of temp buffer + block.IsFenced = true + finalizeCodeBlock(block) + } + + return beg +} + +func unescapeChar(str []byte) []byte { + if str[0] == '\\' { + return []byte{str[1]} + } + return []byte(html.UnescapeString(string(str))) +} + +func unescapeString(str []byte) []byte { + if reBackslashOrAmp.Match(str) { + return reEntityOrEscapedChar.ReplaceAllFunc(str, unescapeChar) + } + return str +} + +func finalizeCodeBlock(block *Node) { + if block.IsFenced { + newlinePos := bytes.IndexByte(block.content, '\n') + firstLine := block.content[:newlinePos] + rest := block.content[newlinePos+1:] + block.Info = unescapeString(bytes.Trim(firstLine, "\n")) + block.Literal = rest + } else { + block.Literal = block.content + } + block.content = nil +} + +func (p *Markdown) table(data []byte) int { + table := p.addBlock(Table, nil) + i, columns := p.tableHeader(data) + if i == 0 { + p.tip = table.Parent + table.Unlink() + return 0 + } + + p.addBlock(TableBody, nil) + + for i < len(data) { + pipes, rowStart := 0, i + for ; i < len(data) && data[i] != '\n'; i++ { + if data[i] == '|' { + pipes++ + } + } + + if pipes == 0 { + i = rowStart + break + } + + // include the newline in data sent to tableRow + if i < len(data) && data[i] == '\n' { + i++ + } + p.tableRow(data[rowStart:i], columns, false) + } + + return i +} + +// check if the specified position is preceded by an odd number of backslashes +func isBackslashEscaped(data []byte, i int) bool { + backslashes := 0 + for i-backslashes-1 >= 0 && data[i-backslashes-1] == '\\' { + backslashes++ + } + return backslashes&1 == 1 +} + +func (p *Markdown) tableHeader(data []byte) (size int, columns []CellAlignFlags) { + i := 0 + colCount := 1 + for i = 0; i < len(data) && data[i] != '\n'; i++ { + if data[i] == '|' && !isBackslashEscaped(data, i) { + colCount++ + } + } + + // doesn't look like a table header + if colCount == 1 { + return + } + + // include the newline in the data sent to tableRow + j := i + if j < len(data) && data[j] == '\n' { + j++ + } + header := data[:j] + + // column count ignores pipes at beginning or end of line + if data[0] == '|' { + colCount-- + } + if i > 2 && data[i-1] == '|' && !isBackslashEscaped(data, i-1) { + colCount-- + } + + columns = make([]CellAlignFlags, colCount) + + // move on to the header underline + i++ + if i >= len(data) { + return + } + + if data[i] == '|' && !isBackslashEscaped(data, i) { + i++ + } + i = skipChar(data, i, ' ') + + // each column header is of form: / *:?-+:? 
*|/ with # dashes + # colons >= 3 + // and trailing | optional on last column + col := 0 + for i < len(data) && data[i] != '\n' { + dashes := 0 + + if data[i] == ':' { + i++ + columns[col] |= TableAlignmentLeft + dashes++ + } + for i < len(data) && data[i] == '-' { + i++ + dashes++ + } + if i < len(data) && data[i] == ':' { + i++ + columns[col] |= TableAlignmentRight + dashes++ + } + for i < len(data) && data[i] == ' ' { + i++ + } + if i == len(data) { + return + } + // end of column test is messy + switch { + case dashes < 3: + // not a valid column + return + + case data[i] == '|' && !isBackslashEscaped(data, i): + // marker found, now skip past trailing whitespace + col++ + i++ + for i < len(data) && data[i] == ' ' { + i++ + } + + // trailing junk found after last column + if col >= colCount && i < len(data) && data[i] != '\n' { + return + } + + case (data[i] != '|' || isBackslashEscaped(data, i)) && col+1 < colCount: + // something else found where marker was required + return + + case data[i] == '\n': + // marker is optional for the last column + col++ + + default: + // trailing junk found after last column + return + } + } + if col != colCount { + return + } + + p.addBlock(TableHead, nil) + p.tableRow(header, columns, true) + size = i + if size < len(data) && data[size] == '\n' { + size++ + } + return +} + +func (p *Markdown) tableRow(data []byte, columns []CellAlignFlags, header bool) { + p.addBlock(TableRow, nil) + i, col := 0, 0 + + if data[i] == '|' && !isBackslashEscaped(data, i) { + i++ + } + + for col = 0; col < len(columns) && i < len(data); col++ { + for i < len(data) && data[i] == ' ' { + i++ + } + + cellStart := i + + for i < len(data) && (data[i] != '|' || isBackslashEscaped(data, i)) && data[i] != '\n' { + i++ + } + + cellEnd := i + + // skip the end-of-cell marker, possibly taking us past end of buffer + i++ + + for cellEnd > cellStart && cellEnd-1 < len(data) && data[cellEnd-1] == ' ' { + cellEnd-- + } + + cell := p.addBlock(TableCell, data[cellStart:cellEnd]) + cell.IsHeader = header + cell.Align = columns[col] + } + + // pad it out with empty columns to get the right number + for ; col < len(columns); col++ { + cell := p.addBlock(TableCell, nil) + cell.IsHeader = header + cell.Align = columns[col] + } + + // silently ignore rows with too many cells +} + +// returns blockquote prefix length +func (p *Markdown) quotePrefix(data []byte) int { + i := 0 + for i < 3 && i < len(data) && data[i] == ' ' { + i++ + } + if i < len(data) && data[i] == '>' { + if i+1 < len(data) && data[i+1] == ' ' { + return i + 2 + } + return i + 1 + } + return 0 +} + +// blockquote ends with at least one blank line +// followed by something without a blockquote prefix +func (p *Markdown) terminateBlockquote(data []byte, beg, end int) bool { + if p.isEmpty(data[beg:]) <= 0 { + return false + } + if end >= len(data) { + return true + } + return p.quotePrefix(data[end:]) == 0 && p.isEmpty(data[end:]) == 0 +} + +// parse a blockquote fragment +func (p *Markdown) quote(data []byte) int { + block := p.addBlock(BlockQuote, nil) + var raw bytes.Buffer + beg, end := 0, 0 + for beg < len(data) { + end = beg + // Step over whole lines, collecting them. 
While doing that, check for + // fenced code and if one's found, incorporate it altogether, + // irregardless of any contents inside it + for end < len(data) && data[end] != '\n' { + if p.extensions&FencedCode != 0 { + if i := p.fencedCodeBlock(data[end:], false); i > 0 { + // -1 to compensate for the extra end++ after the loop: + end += i - 1 + break + } + } + end++ + } + if end < len(data) && data[end] == '\n' { + end++ + } + if pre := p.quotePrefix(data[beg:]); pre > 0 { + // skip the prefix + beg += pre + } else if p.terminateBlockquote(data, beg, end) { + break + } + // this line is part of the blockquote + raw.Write(data[beg:end]) + beg = end + } + p.block(raw.Bytes()) + p.finalize(block) + return end +} + +// returns prefix length for block code +func (p *Markdown) codePrefix(data []byte) int { + if len(data) >= 1 && data[0] == '\t' { + return 1 + } + if len(data) >= 4 && data[0] == ' ' && data[1] == ' ' && data[2] == ' ' && data[3] == ' ' { + return 4 + } + return 0 +} + +func (p *Markdown) code(data []byte) int { + var work bytes.Buffer + + i := 0 + for i < len(data) { + beg := i + for i < len(data) && data[i] != '\n' { + i++ + } + if i < len(data) && data[i] == '\n' { + i++ + } + + blankline := p.isEmpty(data[beg:i]) > 0 + if pre := p.codePrefix(data[beg:i]); pre > 0 { + beg += pre + } else if !blankline { + // non-empty, non-prefixed line breaks the pre + i = beg + break + } + + // verbatim copy to the working buffer + if blankline { + work.WriteByte('\n') + } else { + work.Write(data[beg:i]) + } + } + + // trim all the \n off the end of work + workbytes := work.Bytes() + eol := len(workbytes) + for eol > 0 && workbytes[eol-1] == '\n' { + eol-- + } + if eol != len(workbytes) { + work.Truncate(eol) + } + + work.WriteByte('\n') + + block := p.addBlock(CodeBlock, work.Bytes()) // TODO: get rid of temp buffer + block.IsFenced = false + finalizeCodeBlock(block) + + return i +} + +// returns unordered list item prefix +func (p *Markdown) uliPrefix(data []byte) int { + i := 0 + // start with up to 3 spaces + for i < len(data) && i < 3 && data[i] == ' ' { + i++ + } + if i >= len(data)-1 { + return 0 + } + // need one of {'*', '+', '-'} followed by a space or a tab + if (data[i] != '*' && data[i] != '+' && data[i] != '-') || + (data[i+1] != ' ' && data[i+1] != '\t') { + return 0 + } + return i + 2 +} + +// returns ordered list item prefix +func (p *Markdown) oliPrefix(data []byte) int { + i := 0 + + // start with up to 3 spaces + for i < 3 && i < len(data) && data[i] == ' ' { + i++ + } + + // count the digits + start := i + for i < len(data) && data[i] >= '0' && data[i] <= '9' { + i++ + } + if start == i || i >= len(data)-1 { + return 0 + } + + // we need >= 1 digits followed by a dot and a space or a tab + if data[i] != '.' 
|| !(data[i+1] == ' ' || data[i+1] == '\t') { + return 0 + } + return i + 2 +} + +// returns definition list item prefix +func (p *Markdown) dliPrefix(data []byte) int { + if len(data) < 2 { + return 0 + } + i := 0 + // need a ':' followed by a space or a tab + if data[i] != ':' || !(data[i+1] == ' ' || data[i+1] == '\t') { + return 0 + } + for i < len(data) && data[i] == ' ' { + i++ + } + return i + 2 +} + +// parse ordered or unordered list block +func (p *Markdown) list(data []byte, flags ListType) int { + i := 0 + flags |= ListItemBeginningOfList + block := p.addBlock(List, nil) + block.ListFlags = flags + block.Tight = true + + for i < len(data) { + skip := p.listItem(data[i:], &flags) + if flags&ListItemContainsBlock != 0 { + block.ListData.Tight = false + } + i += skip + if skip == 0 || flags&ListItemEndOfList != 0 { + break + } + flags &= ^ListItemBeginningOfList + } + + above := block.Parent + finalizeList(block) + p.tip = above + return i +} + +// Returns true if the list item is not the same type as its parent list +func (p *Markdown) listTypeChanged(data []byte, flags *ListType) bool { + if p.dliPrefix(data) > 0 && *flags&ListTypeDefinition == 0 { + return true + } else if p.oliPrefix(data) > 0 && *flags&ListTypeOrdered == 0 { + return true + } else if p.uliPrefix(data) > 0 && (*flags&ListTypeOrdered != 0 || *flags&ListTypeDefinition != 0) { + return true + } + return false +} + +// Returns true if block ends with a blank line, descending if needed +// into lists and sublists. +func endsWithBlankLine(block *Node) bool { + // TODO: figure this out. Always false now. + for block != nil { + //if block.lastLineBlank { + //return true + //} + t := block.Type + if t == List || t == Item { + block = block.LastChild + } else { + break + } + } + return false +} + +func finalizeList(block *Node) { + block.open = false + item := block.FirstChild + for item != nil { + // check for non-final list item ending with blank line: + if endsWithBlankLine(item) && item.Next != nil { + block.ListData.Tight = false + break + } + // recurse into children of list item, to see if there are spaces + // between any of them: + subItem := item.FirstChild + for subItem != nil { + if endsWithBlankLine(subItem) && (item.Next != nil || subItem.Next != nil) { + block.ListData.Tight = false + break + } + subItem = subItem.Next + } + item = item.Next + } +} + +// Parse a single list item. +// Assumes initial prefix is already removed if this is a sublist. 
+func (p *Markdown) listItem(data []byte, flags *ListType) int { + // keep track of the indentation of the first line + itemIndent := 0 + if data[0] == '\t' { + itemIndent += 4 + } else { + for itemIndent < 3 && data[itemIndent] == ' ' { + itemIndent++ + } + } + + var bulletChar byte = '*' + i := p.uliPrefix(data) + if i == 0 { + i = p.oliPrefix(data) + } else { + bulletChar = data[i-2] + } + if i == 0 { + i = p.dliPrefix(data) + // reset definition term flag + if i > 0 { + *flags &= ^ListTypeTerm + } + } + if i == 0 { + // if in definition list, set term flag and continue + if *flags&ListTypeDefinition != 0 { + *flags |= ListTypeTerm + } else { + return 0 + } + } + + // skip leading whitespace on first line + for i < len(data) && data[i] == ' ' { + i++ + } + + // find the end of the line + line := i + for i > 0 && i < len(data) && data[i-1] != '\n' { + i++ + } + + // get working buffer + var raw bytes.Buffer + + // put the first line into the working buffer + raw.Write(data[line:i]) + line = i + + // process the following lines + containsBlankLine := false + sublist := 0 + codeBlockMarker := "" + +gatherlines: + for line < len(data) { + i++ + + // find the end of this line + for i < len(data) && data[i-1] != '\n' { + i++ + } + + // if it is an empty line, guess that it is part of this item + // and move on to the next line + if p.isEmpty(data[line:i]) > 0 { + containsBlankLine = true + line = i + continue + } + + // calculate the indentation + indent := 0 + indentIndex := 0 + if data[line] == '\t' { + indentIndex++ + indent += 4 + } else { + for indent < 4 && line+indent < i && data[line+indent] == ' ' { + indent++ + indentIndex++ + } + } + + chunk := data[line+indentIndex : i] + + if p.extensions&FencedCode != 0 { + // determine if in or out of codeblock + // if in codeblock, ignore normal list processing + _, marker := isFenceLine(chunk, nil, codeBlockMarker) + if marker != "" { + if codeBlockMarker == "" { + // start of codeblock + codeBlockMarker = marker + } else { + // end of codeblock. + codeBlockMarker = "" + } + } + // we are in a codeblock, write line, and continue + if codeBlockMarker != "" || marker != "" { + raw.Write(data[line+indentIndex : i]) + line = i + continue gatherlines + } + } + + // evaluate how this line fits in + switch { + // is this a nested list item? + case (p.uliPrefix(chunk) > 0 && !p.isHRule(chunk)) || + p.oliPrefix(chunk) > 0 || + p.dliPrefix(chunk) > 0: + + // to be a nested list, it must be indented more + // if not, it is either a different kind of list + // or the next item in the same list + if indent <= itemIndent { + if p.listTypeChanged(chunk, flags) { + *flags |= ListItemEndOfList + } else if containsBlankLine { + *flags |= ListItemContainsBlock + } + + break gatherlines + } + + if containsBlankLine { + *flags |= ListItemContainsBlock + } + + // is this the first item in the nested list? + if sublist == 0 { + sublist = raw.Len() + } + + // is this a nested prefix heading? + case p.isPrefixHeading(chunk): + // if the heading is not indented, it is not nested in the list + // and thus ends the list + if containsBlankLine && indent < 4 { + *flags |= ListItemEndOfList + break gatherlines + } + *flags |= ListItemContainsBlock + + // anything following an empty line is only part + // of this item if it is indented 4 spaces + // (regardless of the indentation of the beginning of the item) + case containsBlankLine && indent < 4: + if *flags&ListTypeDefinition != 0 && i < len(data)-1 { + // is the next item still a part of this list? 
+ next := i + for next < len(data) && data[next] != '\n' { + next++ + } + for next < len(data)-1 && data[next] == '\n' { + next++ + } + if i < len(data)-1 && data[i] != ':' && data[next] != ':' { + *flags |= ListItemEndOfList + } + } else { + *flags |= ListItemEndOfList + } + break gatherlines + + // a blank line means this should be parsed as a block + case containsBlankLine: + raw.WriteByte('\n') + *flags |= ListItemContainsBlock + } + + // if this line was preceded by one or more blanks, + // re-introduce the blank into the buffer + if containsBlankLine { + containsBlankLine = false + raw.WriteByte('\n') + } + + // add the line into the working buffer without prefix + raw.Write(data[line+indentIndex : i]) + + line = i + } + + rawBytes := raw.Bytes() + + block := p.addBlock(Item, nil) + block.ListFlags = *flags + block.Tight = false + block.BulletChar = bulletChar + block.Delimiter = '.' // Only '.' is possible in Markdown, but ')' will also be possible in CommonMark + + // render the contents of the list item + if *flags&ListItemContainsBlock != 0 && *flags&ListTypeTerm == 0 { + // intermediate render of block item, except for definition term + if sublist > 0 { + p.block(rawBytes[:sublist]) + p.block(rawBytes[sublist:]) + } else { + p.block(rawBytes) + } + } else { + // intermediate render of inline item + if sublist > 0 { + child := p.addChild(Paragraph, 0) + child.content = rawBytes[:sublist] + p.block(rawBytes[sublist:]) + } else { + child := p.addChild(Paragraph, 0) + child.content = rawBytes + } + } + return line +} + +// render a single paragraph that has already been parsed out +func (p *Markdown) renderParagraph(data []byte) { + if len(data) == 0 { + return + } + + // trim leading spaces + beg := 0 + for data[beg] == ' ' { + beg++ + } + + end := len(data) + // trim trailing newline + if data[len(data)-1] == '\n' { + end-- + } + + // trim trailing spaces + for end > beg && data[end-1] == ' ' { + end-- + } + + p.addBlock(Paragraph, data[beg:end]) +} + +func (p *Markdown) paragraph(data []byte) int { + // prev: index of 1st char of previous line + // line: index of 1st char of current line + // i: index of cursor/end of current line + var prev, line, i int + tabSize := TabSizeDefault + if p.extensions&TabSizeEight != 0 { + tabSize = TabSizeDouble + } + // keep going until we find something to mark the end of the paragraph + for i < len(data) { + // mark the beginning of the current line + prev = line + current := data[i:] + line = i + + // did we find a reference or a footnote? If so, end a paragraph + // preceding it and report that we have consumed up to the end of that + // reference: + if refEnd := isReference(p, current, tabSize); refEnd > 0 { + p.renderParagraph(data[:i]) + return i + refEnd + } + + // did we find a blank line marking the end of the paragraph? + if n := p.isEmpty(current); n > 0 { + // did this blank line followed by a definition list item? 
+ if p.extensions&DefinitionLists != 0 { + if i < len(data)-1 && data[i+1] == ':' { + return p.list(data[prev:], ListTypeDefinition) + } + } + + p.renderParagraph(data[:i]) + return i + n + } + + // an underline under some text marks a heading, so our paragraph ended on prev line + if i > 0 { + if level := p.isUnderlinedHeading(current); level > 0 { + // render the paragraph + p.renderParagraph(data[:prev]) + + // ignore leading and trailing whitespace + eol := i - 1 + for prev < eol && data[prev] == ' ' { + prev++ + } + for eol > prev && data[eol-1] == ' ' { + eol-- + } + + id := "" + if p.extensions&AutoHeadingIDs != 0 { + id = sanitized_anchor_name.Create(string(data[prev:eol])) + } + + block := p.addBlock(Heading, data[prev:eol]) + block.Level = level + block.HeadingID = id + + // find the end of the underline + for i < len(data) && data[i] != '\n' { + i++ + } + return i + } + } + + // if the next line starts a block of HTML, then the paragraph ends here + if p.extensions&LaxHTMLBlocks != 0 { + if data[i] == '<' && p.html(current, false) > 0 { + // rewind to before the HTML block + p.renderParagraph(data[:i]) + return i + } + } + + // if there's a prefixed heading or a horizontal rule after this, paragraph is over + if p.isPrefixHeading(current) || p.isHRule(current) { + p.renderParagraph(data[:i]) + return i + } + + // if there's a fenced code block, paragraph is over + if p.extensions&FencedCode != 0 { + if p.fencedCodeBlock(current, false) > 0 { + p.renderParagraph(data[:i]) + return i + } + } + + // if there's a definition list item, prev line is a definition term + if p.extensions&DefinitionLists != 0 { + if p.dliPrefix(current) != 0 { + ret := p.list(data[prev:], ListTypeDefinition) + return ret + } + } + + // if there's a list after this, paragraph is over + if p.extensions&NoEmptyLineBeforeBlock != 0 { + if p.uliPrefix(current) != 0 || + p.oliPrefix(current) != 0 || + p.quotePrefix(current) != 0 || + p.codePrefix(current) != 0 { + p.renderParagraph(data[:i]) + return i + } + } + + // otherwise, scan to the beginning of the next line + nl := bytes.IndexByte(data[i:], '\n') + if nl >= 0 { + i += nl + 1 + } else { + i += len(data[i:]) + } + } + + p.renderParagraph(data[:i]) + return i +} + +func skipChar(data []byte, start int, char byte) int { + i := start + for i < len(data) && data[i] == char { + i++ + } + return i +} + +func skipUntilChar(text []byte, start int, char byte) int { + i := start + for i < len(text) && text[i] != char { + i++ + } + return i +} diff --git a/vendor/github.com/russross/blackfriday/v2/doc.go b/vendor/github.com/russross/blackfriday/v2/doc.go new file mode 100644 index 000000000..5b3fa9876 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/doc.go @@ -0,0 +1,18 @@ +// Package blackfriday is a markdown processor. +// +// It translates plain text with simple formatting rules into an AST, which can +// then be further processed to HTML (provided by Blackfriday itself) or other +// formats (provided by the community). +// +// The simplest way to invoke Blackfriday is to call the Run function. It will +// take a text input and produce a text output in HTML (or other format). +// +// A slightly more sophisticated way to use Blackfriday is to create a Markdown +// processor and to call Parse, which returns a syntax tree for the input +// document. You can leverage Blackfriday's parsing for content extraction from +// markdown documents. You can assign a custom renderer and set various options +// to the Markdown processor. 
+// +// If you're interested in calling Blackfriday from command line, see +// https://github.com/russross/blackfriday-tool. +package blackfriday diff --git a/vendor/github.com/russross/blackfriday/v2/esc.go b/vendor/github.com/russross/blackfriday/v2/esc.go new file mode 100644 index 000000000..6385f27cb --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/esc.go @@ -0,0 +1,34 @@ +package blackfriday + +import ( + "html" + "io" +) + +var htmlEscaper = [256][]byte{ + '&': []byte("&"), + '<': []byte("<"), + '>': []byte(">"), + '"': []byte("""), +} + +func escapeHTML(w io.Writer, s []byte) { + var start, end int + for end < len(s) { + escSeq := htmlEscaper[s[end]] + if escSeq != nil { + w.Write(s[start:end]) + w.Write(escSeq) + start = end + 1 + } + end++ + } + if start < len(s) && end <= len(s) { + w.Write(s[start:end]) + } +} + +func escLink(w io.Writer, text []byte) { + unesc := html.UnescapeString(string(text)) + escapeHTML(w, []byte(unesc)) +} diff --git a/vendor/github.com/russross/blackfriday/v2/go.mod b/vendor/github.com/russross/blackfriday/v2/go.mod new file mode 100644 index 000000000..620b74e0a --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/go.mod @@ -0,0 +1 @@ +module github.com/russross/blackfriday/v2 diff --git a/vendor/github.com/russross/blackfriday/v2/html.go b/vendor/github.com/russross/blackfriday/v2/html.go new file mode 100644 index 000000000..284c87184 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/html.go @@ -0,0 +1,949 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// +// HTML rendering backend +// +// + +package blackfriday + +import ( + "bytes" + "fmt" + "io" + "regexp" + "strings" +) + +// HTMLFlags control optional behavior of HTML renderer. +type HTMLFlags int + +// HTML renderer configuration options. 
+const ( + HTMLFlagsNone HTMLFlags = 0 + SkipHTML HTMLFlags = 1 << iota // Skip preformatted HTML blocks + SkipImages // Skip embedded images + SkipLinks // Skip all links + Safelink // Only link to trusted protocols + NofollowLinks // Only link with rel="nofollow" + NoreferrerLinks // Only link with rel="noreferrer" + NoopenerLinks // Only link with rel="noopener" + HrefTargetBlank // Add a blank target + CompletePage // Generate a complete HTML page + UseXHTML // Generate XHTML output instead of HTML + FootnoteReturnLinks // Generate a link at the end of a footnote to return to the source + Smartypants // Enable smart punctuation substitutions + SmartypantsFractions // Enable smart fractions (with Smartypants) + SmartypantsDashes // Enable smart dashes (with Smartypants) + SmartypantsLatexDashes // Enable LaTeX-style dashes (with Smartypants) + SmartypantsAngledQuotes // Enable angled double quotes (with Smartypants) for double quotes rendering + SmartypantsQuotesNBSP // Enable « French guillemets » (with Smartypants) + TOC // Generate a table of contents +) + +var ( + htmlTagRe = regexp.MustCompile("(?i)^" + htmlTag) +) + +const ( + htmlTag = "(?:" + openTag + "|" + closeTag + "|" + htmlComment + "|" + + processingInstruction + "|" + declaration + "|" + cdata + ")" + closeTag = "]" + openTag = "<" + tagName + attribute + "*" + "\\s*/?>" + attribute = "(?:" + "\\s+" + attributeName + attributeValueSpec + "?)" + attributeValue = "(?:" + unquotedValue + "|" + singleQuotedValue + "|" + doubleQuotedValue + ")" + attributeValueSpec = "(?:" + "\\s*=" + "\\s*" + attributeValue + ")" + attributeName = "[a-zA-Z_:][a-zA-Z0-9:._-]*" + cdata = "" + declaration = "]*>" + doubleQuotedValue = "\"[^\"]*\"" + htmlComment = "|" + processingInstruction = "[<][?].*?[?][>]" + singleQuotedValue = "'[^']*'" + tagName = "[A-Za-z][A-Za-z0-9-]*" + unquotedValue = "[^\"'=<>`\\x00-\\x20]+" +) + +// HTMLRendererParameters is a collection of supplementary parameters tweaking +// the behavior of various parts of HTML renderer. +type HTMLRendererParameters struct { + // Prepend this text to each relative URL. + AbsolutePrefix string + // Add this text to each footnote anchor, to ensure uniqueness. + FootnoteAnchorPrefix string + // Show this text inside the tag for a footnote return link, if the + // HTML_FOOTNOTE_RETURN_LINKS flag is enabled. If blank, the string + // [return] is used. + FootnoteReturnLinkContents string + // If set, add this text to the front of each Heading ID, to ensure + // uniqueness. + HeadingIDPrefix string + // If set, add this text to the back of each Heading ID, to ensure uniqueness. + HeadingIDSuffix string + // Increase heading levels: if the offset is 1,

<h1> becomes <h2>
etc. + // Negative offset is also valid. + // Resulting levels are clipped between 1 and 6. + HeadingLevelOffset int + + Title string // Document title (used if CompletePage is set) + CSS string // Optional CSS file URL (used if CompletePage is set) + Icon string // Optional icon file URL (used if CompletePage is set) + + Flags HTMLFlags // Flags allow customizing this renderer's behavior +} + +// HTMLRenderer is a type that implements the Renderer interface for HTML output. +// +// Do not create this directly, instead use the NewHTMLRenderer function. +type HTMLRenderer struct { + HTMLRendererParameters + + closeTag string // how to end singleton tags: either " />" or ">" + + // Track heading IDs to prevent ID collision in a single generation. + headingIDs map[string]int + + lastOutputLen int + disableTags int + + sr *SPRenderer +} + +const ( + xhtmlClose = " />" + htmlClose = ">" +) + +// NewHTMLRenderer creates and configures an HTMLRenderer object, which +// satisfies the Renderer interface. +func NewHTMLRenderer(params HTMLRendererParameters) *HTMLRenderer { + // configure the rendering engine + closeTag := htmlClose + if params.Flags&UseXHTML != 0 { + closeTag = xhtmlClose + } + + if params.FootnoteReturnLinkContents == "" { + params.FootnoteReturnLinkContents = `[return]` + } + + return &HTMLRenderer{ + HTMLRendererParameters: params, + + closeTag: closeTag, + headingIDs: make(map[string]int), + + sr: NewSmartypantsRenderer(params.Flags), + } +} + +func isHTMLTag(tag []byte, tagname string) bool { + found, _ := findHTMLTagPos(tag, tagname) + return found +} + +// Look for a character, but ignore it when it's in any kind of quotes, it +// might be JavaScript +func skipUntilCharIgnoreQuotes(html []byte, start int, char byte) int { + inSingleQuote := false + inDoubleQuote := false + inGraveQuote := false + i := start + for i < len(html) { + switch { + case html[i] == char && !inSingleQuote && !inDoubleQuote && !inGraveQuote: + return i + case html[i] == '\'': + inSingleQuote = !inSingleQuote + case html[i] == '"': + inDoubleQuote = !inDoubleQuote + case html[i] == '`': + inGraveQuote = !inGraveQuote + } + i++ + } + return start +} + +func findHTMLTagPos(tag []byte, tagname string) (bool, int) { + i := 0 + if i < len(tag) && tag[0] != '<' { + return false, -1 + } + i++ + i = skipSpace(tag, i) + + if i < len(tag) && tag[i] == '/' { + i++ + } + + i = skipSpace(tag, i) + j := 0 + for ; i < len(tag); i, j = i+1, j+1 { + if j >= len(tagname) { + break + } + + if strings.ToLower(string(tag[i]))[0] != tagname[j] { + return false, -1 + } + } + + if i == len(tag) { + return false, -1 + } + + rightAngle := skipUntilCharIgnoreQuotes(tag, i, '>') + if rightAngle >= i { + return true, rightAngle + } + + return false, -1 +} + +func skipSpace(tag []byte, i int) int { + for i < len(tag) && isspace(tag[i]) { + i++ + } + return i +} + +func isRelativeLink(link []byte) (yes bool) { + // a tag begin with '#' + if link[0] == '#' { + return true + } + + // link begin with '/' but not '//', the second maybe a protocol relative link + if len(link) >= 2 && link[0] == '/' && link[1] != '/' { + return true + } + + // only the root '/' + if len(link) == 1 && link[0] == '/' { + return true + } + + // current directory : begin with "./" + if bytes.HasPrefix(link, []byte("./")) { + return true + } + + // parent directory : begin with "../" + if bytes.HasPrefix(link, []byte("../")) { + return true + } + + return false +} + +func (r *HTMLRenderer) ensureUniqueHeadingID(id string) string { + for count, found := 
r.headingIDs[id]; found; count, found = r.headingIDs[id] { + tmp := fmt.Sprintf("%s-%d", id, count+1) + + if _, tmpFound := r.headingIDs[tmp]; !tmpFound { + r.headingIDs[id] = count + 1 + id = tmp + } else { + id = id + "-1" + } + } + + if _, found := r.headingIDs[id]; !found { + r.headingIDs[id] = 0 + } + + return id +} + +func (r *HTMLRenderer) addAbsPrefix(link []byte) []byte { + if r.AbsolutePrefix != "" && isRelativeLink(link) && link[0] != '.' { + newDest := r.AbsolutePrefix + if link[0] != '/' { + newDest += "/" + } + newDest += string(link) + return []byte(newDest) + } + return link +} + +func appendLinkAttrs(attrs []string, flags HTMLFlags, link []byte) []string { + if isRelativeLink(link) { + return attrs + } + val := []string{} + if flags&NofollowLinks != 0 { + val = append(val, "nofollow") + } + if flags&NoreferrerLinks != 0 { + val = append(val, "noreferrer") + } + if flags&NoopenerLinks != 0 { + val = append(val, "noopener") + } + if flags&HrefTargetBlank != 0 { + attrs = append(attrs, "target=\"_blank\"") + } + if len(val) == 0 { + return attrs + } + attr := fmt.Sprintf("rel=%q", strings.Join(val, " ")) + return append(attrs, attr) +} + +func isMailto(link []byte) bool { + return bytes.HasPrefix(link, []byte("mailto:")) +} + +func needSkipLink(flags HTMLFlags, dest []byte) bool { + if flags&SkipLinks != 0 { + return true + } + return flags&Safelink != 0 && !isSafeLink(dest) && !isMailto(dest) +} + +func isSmartypantable(node *Node) bool { + pt := node.Parent.Type + return pt != Link && pt != CodeBlock && pt != Code +} + +func appendLanguageAttr(attrs []string, info []byte) []string { + if len(info) == 0 { + return attrs + } + endOfLang := bytes.IndexAny(info, "\t ") + if endOfLang < 0 { + endOfLang = len(info) + } + return append(attrs, fmt.Sprintf("class=\"language-%s\"", info[:endOfLang])) +} + +func (r *HTMLRenderer) tag(w io.Writer, name []byte, attrs []string) { + w.Write(name) + if len(attrs) > 0 { + w.Write(spaceBytes) + w.Write([]byte(strings.Join(attrs, " "))) + } + w.Write(gtBytes) + r.lastOutputLen = 1 +} + +func footnoteRef(prefix string, node *Node) []byte { + urlFrag := prefix + string(slugify(node.Destination)) + anchor := fmt.Sprintf(`%d`, urlFrag, node.NoteID) + return []byte(fmt.Sprintf(`%s`, urlFrag, anchor)) +} + +func footnoteItem(prefix string, slug []byte) []byte { + return []byte(fmt.Sprintf(`
<li id="fn:%s%s">`, prefix, slug))
+}
+
+func footnoteReturnLink(prefix, returnLink string, slug []byte) []byte {
+	const format = ` <a class="footnote-return" href="#fnref:%s%s">%s</a>`
+	return []byte(fmt.Sprintf(format, prefix, slug, returnLink))
+}
+
+func itemOpenCR(node *Node) bool {
+	if node.Prev == nil {
+		return false
+	}
+	ld := node.Parent.ListData
+	return !ld.Tight && ld.ListFlags&ListTypeDefinition == 0
+}
+
+func skipParagraphTags(node *Node) bool {
+	grandparent := node.Parent.Parent
+	if grandparent == nil || grandparent.Type != List {
+		return false
+	}
+	tightOrTerm := grandparent.Tight || node.Parent.ListFlags&ListTypeTerm != 0
+	return grandparent.Type == List && tightOrTerm
+}
+
+func cellAlignment(align CellAlignFlags) string {
+	switch align {
+	case TableAlignmentLeft:
+		return "left"
+	case TableAlignmentRight:
+		return "right"
+	case TableAlignmentCenter:
+		return "center"
+	default:
+		return ""
+	}
+}
+
+func (r *HTMLRenderer) out(w io.Writer, text []byte) {
+	if r.disableTags > 0 {
+		w.Write(htmlTagRe.ReplaceAll(text, []byte{}))
+	} else {
+		w.Write(text)
+	}
+	r.lastOutputLen = len(text)
+}
+
+func (r *HTMLRenderer) cr(w io.Writer) {
+	if r.lastOutputLen > 0 {
+		r.out(w, nlBytes)
+	}
+}
+
+var (
+	nlBytes    = []byte{'\n'}
+	gtBytes    = []byte{'>'}
+	spaceBytes = []byte{' '}
+)
+
+var (
+	brTag              = []byte("<br>")
+	brXHTMLTag         = []byte("<br />")
+	emTag              = []byte("<em>")
+	emCloseTag         = []byte("</em>")
+	strongTag          = []byte("<strong>")
+	strongCloseTag     = []byte("</strong>")
+	delTag             = []byte("<del>")
+	delCloseTag        = []byte("</del>")
+	ttTag              = []byte("<tt>")
+	ttCloseTag         = []byte("</tt>")
+	aTag               = []byte("<a")
+	aCloseTag          = []byte("</a>")
+	preTag             = []byte("<pre>")
+	preCloseTag        = []byte("</pre>")
+	codeTag            = []byte("<code>")
+	codeCloseTag       = []byte("</code>")
+	pTag               = []byte("<p>")
+	pCloseTag          = []byte("</p>")
+	blockquoteTag      = []byte("<blockquote>")
+	blockquoteCloseTag = []byte("</blockquote>")
+	hrTag              = []byte("<hr>")
+	hrXHTMLTag         = []byte("<hr />")
+	ulTag              = []byte("<ul>")
+	ulCloseTag         = []byte("</ul>")
+	olTag              = []byte("<ol>")
+	olCloseTag         = []byte("</ol>")
+	dlTag              = []byte("<dl>")
+	dlCloseTag         = []byte("</dl>")
+	liTag              = []byte("<li>")
+	liCloseTag         = []byte("</li>")
+	ddTag              = []byte("<dd>")
+	ddCloseTag         = []byte("</dd>")
+	dtTag              = []byte("<dt>")
+	dtCloseTag         = []byte("</dt>")
+	tableTag           = []byte("<table>")
+	tableCloseTag      = []byte("</table>")
+	tdTag              = []byte("<td")
+	tdCloseTag         = []byte("</td>")
+	thTag              = []byte("<th")
+	thCloseTag         = []byte("</th>")
+	theadTag           = []byte("<thead>")
+	theadCloseTag      = []byte("</thead>")
+	tbodyTag           = []byte("<tbody>")
+	tbodyCloseTag      = []byte("</tbody>")
+	trTag              = []byte("<tr>")
+	trCloseTag         = []byte("</tr>")
+	h1Tag              = []byte("<h1")
+	h1CloseTag         = []byte("</h1>")
+	h2Tag              = []byte("<h2")
+	h2CloseTag         = []byte("</h2>")
+	h3Tag              = []byte("<h3")
+	h3CloseTag         = []byte("</h3>")
+	h4Tag              = []byte("<h4")
+	h4CloseTag         = []byte("</h4>")
+	h5Tag              = []byte("<h5")
+	h5CloseTag         = []byte("</h5>")
+	h6Tag              = []byte("<h6")
+	h6CloseTag         = []byte("</h6>")
+
+	footnotesDivBytes      = []byte("\n<div class=\"footnotes\">\n\n")
+	footnotesCloseDivBytes = []byte("\n</div>
    \n") +) + +func headingTagsFromLevel(level int) ([]byte, []byte) { + if level <= 1 { + return h1Tag, h1CloseTag + } + switch level { + case 2: + return h2Tag, h2CloseTag + case 3: + return h3Tag, h3CloseTag + case 4: + return h4Tag, h4CloseTag + case 5: + return h5Tag, h5CloseTag + } + return h6Tag, h6CloseTag +} + +func (r *HTMLRenderer) outHRTag(w io.Writer) { + if r.Flags&UseXHTML == 0 { + r.out(w, hrTag) + } else { + r.out(w, hrXHTMLTag) + } +} + +// RenderNode is a default renderer of a single node of a syntax tree. For +// block nodes it will be called twice: first time with entering=true, second +// time with entering=false, so that it could know when it's working on an open +// tag and when on close. It writes the result to w. +// +// The return value is a way to tell the calling walker to adjust its walk +// pattern: e.g. it can terminate the traversal by returning Terminate. Or it +// can ask the walker to skip a subtree of this node by returning SkipChildren. +// The typical behavior is to return GoToNext, which asks for the usual +// traversal to the next node. +func (r *HTMLRenderer) RenderNode(w io.Writer, node *Node, entering bool) WalkStatus { + attrs := []string{} + switch node.Type { + case Text: + if r.Flags&Smartypants != 0 { + var tmp bytes.Buffer + escapeHTML(&tmp, node.Literal) + r.sr.Process(w, tmp.Bytes()) + } else { + if node.Parent.Type == Link { + escLink(w, node.Literal) + } else { + escapeHTML(w, node.Literal) + } + } + case Softbreak: + r.cr(w) + // TODO: make it configurable via out(renderer.softbreak) + case Hardbreak: + if r.Flags&UseXHTML == 0 { + r.out(w, brTag) + } else { + r.out(w, brXHTMLTag) + } + r.cr(w) + case Emph: + if entering { + r.out(w, emTag) + } else { + r.out(w, emCloseTag) + } + case Strong: + if entering { + r.out(w, strongTag) + } else { + r.out(w, strongCloseTag) + } + case Del: + if entering { + r.out(w, delTag) + } else { + r.out(w, delCloseTag) + } + case HTMLSpan: + if r.Flags&SkipHTML != 0 { + break + } + r.out(w, node.Literal) + case Link: + // mark it but don't link it if it is not a safe link: no smartypants + dest := node.LinkData.Destination + if needSkipLink(r.Flags, dest) { + if entering { + r.out(w, ttTag) + } else { + r.out(w, ttCloseTag) + } + } else { + if entering { + dest = r.addAbsPrefix(dest) + var hrefBuf bytes.Buffer + hrefBuf.WriteString("href=\"") + escLink(&hrefBuf, dest) + hrefBuf.WriteByte('"') + attrs = append(attrs, hrefBuf.String()) + if node.NoteID != 0 { + r.out(w, footnoteRef(r.FootnoteAnchorPrefix, node)) + break + } + attrs = appendLinkAttrs(attrs, r.Flags, dest) + if len(node.LinkData.Title) > 0 { + var titleBuff bytes.Buffer + titleBuff.WriteString("title=\"") + escapeHTML(&titleBuff, node.LinkData.Title) + titleBuff.WriteByte('"') + attrs = append(attrs, titleBuff.String()) + } + r.tag(w, aTag, attrs) + } else { + if node.NoteID != 0 { + break + } + r.out(w, aCloseTag) + } + } + case Image: + if r.Flags&SkipImages != 0 { + return SkipChildren + } + if entering { + dest := node.LinkData.Destination + dest = r.addAbsPrefix(dest) + if r.disableTags == 0 { + //if options.safe && potentiallyUnsafe(dest) { + //out(w, ``)
+				//} else {
+				r.out(w, []byte(`<img src=`)) + } + } + case Code: + r.out(w, codeTag) + escapeHTML(w, node.Literal) + r.out(w, codeCloseTag) + case Document: + break + case Paragraph: + if skipParagraphTags(node) { + break + } + if entering { + // TODO: untangle this clusterfuck about when the newlines need + // to be added and when not. + if node.Prev != nil { + switch node.Prev.Type { + case HTMLBlock, List, Paragraph, Heading, CodeBlock, BlockQuote, HorizontalRule: + r.cr(w) + } + } + if node.Parent.Type == BlockQuote && node.Prev == nil { + r.cr(w) + } + r.out(w, pTag) + } else { + r.out(w, pCloseTag) + if !(node.Parent.Type == Item && node.Next == nil) { + r.cr(w) + } + } + case BlockQuote: + if entering { + r.cr(w) + r.out(w, blockquoteTag) + } else { + r.out(w, blockquoteCloseTag) + r.cr(w) + } + case HTMLBlock: + if r.Flags&SkipHTML != 0 { + break + } + r.cr(w) + r.out(w, node.Literal) + r.cr(w) + case Heading: + headingLevel := r.HTMLRendererParameters.HeadingLevelOffset + node.Level + openTag, closeTag := headingTagsFromLevel(headingLevel) + if entering { + if node.IsTitleblock { + attrs = append(attrs, `class="title"`) + } + if node.HeadingID != "" { + id := r.ensureUniqueHeadingID(node.HeadingID) + if r.HeadingIDPrefix != "" { + id = r.HeadingIDPrefix + id + } + if r.HeadingIDSuffix != "" { + id = id + r.HeadingIDSuffix + } + attrs = append(attrs, fmt.Sprintf(`id="%s"`, id)) + } + r.cr(w) + r.tag(w, openTag, attrs) + } else { + r.out(w, closeTag) + if !(node.Parent.Type == Item && node.Next == nil) { + r.cr(w) + } + } + case HorizontalRule: + r.cr(w) + r.outHRTag(w) + r.cr(w) + case List: + openTag := ulTag + closeTag := ulCloseTag + if node.ListFlags&ListTypeOrdered != 0 { + openTag = olTag + closeTag = olCloseTag + } + if node.ListFlags&ListTypeDefinition != 0 { + openTag = dlTag + closeTag = dlCloseTag + } + if entering { + if node.IsFootnotesList { + r.out(w, footnotesDivBytes) + r.outHRTag(w) + r.cr(w) + } + r.cr(w) + if node.Parent.Type == Item && node.Parent.Parent.Tight { + r.cr(w) + } + r.tag(w, openTag[:len(openTag)-1], attrs) + r.cr(w) + } else { + r.out(w, closeTag) + //cr(w) + //if node.parent.Type != Item { + // cr(w) + //} + if node.Parent.Type == Item && node.Next != nil { + r.cr(w) + } + if node.Parent.Type == Document || node.Parent.Type == BlockQuote { + r.cr(w) + } + if node.IsFootnotesList { + r.out(w, footnotesCloseDivBytes) + } + } + case Item: + openTag := liTag + closeTag := liCloseTag + if node.ListFlags&ListTypeDefinition != 0 { + openTag = ddTag + closeTag = ddCloseTag + } + if node.ListFlags&ListTypeTerm != 0 { + openTag = dtTag + closeTag = dtCloseTag + } + if entering { + if itemOpenCR(node) { + r.cr(w) + } + if node.ListData.RefLink != nil { + slug := slugify(node.ListData.RefLink) + r.out(w, footnoteItem(r.FootnoteAnchorPrefix, slug)) + break + } + r.out(w, openTag) + } else { + if node.ListData.RefLink != nil { + slug := slugify(node.ListData.RefLink) + if r.Flags&FootnoteReturnLinks != 0 { + r.out(w, footnoteReturnLink(r.FootnoteAnchorPrefix, r.FootnoteReturnLinkContents, slug)) + } + } + r.out(w, closeTag) + r.cr(w) + } + case CodeBlock: + attrs = appendLanguageAttr(attrs, node.Info) + r.cr(w) + r.out(w, preTag) + r.tag(w, codeTag[:len(codeTag)-1], attrs) + escapeHTML(w, node.Literal) + r.out(w, codeCloseTag) + r.out(w, preCloseTag) + if node.Parent.Type != Item { + r.cr(w) + } + case Table: + if entering { + r.cr(w) + r.out(w, tableTag) + } else { + r.out(w, tableCloseTag) + r.cr(w) + } + case TableCell: + openTag := tdTag + closeTag := tdCloseTag + 
if node.IsHeader { + openTag = thTag + closeTag = thCloseTag + } + if entering { + align := cellAlignment(node.Align) + if align != "" { + attrs = append(attrs, fmt.Sprintf(`align="%s"`, align)) + } + if node.Prev == nil { + r.cr(w) + } + r.tag(w, openTag, attrs) + } else { + r.out(w, closeTag) + r.cr(w) + } + case TableHead: + if entering { + r.cr(w) + r.out(w, theadTag) + } else { + r.out(w, theadCloseTag) + r.cr(w) + } + case TableBody: + if entering { + r.cr(w) + r.out(w, tbodyTag) + // XXX: this is to adhere to a rather silly test. Should fix test. + if node.FirstChild == nil { + r.cr(w) + } + } else { + r.out(w, tbodyCloseTag) + r.cr(w) + } + case TableRow: + if entering { + r.cr(w) + r.out(w, trTag) + } else { + r.out(w, trCloseTag) + r.cr(w) + } + default: + panic("Unknown node type " + node.Type.String()) + } + return GoToNext +} + +// RenderHeader writes HTML document preamble and TOC if requested. +func (r *HTMLRenderer) RenderHeader(w io.Writer, ast *Node) { + r.writeDocumentHeader(w) + if r.Flags&TOC != 0 { + r.writeTOC(w, ast) + } +} + +// RenderFooter writes HTML document footer. +func (r *HTMLRenderer) RenderFooter(w io.Writer, ast *Node) { + if r.Flags&CompletePage == 0 { + return + } + io.WriteString(w, "\n\n\n") +} + +func (r *HTMLRenderer) writeDocumentHeader(w io.Writer) { + if r.Flags&CompletePage == 0 { + return + } + ending := "" + if r.Flags&UseXHTML != 0 { + io.WriteString(w, "\n") + io.WriteString(w, "\n") + ending = " /" + } else { + io.WriteString(w, "\n") + io.WriteString(w, "\n") + } + io.WriteString(w, "\n") + io.WriteString(w, " ") + if r.Flags&Smartypants != 0 { + r.sr.Process(w, []byte(r.Title)) + } else { + escapeHTML(w, []byte(r.Title)) + } + io.WriteString(w, "\n") + io.WriteString(w, " \n") + io.WriteString(w, " \n") + if r.CSS != "" { + io.WriteString(w, " \n") + } + if r.Icon != "" { + io.WriteString(w, " \n") + } + io.WriteString(w, "\n") + io.WriteString(w, "\n\n") +} + +func (r *HTMLRenderer) writeTOC(w io.Writer, ast *Node) { + buf := bytes.Buffer{} + + inHeading := false + tocLevel := 0 + headingCount := 0 + + ast.Walk(func(node *Node, entering bool) WalkStatus { + if node.Type == Heading && !node.HeadingData.IsTitleblock { + inHeading = entering + if entering { + node.HeadingID = fmt.Sprintf("toc_%d", headingCount) + if node.Level == tocLevel { + buf.WriteString("\n\n
<li>")
+				} else if node.Level < tocLevel {
+					for node.Level < tocLevel {
+						tocLevel--
+						buf.WriteString("</li>\n</ul>")
+					}
+					buf.WriteString("\n\n
  • ") + } else { + for node.Level > tocLevel { + tocLevel++ + buf.WriteString("\n") + } + + if buf.Len() > 0 { + io.WriteString(w, "\n") + } + r.lastOutputLen = buf.Len() +} diff --git a/vendor/github.com/russross/blackfriday/v2/inline.go b/vendor/github.com/russross/blackfriday/v2/inline.go new file mode 100644 index 000000000..4ed290792 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/inline.go @@ -0,0 +1,1228 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// Functions to parse inline elements. +// + +package blackfriday + +import ( + "bytes" + "regexp" + "strconv" +) + +var ( + urlRe = `((https?|ftp):\/\/|\/)[-A-Za-z0-9+&@#\/%?=~_|!:,.;\(\)]+` + anchorRe = regexp.MustCompile(`^(]+")?\s?>` + urlRe + `<\/a>)`) + + // https://www.w3.org/TR/html5/syntax.html#character-references + // highest unicode code point in 17 planes (2^20): 1,114,112d = + // 7 dec digits or 6 hex digits + // named entity references can be 2-31 characters with stuff like < + // at one end and ∳ at the other. There + // are also sometimes numbers at the end, although this isn't inherent + // in the specification; there are never numbers anywhere else in + // current character references, though; see ¾ and ▒, etc. + // https://www.w3.org/TR/html5/syntax.html#named-character-references + // + // entity := "&" (named group | number ref) ";" + // named group := [a-zA-Z]{2,31}[0-9]{0,2} + // number ref := "#" (dec ref | hex ref) + // dec ref := [0-9]{1,7} + // hex ref := ("x" | "X") [0-9a-fA-F]{1,6} + htmlEntityRe = regexp.MustCompile(`&([a-zA-Z]{2,31}[0-9]{0,2}|#([0-9]{1,7}|[xX][0-9a-fA-F]{1,6}));`) +) + +// Functions to parse text within a block +// Each function returns the number of chars taken care of +// data is the complete block being rendered +// offset is the number of valid chars before the current cursor + +func (p *Markdown) inline(currBlock *Node, data []byte) { + // handlers might call us recursively: enforce a maximum depth + if p.nesting >= p.maxNesting || len(data) == 0 { + return + } + p.nesting++ + beg, end := 0, 0 + for end < len(data) { + handler := p.inlineCallback[data[end]] + if handler != nil { + if consumed, node := handler(p, data, end); consumed == 0 { + // No action from the callback. + end++ + } else { + // Copy inactive chars into the output. + currBlock.AppendChild(text(data[beg:end])) + if node != nil { + currBlock.AppendChild(node) + } + // Skip past whatever the callback used. 
+ beg = end + consumed + end = beg + } + } else { + end++ + } + } + if beg < len(data) { + if data[end-1] == '\n' { + end-- + } + currBlock.AppendChild(text(data[beg:end])) + } + p.nesting-- +} + +// single and double emphasis parsing +func emphasis(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + c := data[0] + + if len(data) > 2 && data[1] != c { + // whitespace cannot follow an opening emphasis; + // strikethrough only takes two characters '~~' + if c == '~' || isspace(data[1]) { + return 0, nil + } + ret, node := helperEmphasis(p, data[1:], c) + if ret == 0 { + return 0, nil + } + + return ret + 1, node + } + + if len(data) > 3 && data[1] == c && data[2] != c { + if isspace(data[2]) { + return 0, nil + } + ret, node := helperDoubleEmphasis(p, data[2:], c) + if ret == 0 { + return 0, nil + } + + return ret + 2, node + } + + if len(data) > 4 && data[1] == c && data[2] == c && data[3] != c { + if c == '~' || isspace(data[3]) { + return 0, nil + } + ret, node := helperTripleEmphasis(p, data, 3, c) + if ret == 0 { + return 0, nil + } + + return ret + 3, node + } + + return 0, nil +} + +func codeSpan(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + + nb := 0 + + // count the number of backticks in the delimiter + for nb < len(data) && data[nb] == '`' { + nb++ + } + + // find the next delimiter + i, end := 0, 0 + for end = nb; end < len(data) && i < nb; end++ { + if data[end] == '`' { + i++ + } else { + i = 0 + } + } + + // no matching delimiter? + if i < nb && end >= len(data) { + return 0, nil + } + + // trim outside whitespace + fBegin := nb + for fBegin < end && data[fBegin] == ' ' { + fBegin++ + } + + fEnd := end - nb + for fEnd > fBegin && data[fEnd-1] == ' ' { + fEnd-- + } + + // render the code span + if fBegin != fEnd { + code := NewNode(Code) + code.Literal = data[fBegin:fEnd] + return end, code + } + + return end, nil +} + +// newline preceded by two spaces becomes
    +func maybeLineBreak(p *Markdown, data []byte, offset int) (int, *Node) { + origOffset := offset + for offset < len(data) && data[offset] == ' ' { + offset++ + } + + if offset < len(data) && data[offset] == '\n' { + if offset-origOffset >= 2 { + return offset - origOffset + 1, NewNode(Hardbreak) + } + return offset - origOffset, nil + } + return 0, nil +} + +// newline without two spaces works when HardLineBreak is enabled +func lineBreak(p *Markdown, data []byte, offset int) (int, *Node) { + if p.extensions&HardLineBreak != 0 { + return 1, NewNode(Hardbreak) + } + return 0, nil +} + +type linkType int + +const ( + linkNormal linkType = iota + linkImg + linkDeferredFootnote + linkInlineFootnote +) + +func isReferenceStyleLink(data []byte, pos int, t linkType) bool { + if t == linkDeferredFootnote { + return false + } + return pos < len(data)-1 && data[pos] == '[' && data[pos+1] != '^' +} + +func maybeImage(p *Markdown, data []byte, offset int) (int, *Node) { + if offset < len(data)-1 && data[offset+1] == '[' { + return link(p, data, offset) + } + return 0, nil +} + +func maybeInlineFootnote(p *Markdown, data []byte, offset int) (int, *Node) { + if offset < len(data)-1 && data[offset+1] == '[' { + return link(p, data, offset) + } + return 0, nil +} + +// '[': parse a link or an image or a footnote +func link(p *Markdown, data []byte, offset int) (int, *Node) { + // no links allowed inside regular links, footnote, and deferred footnotes + if p.insideLink && (offset > 0 && data[offset-1] == '[' || len(data)-1 > offset && data[offset+1] == '^') { + return 0, nil + } + + var t linkType + switch { + // special case: ![^text] == deferred footnote (that follows something with + // an exclamation point) + case p.extensions&Footnotes != 0 && len(data)-1 > offset && data[offset+1] == '^': + t = linkDeferredFootnote + // ![alt] == image + case offset >= 0 && data[offset] == '!': + t = linkImg + offset++ + // ^[text] == inline footnote + // [^refId] == deferred footnote + case p.extensions&Footnotes != 0: + if offset >= 0 && data[offset] == '^' { + t = linkInlineFootnote + offset++ + } else if len(data)-1 > offset && data[offset+1] == '^' { + t = linkDeferredFootnote + } + // [text] == regular link + default: + t = linkNormal + } + + data = data[offset:] + + var ( + i = 1 + noteID int + title, link, altContent []byte + textHasNl = false + ) + + if t == linkDeferredFootnote { + i++ + } + + // look for the matching closing bracket + for level := 1; level > 0 && i < len(data); i++ { + switch { + case data[i] == '\n': + textHasNl = true + + case data[i-1] == '\\': + continue + + case data[i] == '[': + level++ + + case data[i] == ']': + level-- + if level <= 0 { + i-- // compensate for extra i++ in for loop + } + } + } + + if i >= len(data) { + return 0, nil + } + + txtE := i + i++ + var footnoteNode *Node + + // skip any amount of whitespace or newline + // (this is much more lax than original markdown syntax) + for i < len(data) && isspace(data[i]) { + i++ + } + + // inline style link + switch { + case i < len(data) && data[i] == '(': + // skip initial whitespace + i++ + + for i < len(data) && isspace(data[i]) { + i++ + } + + linkB := i + + // look for link end: ' " ) + findlinkend: + for i < len(data) { + switch { + case data[i] == '\\': + i += 2 + + case data[i] == ')' || data[i] == '\'' || data[i] == '"': + break findlinkend + + default: + i++ + } + } + + if i >= len(data) { + return 0, nil + } + linkE := i + + // look for title end if present + titleB, titleE := 0, 0 + if data[i] == '\'' || 
data[i] == '"' { + i++ + titleB = i + + findtitleend: + for i < len(data) { + switch { + case data[i] == '\\': + i += 2 + + case data[i] == ')': + break findtitleend + + default: + i++ + } + } + + if i >= len(data) { + return 0, nil + } + + // skip whitespace after title + titleE = i - 1 + for titleE > titleB && isspace(data[titleE]) { + titleE-- + } + + // check for closing quote presence + if data[titleE] != '\'' && data[titleE] != '"' { + titleB, titleE = 0, 0 + linkE = i + } + } + + // remove whitespace at the end of the link + for linkE > linkB && isspace(data[linkE-1]) { + linkE-- + } + + // remove optional angle brackets around the link + if data[linkB] == '<' { + linkB++ + } + if data[linkE-1] == '>' { + linkE-- + } + + // build escaped link and title + if linkE > linkB { + link = data[linkB:linkE] + } + + if titleE > titleB { + title = data[titleB:titleE] + } + + i++ + + // reference style link + case isReferenceStyleLink(data, i, t): + var id []byte + altContentConsidered := false + + // look for the id + i++ + linkB := i + for i < len(data) && data[i] != ']' { + i++ + } + if i >= len(data) { + return 0, nil + } + linkE := i + + // find the reference + if linkB == linkE { + if textHasNl { + var b bytes.Buffer + + for j := 1; j < txtE; j++ { + switch { + case data[j] != '\n': + b.WriteByte(data[j]) + case data[j-1] != ' ': + b.WriteByte(' ') + } + } + + id = b.Bytes() + } else { + id = data[1:txtE] + altContentConsidered = true + } + } else { + id = data[linkB:linkE] + } + + // find the reference with matching id + lr, ok := p.getRef(string(id)) + if !ok { + return 0, nil + } + + // keep link and title from reference + link = lr.link + title = lr.title + if altContentConsidered { + altContent = lr.text + } + i++ + + // shortcut reference style link or reference or inline footnote + default: + var id []byte + + // craft the id + if textHasNl { + var b bytes.Buffer + + for j := 1; j < txtE; j++ { + switch { + case data[j] != '\n': + b.WriteByte(data[j]) + case data[j-1] != ' ': + b.WriteByte(' ') + } + } + + id = b.Bytes() + } else { + if t == linkDeferredFootnote { + id = data[2:txtE] // get rid of the ^ + } else { + id = data[1:txtE] + } + } + + footnoteNode = NewNode(Item) + if t == linkInlineFootnote { + // create a new reference + noteID = len(p.notes) + 1 + + var fragment []byte + if len(id) > 0 { + if len(id) < 16 { + fragment = make([]byte, len(id)) + } else { + fragment = make([]byte, 16) + } + copy(fragment, slugify(id)) + } else { + fragment = append([]byte("footnote-"), []byte(strconv.Itoa(noteID))...) 
+ } + + ref := &reference{ + noteID: noteID, + hasBlock: false, + link: fragment, + title: id, + footnote: footnoteNode, + } + + p.notes = append(p.notes, ref) + + link = ref.link + title = ref.title + } else { + // find the reference with matching id + lr, ok := p.getRef(string(id)) + if !ok { + return 0, nil + } + + if t == linkDeferredFootnote { + lr.noteID = len(p.notes) + 1 + lr.footnote = footnoteNode + p.notes = append(p.notes, lr) + } + + // keep link and title from reference + link = lr.link + // if inline footnote, title == footnote contents + title = lr.title + noteID = lr.noteID + } + + // rewind the whitespace + i = txtE + 1 + } + + var uLink []byte + if t == linkNormal || t == linkImg { + if len(link) > 0 { + var uLinkBuf bytes.Buffer + unescapeText(&uLinkBuf, link) + uLink = uLinkBuf.Bytes() + } + + // links need something to click on and somewhere to go + if len(uLink) == 0 || (t == linkNormal && txtE <= 1) { + return 0, nil + } + } + + // call the relevant rendering function + var linkNode *Node + switch t { + case linkNormal: + linkNode = NewNode(Link) + linkNode.Destination = normalizeURI(uLink) + linkNode.Title = title + if len(altContent) > 0 { + linkNode.AppendChild(text(altContent)) + } else { + // links cannot contain other links, so turn off link parsing + // temporarily and recurse + insideLink := p.insideLink + p.insideLink = true + p.inline(linkNode, data[1:txtE]) + p.insideLink = insideLink + } + + case linkImg: + linkNode = NewNode(Image) + linkNode.Destination = uLink + linkNode.Title = title + linkNode.AppendChild(text(data[1:txtE])) + i++ + + case linkInlineFootnote, linkDeferredFootnote: + linkNode = NewNode(Link) + linkNode.Destination = link + linkNode.Title = title + linkNode.NoteID = noteID + linkNode.Footnote = footnoteNode + if t == linkInlineFootnote { + i++ + } + + default: + return 0, nil + } + + return i, linkNode +} + +func (p *Markdown) inlineHTMLComment(data []byte) int { + if len(data) < 5 { + return 0 + } + if data[0] != '<' || data[1] != '!' || data[2] != '-' || data[3] != '-' { + return 0 + } + i := 5 + // scan for an end-of-comment marker, across lines if necessary + for i < len(data) && !(data[i-2] == '-' && data[i-1] == '-' && data[i] == '>') { + i++ + } + // no end-of-comment marker + if i >= len(data) { + return 0 + } + return i + 1 +} + +func stripMailto(link []byte) []byte { + if bytes.HasPrefix(link, []byte("mailto://")) { + return link[9:] + } else if bytes.HasPrefix(link, []byte("mailto:")) { + return link[7:] + } else { + return link + } +} + +// autolinkType specifies a kind of autolink that gets detected. +type autolinkType int + +// These are the possible flag values for the autolink renderer. +const ( + notAutolink autolinkType = iota + normalAutolink + emailAutolink +) + +// '<' when tags or autolinks are allowed +func leftAngle(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + altype, end := tagLength(data) + if size := p.inlineHTMLComment(data); size > 0 { + end = size + } + if end > 2 { + if altype != notAutolink { + var uLink bytes.Buffer + unescapeText(&uLink, data[1:end+1-2]) + if uLink.Len() > 0 { + link := uLink.Bytes() + node := NewNode(Link) + node.Destination = link + if altype == emailAutolink { + node.Destination = append([]byte("mailto:"), link...) 
+ } + node.AppendChild(text(stripMailto(link))) + return end, node + } + } else { + htmlTag := NewNode(HTMLSpan) + htmlTag.Literal = data[:end] + return end, htmlTag + } + } + + return end, nil +} + +// '\\' backslash escape +var escapeChars = []byte("\\`*_{}[]()#+-.!:|&<>~") + +func escape(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + + if len(data) > 1 { + if p.extensions&BackslashLineBreak != 0 && data[1] == '\n' { + return 2, NewNode(Hardbreak) + } + if bytes.IndexByte(escapeChars, data[1]) < 0 { + return 0, nil + } + + return 2, text(data[1:2]) + } + + return 2, nil +} + +func unescapeText(ob *bytes.Buffer, src []byte) { + i := 0 + for i < len(src) { + org := i + for i < len(src) && src[i] != '\\' { + i++ + } + + if i > org { + ob.Write(src[org:i]) + } + + if i+1 >= len(src) { + break + } + + ob.WriteByte(src[i+1]) + i += 2 + } +} + +// '&' escaped when it doesn't belong to an entity +// valid entities are assumed to be anything matching &#?[A-Za-z0-9]+; +func entity(p *Markdown, data []byte, offset int) (int, *Node) { + data = data[offset:] + + end := 1 + + if end < len(data) && data[end] == '#' { + end++ + } + + for end < len(data) && isalnum(data[end]) { + end++ + } + + if end < len(data) && data[end] == ';' { + end++ // real entity + } else { + return 0, nil // lone '&' + } + + ent := data[:end] + // undo & escaping or it will be converted to &amp; by another + // escaper in the renderer + if bytes.Equal(ent, []byte("&")) { + ent = []byte{'&'} + } + + return end, text(ent) +} + +func linkEndsWithEntity(data []byte, linkEnd int) bool { + entityRanges := htmlEntityRe.FindAllIndex(data[:linkEnd], -1) + return entityRanges != nil && entityRanges[len(entityRanges)-1][1] == linkEnd +} + +// hasPrefixCaseInsensitive is a custom implementation of +// strings.HasPrefix(strings.ToLower(s), prefix) +// we rolled our own because ToLower pulls in a huge machinery of lowercasing +// anything from Unicode and that's very slow. Since this func will only be +// used on ASCII protocol prefixes, we can take shortcuts. 
+func hasPrefixCaseInsensitive(s, prefix []byte) bool { + if len(s) < len(prefix) { + return false + } + delta := byte('a' - 'A') + for i, b := range prefix { + if b != s[i] && b != s[i]+delta { + return false + } + } + return true +} + +var protocolPrefixes = [][]byte{ + []byte("http://"), + []byte("https://"), + []byte("ftp://"), + []byte("file://"), + []byte("mailto:"), +} + +const shortestPrefix = 6 // len("ftp://"), the shortest of the above + +func maybeAutoLink(p *Markdown, data []byte, offset int) (int, *Node) { + // quick check to rule out most false hits + if p.insideLink || len(data) < offset+shortestPrefix { + return 0, nil + } + for _, prefix := range protocolPrefixes { + endOfHead := offset + 8 // 8 is the len() of the longest prefix + if endOfHead > len(data) { + endOfHead = len(data) + } + if hasPrefixCaseInsensitive(data[offset:endOfHead], prefix) { + return autoLink(p, data, offset) + } + } + return 0, nil +} + +func autoLink(p *Markdown, data []byte, offset int) (int, *Node) { + // Now a more expensive check to see if we're not inside an anchor element + anchorStart := offset + offsetFromAnchor := 0 + for anchorStart > 0 && data[anchorStart] != '<' { + anchorStart-- + offsetFromAnchor++ + } + + anchorStr := anchorRe.Find(data[anchorStart:]) + if anchorStr != nil { + anchorClose := NewNode(HTMLSpan) + anchorClose.Literal = anchorStr[offsetFromAnchor:] + return len(anchorStr) - offsetFromAnchor, anchorClose + } + + // scan backward for a word boundary + rewind := 0 + for offset-rewind > 0 && rewind <= 7 && isletter(data[offset-rewind-1]) { + rewind++ + } + if rewind > 6 { // longest supported protocol is "mailto" which has 6 letters + return 0, nil + } + + origData := data + data = data[offset-rewind:] + + if !isSafeLink(data) { + return 0, nil + } + + linkEnd := 0 + for linkEnd < len(data) && !isEndOfLink(data[linkEnd]) { + linkEnd++ + } + + // Skip punctuation at the end of the link + if (data[linkEnd-1] == '.' || data[linkEnd-1] == ',') && data[linkEnd-2] != '\\' { + linkEnd-- + } + + // But don't skip semicolon if it's a part of escaped entity: + if data[linkEnd-1] == ';' && data[linkEnd-2] != '\\' && !linkEndsWithEntity(data, linkEnd) { + linkEnd-- + } + + // See if the link finishes with a punctuation sign that can be closed. + var copen byte + switch data[linkEnd-1] { + case '"': + copen = '"' + case '\'': + copen = '\'' + case ')': + copen = '(' + case ']': + copen = '[' + case '}': + copen = '{' + default: + copen = 0 + } + + if copen != 0 { + bufEnd := offset - rewind + linkEnd - 2 + + openDelim := 1 + + /* Try to close the final punctuation sign in this same line; + * if we managed to close it outside of the URL, that means that it's + * not part of the URL. If it closes inside the URL, that means it + * is part of the URL. 
+ * + * Examples: + * + * foo http://www.pokemon.com/Pikachu_(Electric) bar + * => http://www.pokemon.com/Pikachu_(Electric) + * + * foo (http://www.pokemon.com/Pikachu_(Electric)) bar + * => http://www.pokemon.com/Pikachu_(Electric) + * + * foo http://www.pokemon.com/Pikachu_(Electric)) bar + * => http://www.pokemon.com/Pikachu_(Electric)) + * + * (foo http://www.pokemon.com/Pikachu_(Electric)) bar + * => foo http://www.pokemon.com/Pikachu_(Electric) + */ + + for bufEnd >= 0 && origData[bufEnd] != '\n' && openDelim != 0 { + if origData[bufEnd] == data[linkEnd-1] { + openDelim++ + } + + if origData[bufEnd] == copen { + openDelim-- + } + + bufEnd-- + } + + if openDelim == 0 { + linkEnd-- + } + } + + var uLink bytes.Buffer + unescapeText(&uLink, data[:linkEnd]) + + if uLink.Len() > 0 { + node := NewNode(Link) + node.Destination = uLink.Bytes() + node.AppendChild(text(uLink.Bytes())) + return linkEnd, node + } + + return linkEnd, nil +} + +func isEndOfLink(char byte) bool { + return isspace(char) || char == '<' +} + +var validUris = [][]byte{[]byte("http://"), []byte("https://"), []byte("ftp://"), []byte("mailto://")} +var validPaths = [][]byte{[]byte("/"), []byte("./"), []byte("../")} + +func isSafeLink(link []byte) bool { + for _, path := range validPaths { + if len(link) >= len(path) && bytes.Equal(link[:len(path)], path) { + if len(link) == len(path) { + return true + } else if isalnum(link[len(path)]) { + return true + } + } + } + + for _, prefix := range validUris { + // TODO: handle unicode here + // case-insensitive prefix test + if len(link) > len(prefix) && bytes.Equal(bytes.ToLower(link[:len(prefix)]), prefix) && isalnum(link[len(prefix)]) { + return true + } + } + + return false +} + +// return the length of the given tag, or 0 is it's not valid +func tagLength(data []byte) (autolink autolinkType, end int) { + var i, j int + + // a valid tag can't be shorter than 3 chars + if len(data) < 3 { + return notAutolink, 0 + } + + // begins with a '<' optionally followed by '/', followed by letter or number + if data[0] != '<' { + return notAutolink, 0 + } + if data[1] == '/' { + i = 2 + } else { + i = 1 + } + + if !isalnum(data[i]) { + return notAutolink, 0 + } + + // scheme test + autolink = notAutolink + + // try to find the beginning of an URI + for i < len(data) && (isalnum(data[i]) || data[i] == '.' 
|| data[i] == '+' || data[i] == '-') { + i++ + } + + if i > 1 && i < len(data) && data[i] == '@' { + if j = isMailtoAutoLink(data[i:]); j != 0 { + return emailAutolink, i + j + } + } + + if i > 2 && i < len(data) && data[i] == ':' { + autolink = normalAutolink + i++ + } + + // complete autolink test: no whitespace or ' or " + switch { + case i >= len(data): + autolink = notAutolink + case autolink != notAutolink: + j = i + + for i < len(data) { + if data[i] == '\\' { + i += 2 + } else if data[i] == '>' || data[i] == '\'' || data[i] == '"' || isspace(data[i]) { + break + } else { + i++ + } + + } + + if i >= len(data) { + return autolink, 0 + } + if i > j && data[i] == '>' { + return autolink, i + 1 + } + + // one of the forbidden chars has been found + autolink = notAutolink + } + i += bytes.IndexByte(data[i:], '>') + if i < 0 { + return autolink, 0 + } + return autolink, i + 1 +} + +// look for the address part of a mail autolink and '>' +// this is less strict than the original markdown e-mail address matching +func isMailtoAutoLink(data []byte) int { + nb := 0 + + // address is assumed to be: [-@._a-zA-Z0-9]+ with exactly one '@' + for i := 0; i < len(data); i++ { + if isalnum(data[i]) { + continue + } + + switch data[i] { + case '@': + nb++ + + case '-', '.', '_': + break + + case '>': + if nb == 1 { + return i + 1 + } + return 0 + default: + return 0 + } + } + + return 0 +} + +// look for the next emph char, skipping other constructs +func helperFindEmphChar(data []byte, c byte) int { + i := 0 + + for i < len(data) { + for i < len(data) && data[i] != c && data[i] != '`' && data[i] != '[' { + i++ + } + if i >= len(data) { + return 0 + } + // do not count escaped chars + if i != 0 && data[i-1] == '\\' { + i++ + continue + } + if data[i] == c { + return i + } + + if data[i] == '`' { + // skip a code span + tmpI := 0 + i++ + for i < len(data) && data[i] != '`' { + if tmpI == 0 && data[i] == c { + tmpI = i + } + i++ + } + if i >= len(data) { + return tmpI + } + i++ + } else if data[i] == '[' { + // skip a link + tmpI := 0 + i++ + for i < len(data) && data[i] != ']' { + if tmpI == 0 && data[i] == c { + tmpI = i + } + i++ + } + i++ + for i < len(data) && (data[i] == ' ' || data[i] == '\n') { + i++ + } + if i >= len(data) { + return tmpI + } + if data[i] != '[' && data[i] != '(' { // not a link + if tmpI > 0 { + return tmpI + } + continue + } + cc := data[i] + i++ + for i < len(data) && data[i] != cc { + if tmpI == 0 && data[i] == c { + return i + } + i++ + } + if i >= len(data) { + return tmpI + } + i++ + } + } + return 0 +} + +func helperEmphasis(p *Markdown, data []byte, c byte) (int, *Node) { + i := 0 + + // skip one symbol if coming from emph3 + if len(data) > 1 && data[0] == c && data[1] == c { + i = 1 + } + + for i < len(data) { + length := helperFindEmphChar(data[i:], c) + if length == 0 { + return 0, nil + } + i += length + if i >= len(data) { + return 0, nil + } + + if i+1 < len(data) && data[i+1] == c { + i++ + continue + } + + if data[i] == c && !isspace(data[i-1]) { + + if p.extensions&NoIntraEmphasis != 0 { + if !(i+1 == len(data) || isspace(data[i+1]) || ispunct(data[i+1])) { + continue + } + } + + emph := NewNode(Emph) + p.inline(emph, data[:i]) + return i + 1, emph + } + } + + return 0, nil +} + +func helperDoubleEmphasis(p *Markdown, data []byte, c byte) (int, *Node) { + i := 0 + + for i < len(data) { + length := helperFindEmphChar(data[i:], c) + if length == 0 { + return 0, nil + } + i += length + + if i+1 < len(data) && data[i] == c && data[i+1] == c && i > 0 && 
!isspace(data[i-1]) { + nodeType := Strong + if c == '~' { + nodeType = Del + } + node := NewNode(nodeType) + p.inline(node, data[:i]) + return i + 2, node + } + i++ + } + return 0, nil +} + +func helperTripleEmphasis(p *Markdown, data []byte, offset int, c byte) (int, *Node) { + i := 0 + origData := data + data = data[offset:] + + for i < len(data) { + length := helperFindEmphChar(data[i:], c) + if length == 0 { + return 0, nil + } + i += length + + // skip whitespace preceded symbols + if data[i] != c || isspace(data[i-1]) { + continue + } + + switch { + case i+2 < len(data) && data[i+1] == c && data[i+2] == c: + // triple symbol found + strong := NewNode(Strong) + em := NewNode(Emph) + strong.AppendChild(em) + p.inline(em, data[:i]) + return i + 3, strong + case (i+1 < len(data) && data[i+1] == c): + // double symbol found, hand over to emph1 + length, node := helperEmphasis(p, origData[offset-2:], c) + if length == 0 { + return 0, nil + } + return length - 2, node + default: + // single symbol found, hand over to emph2 + length, node := helperDoubleEmphasis(p, origData[offset-1:], c) + if length == 0 { + return 0, nil + } + return length - 1, node + } + } + return 0, nil +} + +func text(s []byte) *Node { + node := NewNode(Text) + node.Literal = s + return node +} + +func normalizeURI(s []byte) []byte { + return s // TODO: implement +} diff --git a/vendor/github.com/russross/blackfriday/v2/markdown.go b/vendor/github.com/russross/blackfriday/v2/markdown.go new file mode 100644 index 000000000..58d2e4538 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/markdown.go @@ -0,0 +1,950 @@ +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. + +package blackfriday + +import ( + "bytes" + "fmt" + "io" + "strings" + "unicode/utf8" +) + +// +// Markdown parsing and processing +// + +// Version string of the package. Appears in the rendered document when +// CompletePage flag is on. +const Version = "2.0" + +// Extensions is a bitwise or'ed collection of enabled Blackfriday's +// extensions. +type Extensions int + +// These are the supported markdown parsing extensions. +// OR these values together to select multiple extensions. 
+const ( + NoExtensions Extensions = 0 + NoIntraEmphasis Extensions = 1 << iota // Ignore emphasis markers inside words + Tables // Render tables + FencedCode // Render fenced code blocks + Autolink // Detect embedded URLs that are not explicitly marked + Strikethrough // Strikethrough text using ~~test~~ + LaxHTMLBlocks // Loosen up HTML block parsing rules + SpaceHeadings // Be strict about prefix heading rules + HardLineBreak // Translate newlines into line breaks + TabSizeEight // Expand tabs to eight spaces instead of four + Footnotes // Pandoc-style footnotes + NoEmptyLineBeforeBlock // No need to insert an empty line to start a (code, quote, ordered list, unordered list) block + HeadingIDs // specify heading IDs with {#id} + Titleblock // Titleblock ala pandoc + AutoHeadingIDs // Create the heading ID from the text + BackslashLineBreak // Translate trailing backslashes into line breaks + DefinitionLists // Render definition lists + + CommonHTMLFlags HTMLFlags = UseXHTML | Smartypants | + SmartypantsFractions | SmartypantsDashes | SmartypantsLatexDashes + + CommonExtensions Extensions = NoIntraEmphasis | Tables | FencedCode | + Autolink | Strikethrough | SpaceHeadings | HeadingIDs | + BackslashLineBreak | DefinitionLists +) + +// ListType contains bitwise or'ed flags for list and list item objects. +type ListType int + +// These are the possible flag values for the ListItem renderer. +// Multiple flag values may be ORed together. +// These are mostly of interest if you are writing a new output format. +const ( + ListTypeOrdered ListType = 1 << iota + ListTypeDefinition + ListTypeTerm + + ListItemContainsBlock + ListItemBeginningOfList // TODO: figure out if this is of any use now + ListItemEndOfList +) + +// CellAlignFlags holds a type of alignment in a table cell. +type CellAlignFlags int + +// These are the possible flag values for the table cell renderer. +// Only a single one of these values will be used; they are not ORed together. +// These are mostly of interest if you are writing a new output format. +const ( + TableAlignmentLeft CellAlignFlags = 1 << iota + TableAlignmentRight + TableAlignmentCenter = (TableAlignmentLeft | TableAlignmentRight) +) + +// The size of a tab stop. +const ( + TabSizeDefault = 4 + TabSizeDouble = 8 +) + +// blockTags is a set of tags that are recognized as HTML block tags. +// Any of these can be included in markdown text without special escaping. +var blockTags = map[string]struct{}{ + "blockquote": {}, + "del": {}, + "div": {}, + "dl": {}, + "fieldset": {}, + "form": {}, + "h1": {}, + "h2": {}, + "h3": {}, + "h4": {}, + "h5": {}, + "h6": {}, + "iframe": {}, + "ins": {}, + "math": {}, + "noscript": {}, + "ol": {}, + "pre": {}, + "p": {}, + "script": {}, + "style": {}, + "table": {}, + "ul": {}, + + // HTML5 + "address": {}, + "article": {}, + "aside": {}, + "canvas": {}, + "figcaption": {}, + "figure": {}, + "footer": {}, + "header": {}, + "hgroup": {}, + "main": {}, + "nav": {}, + "output": {}, + "progress": {}, + "section": {}, + "video": {}, +} + +// Renderer is the rendering interface. This is mostly of interest if you are +// implementing a new rendering format. +// +// Only an HTML implementation is provided in this repository, see the README +// for external implementations. +type Renderer interface { + // RenderNode is the main rendering method. It will be called once for + // every leaf node and twice for every non-leaf node (first with + // entering=true, then with entering=false). 
The method should write its + // rendition of the node to the supplied writer w. + RenderNode(w io.Writer, node *Node, entering bool) WalkStatus + + // RenderHeader is a method that allows the renderer to produce some + // content preceding the main body of the output document. The header is + // understood in the broad sense here. For example, the default HTML + // renderer will write not only the HTML document preamble, but also the + // table of contents if it was requested. + // + // The method will be passed an entire document tree, in case a particular + // implementation needs to inspect it to produce output. + // + // The output should be written to the supplied writer w. If your + // implementation has no header to write, supply an empty implementation. + RenderHeader(w io.Writer, ast *Node) + + // RenderFooter is a symmetric counterpart of RenderHeader. + RenderFooter(w io.Writer, ast *Node) +} + +// Callback functions for inline parsing. One such function is defined +// for each character that triggers a response when parsing inline data. +type inlineParser func(p *Markdown, data []byte, offset int) (int, *Node) + +// Markdown is a type that holds extensions and the runtime state used by +// Parse, and the renderer. You can not use it directly, construct it with New. +type Markdown struct { + renderer Renderer + referenceOverride ReferenceOverrideFunc + refs map[string]*reference + inlineCallback [256]inlineParser + extensions Extensions + nesting int + maxNesting int + insideLink bool + + // Footnotes need to be ordered as well as available to quickly check for + // presence. If a ref is also a footnote, it's stored both in refs and here + // in notes. Slice is nil if footnotes not enabled. + notes []*reference + + doc *Node + tip *Node // = doc + oldTip *Node + lastMatchedContainer *Node // = doc + allClosed bool +} + +func (p *Markdown) getRef(refid string) (ref *reference, found bool) { + if p.referenceOverride != nil { + r, overridden := p.referenceOverride(refid) + if overridden { + if r == nil { + return nil, false + } + return &reference{ + link: []byte(r.Link), + title: []byte(r.Title), + noteID: 0, + hasBlock: false, + text: []byte(r.Text)}, true + } + } + // refs are case insensitive + ref, found = p.refs[strings.ToLower(refid)] + return ref, found +} + +func (p *Markdown) finalize(block *Node) { + above := block.Parent + block.open = false + p.tip = above +} + +func (p *Markdown) addChild(node NodeType, offset uint32) *Node { + return p.addExistingChild(NewNode(node), offset) +} + +func (p *Markdown) addExistingChild(node *Node, offset uint32) *Node { + for !p.tip.canContain(node.Type) { + p.finalize(p.tip) + } + p.tip.AppendChild(node) + p.tip = node + return node +} + +func (p *Markdown) closeUnmatchedBlocks() { + if !p.allClosed { + for p.oldTip != p.lastMatchedContainer { + parent := p.oldTip.Parent + p.finalize(p.oldTip) + p.oldTip = parent + } + p.allClosed = true + } +} + +// +// +// Public interface +// +// + +// Reference represents the details of a link. +// See the documentation in Options for more details on use-case. +type Reference struct { + // Link is usually the URL the reference points to. + Link string + // Title is the alternate text describing the link in more detail. 
+ Title string + // Text is the optional text to override the ref with if the syntax used was + // [refid][] + Text string +} + +// ReferenceOverrideFunc is expected to be called with a reference string and +// return either a valid Reference type that the reference string maps to or +// nil. If overridden is false, the default reference logic will be executed. +// See the documentation in Options for more details on use-case. +type ReferenceOverrideFunc func(reference string) (ref *Reference, overridden bool) + +// New constructs a Markdown processor. You can use the same With* functions as +// for Run() to customize parser's behavior and the renderer. +func New(opts ...Option) *Markdown { + var p Markdown + for _, opt := range opts { + opt(&p) + } + p.refs = make(map[string]*reference) + p.maxNesting = 16 + p.insideLink = false + docNode := NewNode(Document) + p.doc = docNode + p.tip = docNode + p.oldTip = docNode + p.lastMatchedContainer = docNode + p.allClosed = true + // register inline parsers + p.inlineCallback[' '] = maybeLineBreak + p.inlineCallback['*'] = emphasis + p.inlineCallback['_'] = emphasis + if p.extensions&Strikethrough != 0 { + p.inlineCallback['~'] = emphasis + } + p.inlineCallback['`'] = codeSpan + p.inlineCallback['\n'] = lineBreak + p.inlineCallback['['] = link + p.inlineCallback['<'] = leftAngle + p.inlineCallback['\\'] = escape + p.inlineCallback['&'] = entity + p.inlineCallback['!'] = maybeImage + p.inlineCallback['^'] = maybeInlineFootnote + if p.extensions&Autolink != 0 { + p.inlineCallback['h'] = maybeAutoLink + p.inlineCallback['m'] = maybeAutoLink + p.inlineCallback['f'] = maybeAutoLink + p.inlineCallback['H'] = maybeAutoLink + p.inlineCallback['M'] = maybeAutoLink + p.inlineCallback['F'] = maybeAutoLink + } + if p.extensions&Footnotes != 0 { + p.notes = make([]*reference, 0) + } + return &p +} + +// Option customizes the Markdown processor's default behavior. +type Option func(*Markdown) + +// WithRenderer allows you to override the default renderer. +func WithRenderer(r Renderer) Option { + return func(p *Markdown) { + p.renderer = r + } +} + +// WithExtensions allows you to pick some of the many extensions provided by +// Blackfriday. You can bitwise OR them. +func WithExtensions(e Extensions) Option { + return func(p *Markdown) { + p.extensions = e + } +} + +// WithNoExtensions turns off all extensions and custom behavior. +func WithNoExtensions() Option { + return func(p *Markdown) { + p.extensions = NoExtensions + p.renderer = NewHTMLRenderer(HTMLRendererParameters{ + Flags: HTMLFlagsNone, + }) + } +} + +// WithRefOverride sets an optional function callback that is called every +// time a reference is resolved. +// +// In Markdown, the link reference syntax can be made to resolve a link to +// a reference instead of an inline URL, in one of the following ways: +// +// * [link text][refid] +// * [refid][] +// +// Usually, the refid is defined at the bottom of the Markdown document. If +// this override function is provided, the refid is passed to the override +// function first, before consulting the defined refids at the bottom. If +// the override function indicates an override did not occur, the refids at +// the bottom will be used to fill in the link details. +func WithRefOverride(o ReferenceOverrideFunc) Option { + return func(p *Markdown) { + p.referenceOverride = o + } +} + +// Run is the main entry point to Blackfriday. It parses and renders a +// block of markdown-encoded text. 
+// +// The simplest invocation of Run takes one argument, input: +// output := Run(input) +// This will parse the input with CommonExtensions enabled and render it with +// the default HTMLRenderer (with CommonHTMLFlags). +// +// Variadic arguments opts can customize the default behavior. Since Markdown +// type does not contain exported fields, you can not use it directly. Instead, +// use the With* functions. For example, this will call the most basic +// functionality, with no extensions: +// output := Run(input, WithNoExtensions()) +// +// You can use any number of With* arguments, even contradicting ones. They +// will be applied in order of appearance and the latter will override the +// former: +// output := Run(input, WithNoExtensions(), WithExtensions(exts), +// WithRenderer(yourRenderer)) +func Run(input []byte, opts ...Option) []byte { + r := NewHTMLRenderer(HTMLRendererParameters{ + Flags: CommonHTMLFlags, + }) + optList := []Option{WithRenderer(r), WithExtensions(CommonExtensions)} + optList = append(optList, opts...) + parser := New(optList...) + ast := parser.Parse(input) + var buf bytes.Buffer + parser.renderer.RenderHeader(&buf, ast) + ast.Walk(func(node *Node, entering bool) WalkStatus { + return parser.renderer.RenderNode(&buf, node, entering) + }) + parser.renderer.RenderFooter(&buf, ast) + return buf.Bytes() +} + +// Parse is an entry point to the parsing part of Blackfriday. It takes an +// input markdown document and produces a syntax tree for its contents. This +// tree can then be rendered with a default or custom renderer, or +// analyzed/transformed by the caller to whatever non-standard needs they have. +// The return value is the root node of the syntax tree. +func (p *Markdown) Parse(input []byte) *Node { + p.block(input) + // Walk the tree and finish up some of unfinished blocks + for p.tip != nil { + p.finalize(p.tip) + } + // Walk the tree again and process inline markdown in each block + p.doc.Walk(func(node *Node, entering bool) WalkStatus { + if node.Type == Paragraph || node.Type == Heading || node.Type == TableCell { + p.inline(node, node.content) + node.content = nil + } + return GoToNext + }) + p.parseRefsToAST() + return p.doc +} + +func (p *Markdown) parseRefsToAST() { + if p.extensions&Footnotes == 0 || len(p.notes) == 0 { + return + } + p.tip = p.doc + block := p.addBlock(List, nil) + block.IsFootnotesList = true + block.ListFlags = ListTypeOrdered + flags := ListItemBeginningOfList + // Note: this loop is intentionally explicit, not range-form. This is + // because the body of the loop will append nested footnotes to p.notes and + // we need to process those late additions. Range form would only walk over + // the fixed initial set. + for i := 0; i < len(p.notes); i++ { + ref := p.notes[i] + p.addExistingChild(ref.footnote, 0) + block := ref.footnote + block.ListFlags = flags | ListTypeOrdered + block.RefLink = ref.link + if ref.hasBlock { + flags |= ListItemContainsBlock + p.block(ref.title) + } else { + p.inline(block, ref.title) + } + flags &^= ListItemBeginningOfList | ListItemContainsBlock + } + above := block.Parent + finalizeList(block) + p.tip = above + block.Walk(func(node *Node, entering bool) WalkStatus { + if node.Type == Paragraph || node.Type == Heading { + p.inline(node, node.content) + node.content = nil + } + return GoToNext + }) +} + +// +// Link references +// +// This section implements support for references that (usually) appear +// as footnotes in a document, and can be referenced anywhere in the document. 
+// The basic format is: +// +// [1]: http://www.google.com/ "Google" +// [2]: http://www.github.com/ "Github" +// +// Anywhere in the document, the reference can be linked by referring to its +// label, i.e., 1 and 2 in this example, as in: +// +// This library is hosted on [Github][2], a git hosting site. +// +// Actual footnotes as specified in Pandoc and supported by some other Markdown +// libraries such as php-markdown are also taken care of. They look like this: +// +// This sentence needs a bit of further explanation.[^note] +// +// [^note]: This is the explanation. +// +// Footnotes should be placed at the end of the document in an ordered list. +// Finally, there are inline footnotes such as: +// +// Inline footnotes^[Also supported.] provide a quick inline explanation, +// but are rendered at the bottom of the document. +// + +// reference holds all information necessary for a reference-style links or +// footnotes. +// +// Consider this markdown with reference-style links: +// +// [link][ref] +// +// [ref]: /url/ "tooltip title" +// +// It will be ultimately converted to this HTML: +// +//
+//	<p><a href="/url/" title="tooltip title">link</a></p>
    +// +// And a reference structure will be populated as follows: +// +// p.refs["ref"] = &reference{ +// link: "/url/", +// title: "tooltip title", +// } +// +// Alternatively, reference can contain information about a footnote. Consider +// this markdown: +// +// Text needing a footnote.[^a] +// +// [^a]: This is the note +// +// A reference structure will be populated as follows: +// +// p.refs["a"] = &reference{ +// link: "a", +// title: "This is the note", +// noteID: , +// } +// +// TODO: As you can see, it begs for splitting into two dedicated structures +// for refs and for footnotes. +type reference struct { + link []byte + title []byte + noteID int // 0 if not a footnote ref + hasBlock bool + footnote *Node // a link to the Item node within a list of footnotes + + text []byte // only gets populated by refOverride feature with Reference.Text +} + +func (r *reference) String() string { + return fmt.Sprintf("{link: %q, title: %q, text: %q, noteID: %d, hasBlock: %v}", + r.link, r.title, r.text, r.noteID, r.hasBlock) +} + +// Check whether or not data starts with a reference link. +// If so, it is parsed and stored in the list of references +// (in the render struct). +// Returns the number of bytes to skip to move past it, +// or zero if the first line is not a reference. +func isReference(p *Markdown, data []byte, tabSize int) int { + // up to 3 optional leading spaces + if len(data) < 4 { + return 0 + } + i := 0 + for i < 3 && data[i] == ' ' { + i++ + } + + noteID := 0 + + // id part: anything but a newline between brackets + if data[i] != '[' { + return 0 + } + i++ + if p.extensions&Footnotes != 0 { + if i < len(data) && data[i] == '^' { + // we can set it to anything here because the proper noteIds will + // be assigned later during the second pass. It just has to be != 0 + noteID = 1 + i++ + } + } + idOffset := i + for i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != ']' { + i++ + } + if i >= len(data) || data[i] != ']' { + return 0 + } + idEnd := i + // footnotes can have empty ID, like this: [^], but a reference can not be + // empty like this: []. Break early if it's not a footnote and there's no ID + if noteID == 0 && idOffset == idEnd { + return 0 + } + // spacer: colon (space | tab)* newline? 
(space | tab)* + i++ + if i >= len(data) || data[i] != ':' { + return 0 + } + i++ + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + if i < len(data) && (data[i] == '\n' || data[i] == '\r') { + i++ + if i < len(data) && data[i] == '\n' && data[i-1] == '\r' { + i++ + } + } + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + if i >= len(data) { + return 0 + } + + var ( + linkOffset, linkEnd int + titleOffset, titleEnd int + lineEnd int + raw []byte + hasBlock bool + ) + + if p.extensions&Footnotes != 0 && noteID != 0 { + linkOffset, linkEnd, raw, hasBlock = scanFootnote(p, data, i, tabSize) + lineEnd = linkEnd + } else { + linkOffset, linkEnd, titleOffset, titleEnd, lineEnd = scanLinkRef(p, data, i) + } + if lineEnd == 0 { + return 0 + } + + // a valid ref has been found + + ref := &reference{ + noteID: noteID, + hasBlock: hasBlock, + } + + if noteID > 0 { + // reusing the link field for the id since footnotes don't have links + ref.link = data[idOffset:idEnd] + // if footnote, it's not really a title, it's the contained text + ref.title = raw + } else { + ref.link = data[linkOffset:linkEnd] + ref.title = data[titleOffset:titleEnd] + } + + // id matches are case-insensitive + id := string(bytes.ToLower(data[idOffset:idEnd])) + + p.refs[id] = ref + + return lineEnd +} + +func scanLinkRef(p *Markdown, data []byte, i int) (linkOffset, linkEnd, titleOffset, titleEnd, lineEnd int) { + // link: whitespace-free sequence, optionally between angle brackets + if data[i] == '<' { + i++ + } + linkOffset = i + for i < len(data) && data[i] != ' ' && data[i] != '\t' && data[i] != '\n' && data[i] != '\r' { + i++ + } + linkEnd = i + if data[linkOffset] == '<' && data[linkEnd-1] == '>' { + linkOffset++ + linkEnd-- + } + + // optional spacer: (space | tab)* (newline | '\'' | '"' | '(' ) + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + if i < len(data) && data[i] != '\n' && data[i] != '\r' && data[i] != '\'' && data[i] != '"' && data[i] != '(' { + return + } + + // compute end-of-line + if i >= len(data) || data[i] == '\r' || data[i] == '\n' { + lineEnd = i + } + if i+1 < len(data) && data[i] == '\r' && data[i+1] == '\n' { + lineEnd++ + } + + // optional (space|tab)* spacer after a newline + if lineEnd > 0 { + i = lineEnd + 1 + for i < len(data) && (data[i] == ' ' || data[i] == '\t') { + i++ + } + } + + // optional title: any non-newline sequence enclosed in '"() alone on its line + if i+1 < len(data) && (data[i] == '\'' || data[i] == '"' || data[i] == '(') { + i++ + titleOffset = i + + // look for EOL + for i < len(data) && data[i] != '\n' && data[i] != '\r' { + i++ + } + if i+1 < len(data) && data[i] == '\n' && data[i+1] == '\r' { + titleEnd = i + 1 + } else { + titleEnd = i + } + + // step back + i-- + for i > titleOffset && (data[i] == ' ' || data[i] == '\t') { + i-- + } + if i > titleOffset && (data[i] == '\'' || data[i] == '"' || data[i] == ')') { + lineEnd = titleEnd + titleEnd = i + } + } + + return +} + +// The first bit of this logic is the same as Parser.listItem, but the rest +// is much simpler. This function simply finds the entire block and shifts it +// over by one tab if it is indeed a block (just returns the line if it's not). +// blockEnd is the end of the section in the input buffer, and contents is the +// extracted text that was shifted over one tab. It will need to be rendered at +// the end of the document. 
+func scanFootnote(p *Markdown, data []byte, i, indentSize int) (blockStart, blockEnd int, contents []byte, hasBlock bool) { + if i == 0 || len(data) == 0 { + return + } + + // skip leading whitespace on first line + for i < len(data) && data[i] == ' ' { + i++ + } + + blockStart = i + + // find the end of the line + blockEnd = i + for i < len(data) && data[i-1] != '\n' { + i++ + } + + // get working buffer + var raw bytes.Buffer + + // put the first line into the working buffer + raw.Write(data[blockEnd:i]) + blockEnd = i + + // process the following lines + containsBlankLine := false + +gatherLines: + for blockEnd < len(data) { + i++ + + // find the end of this line + for i < len(data) && data[i-1] != '\n' { + i++ + } + + // if it is an empty line, guess that it is part of this item + // and move on to the next line + if p.isEmpty(data[blockEnd:i]) > 0 { + containsBlankLine = true + blockEnd = i + continue + } + + n := 0 + if n = isIndented(data[blockEnd:i], indentSize); n == 0 { + // this is the end of the block. + // we don't want to include this last line in the index. + break gatherLines + } + + // if there were blank lines before this one, insert a new one now + if containsBlankLine { + raw.WriteByte('\n') + containsBlankLine = false + } + + // get rid of that first tab, write to buffer + raw.Write(data[blockEnd+n : i]) + hasBlock = true + + blockEnd = i + } + + if data[blockEnd-1] != '\n' { + raw.WriteByte('\n') + } + + contents = raw.Bytes() + + return +} + +// +// +// Miscellaneous helper functions +// +// + +// Test if a character is a punctuation symbol. +// Taken from a private function in regexp in the stdlib. +func ispunct(c byte) bool { + for _, r := range []byte("!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") { + if c == r { + return true + } + } + return false +} + +// Test if a character is a whitespace character. +func isspace(c byte) bool { + return ishorizontalspace(c) || isverticalspace(c) +} + +// Test if a character is a horizontal whitespace character. +func ishorizontalspace(c byte) bool { + return c == ' ' || c == '\t' +} + +// Test if a character is a vertical character. +func isverticalspace(c byte) bool { + return c == '\n' || c == '\r' || c == '\f' || c == '\v' +} + +// Test if a character is letter. +func isletter(c byte) bool { + return (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') +} + +// Test if a character is a letter or a digit. +// TODO: check when this is looking for ASCII alnum and when it should use unicode +func isalnum(c byte) bool { + return (c >= '0' && c <= '9') || isletter(c) +} + +// Replace tab characters with spaces, aligning to the next TAB_SIZE column. 
+// always ends output with a newline +func expandTabs(out *bytes.Buffer, line []byte, tabSize int) { + // first, check for common cases: no tabs, or only tabs at beginning of line + i, prefix := 0, 0 + slowcase := false + for i = 0; i < len(line); i++ { + if line[i] == '\t' { + if prefix == i { + prefix++ + } else { + slowcase = true + break + } + } + } + + // no need to decode runes if all tabs are at the beginning of the line + if !slowcase { + for i = 0; i < prefix*tabSize; i++ { + out.WriteByte(' ') + } + out.Write(line[prefix:]) + return + } + + // the slow case: we need to count runes to figure out how + // many spaces to insert for each tab + column := 0 + i = 0 + for i < len(line) { + start := i + for i < len(line) && line[i] != '\t' { + _, size := utf8.DecodeRune(line[i:]) + i += size + column++ + } + + if i > start { + out.Write(line[start:i]) + } + + if i >= len(line) { + break + } + + for { + out.WriteByte(' ') + column++ + if column%tabSize == 0 { + break + } + } + + i++ + } +} + +// Find if a line counts as indented or not. +// Returns number of characters the indent is (0 = not indented). +func isIndented(data []byte, indentSize int) int { + if len(data) == 0 { + return 0 + } + if data[0] == '\t' { + return 1 + } + if len(data) < indentSize { + return 0 + } + for i := 0; i < indentSize; i++ { + if data[i] != ' ' { + return 0 + } + } + return indentSize +} + +// Create a url-safe slug for fragments +func slugify(in []byte) []byte { + if len(in) == 0 { + return in + } + out := make([]byte, 0, len(in)) + sym := false + + for _, ch := range in { + if isalnum(ch) { + sym = false + out = append(out, ch) + } else if sym { + continue + } else { + out = append(out, '-') + sym = true + } + } + var a, b int + var ch byte + for a, ch = range out { + if ch != '-' { + break + } + } + for b = len(out) - 1; b > 0; b-- { + if out[b] != '-' { + break + } + } + return out[a : b+1] +} diff --git a/vendor/github.com/russross/blackfriday/v2/node.go b/vendor/github.com/russross/blackfriday/v2/node.go new file mode 100644 index 000000000..51b9e8c1b --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/node.go @@ -0,0 +1,354 @@ +package blackfriday + +import ( + "bytes" + "fmt" +) + +// NodeType specifies a type of a single node of a syntax tree. Usually one +// node (and its type) corresponds to a single markdown feature, e.g. emphasis +// or code block. +type NodeType int + +// Constants for identifying different types of nodes. See NodeType. +const ( + Document NodeType = iota + BlockQuote + List + Item + Paragraph + Heading + HorizontalRule + Emph + Strong + Del + Link + Image + Text + HTMLBlock + CodeBlock + Softbreak + Hardbreak + Code + HTMLSpan + Table + TableCell + TableHead + TableBody + TableRow +) + +var nodeTypeNames = []string{ + Document: "Document", + BlockQuote: "BlockQuote", + List: "List", + Item: "Item", + Paragraph: "Paragraph", + Heading: "Heading", + HorizontalRule: "HorizontalRule", + Emph: "Emph", + Strong: "Strong", + Del: "Del", + Link: "Link", + Image: "Image", + Text: "Text", + HTMLBlock: "HTMLBlock", + CodeBlock: "CodeBlock", + Softbreak: "Softbreak", + Hardbreak: "Hardbreak", + Code: "Code", + HTMLSpan: "HTMLSpan", + Table: "Table", + TableCell: "TableCell", + TableHead: "TableHead", + TableBody: "TableBody", + TableRow: "TableRow", +} + +func (t NodeType) String() string { + return nodeTypeNames[t] +} + +// ListData contains fields relevant to a List and Item node type. +type ListData struct { + ListFlags ListType + Tight bool // Skip

    s around list item data if true + BulletChar byte // '*', '+' or '-' in bullet lists + Delimiter byte // '.' or ')' after the number in ordered lists + RefLink []byte // If not nil, turns this list item into a footnote item and triggers different rendering + IsFootnotesList bool // This is a list of footnotes +} + +// LinkData contains fields relevant to a Link node type. +type LinkData struct { + Destination []byte // Destination is what goes into a href + Title []byte // Title is the tooltip thing that goes in a title attribute + NoteID int // NoteID contains a serial number of a footnote, zero if it's not a footnote + Footnote *Node // If it's a footnote, this is a direct link to the footnote Node. Otherwise nil. +} + +// CodeBlockData contains fields relevant to a CodeBlock node type. +type CodeBlockData struct { + IsFenced bool // Specifies whether it's a fenced code block or an indented one + Info []byte // This holds the info string + FenceChar byte + FenceLength int + FenceOffset int +} + +// TableCellData contains fields relevant to a TableCell node type. +type TableCellData struct { + IsHeader bool // This tells if it's under the header row + Align CellAlignFlags // This holds the value for align attribute +} + +// HeadingData contains fields relevant to a Heading node type. +type HeadingData struct { + Level int // This holds the heading level number + HeadingID string // This might hold heading ID, if present + IsTitleblock bool // Specifies whether it's a title block +} + +// Node is a single element in the abstract syntax tree of the parsed document. +// It holds connections to the structurally neighboring nodes and, for certain +// types of nodes, additional information that might be needed when rendering. +type Node struct { + Type NodeType // Determines the type of the node + Parent *Node // Points to the parent + FirstChild *Node // Points to the first child, if any + LastChild *Node // Points to the last child, if any + Prev *Node // Previous sibling; nil if it's the first child + Next *Node // Next sibling; nil if it's the last child + + Literal []byte // Text contents of the leaf nodes + + HeadingData // Populated if Type is Heading + ListData // Populated if Type is List + CodeBlockData // Populated if Type is CodeBlock + LinkData // Populated if Type is Link + TableCellData // Populated if Type is TableCell + + content []byte // Markdown content of the block nodes + open bool // Specifies an open block node that has not been finished to process yet +} + +// NewNode allocates a node of a specified type. +func NewNode(typ NodeType) *Node { + return &Node{ + Type: typ, + open: true, + } +} + +func (n *Node) String() string { + ellipsis := "" + snippet := n.Literal + if len(snippet) > 16 { + snippet = snippet[:16] + ellipsis = "..." + } + return fmt.Sprintf("%s: '%s%s'", n.Type, snippet, ellipsis) +} + +// Unlink removes node 'n' from the tree. +// It panics if the node is nil. +func (n *Node) Unlink() { + if n.Prev != nil { + n.Prev.Next = n.Next + } else if n.Parent != nil { + n.Parent.FirstChild = n.Next + } + if n.Next != nil { + n.Next.Prev = n.Prev + } else if n.Parent != nil { + n.Parent.LastChild = n.Prev + } + n.Parent = nil + n.Next = nil + n.Prev = nil +} + +// AppendChild adds a node 'child' as a child of 'n'. +// It panics if either node is nil. 
+func (n *Node) AppendChild(child *Node) { + child.Unlink() + child.Parent = n + if n.LastChild != nil { + n.LastChild.Next = child + child.Prev = n.LastChild + n.LastChild = child + } else { + n.FirstChild = child + n.LastChild = child + } +} + +// InsertBefore inserts 'sibling' immediately before 'n'. +// It panics if either node is nil. +func (n *Node) InsertBefore(sibling *Node) { + sibling.Unlink() + sibling.Prev = n.Prev + if sibling.Prev != nil { + sibling.Prev.Next = sibling + } + sibling.Next = n + n.Prev = sibling + sibling.Parent = n.Parent + if sibling.Prev == nil { + sibling.Parent.FirstChild = sibling + } +} + +func (n *Node) isContainer() bool { + switch n.Type { + case Document: + fallthrough + case BlockQuote: + fallthrough + case List: + fallthrough + case Item: + fallthrough + case Paragraph: + fallthrough + case Heading: + fallthrough + case Emph: + fallthrough + case Strong: + fallthrough + case Del: + fallthrough + case Link: + fallthrough + case Image: + fallthrough + case Table: + fallthrough + case TableHead: + fallthrough + case TableBody: + fallthrough + case TableRow: + fallthrough + case TableCell: + return true + default: + return false + } +} + +func (n *Node) canContain(t NodeType) bool { + if n.Type == List { + return t == Item + } + if n.Type == Document || n.Type == BlockQuote || n.Type == Item { + return t != Item + } + if n.Type == Table { + return t == TableHead || t == TableBody + } + if n.Type == TableHead || n.Type == TableBody { + return t == TableRow + } + if n.Type == TableRow { + return t == TableCell + } + return false +} + +// WalkStatus allows NodeVisitor to have some control over the tree traversal. +// It is returned from NodeVisitor and different values allow Node.Walk to +// decide which node to go to next. +type WalkStatus int + +const ( + // GoToNext is the default traversal of every node. + GoToNext WalkStatus = iota + // SkipChildren tells walker to skip all children of current node. + SkipChildren + // Terminate tells walker to terminate the traversal. + Terminate +) + +// NodeVisitor is a callback to be called when traversing the syntax tree. +// Called twice for every node: once with entering=true when the branch is +// first visited, then with entering=false after all the children are done. +type NodeVisitor func(node *Node, entering bool) WalkStatus + +// Walk is a convenience method that instantiates a walker and starts a +// traversal of subtree rooted at n. 
+func (n *Node) Walk(visitor NodeVisitor) { + w := newNodeWalker(n) + for w.current != nil { + status := visitor(w.current, w.entering) + switch status { + case GoToNext: + w.next() + case SkipChildren: + w.entering = false + w.next() + case Terminate: + return + } + } +} + +type nodeWalker struct { + current *Node + root *Node + entering bool +} + +func newNodeWalker(root *Node) *nodeWalker { + return &nodeWalker{ + current: root, + root: root, + entering: true, + } +} + +func (nw *nodeWalker) next() { + if (!nw.current.isContainer() || !nw.entering) && nw.current == nw.root { + nw.current = nil + return + } + if nw.entering && nw.current.isContainer() { + if nw.current.FirstChild != nil { + nw.current = nw.current.FirstChild + nw.entering = true + } else { + nw.entering = false + } + } else if nw.current.Next == nil { + nw.current = nw.current.Parent + nw.entering = false + } else { + nw.current = nw.current.Next + nw.entering = true + } +} + +func dump(ast *Node) { + fmt.Println(dumpString(ast)) +} + +func dumpR(ast *Node, depth int) string { + if ast == nil { + return "" + } + indent := bytes.Repeat([]byte("\t"), depth) + content := ast.Literal + if content == nil { + content = ast.content + } + result := fmt.Sprintf("%s%s(%q)\n", indent, ast.Type, content) + for n := ast.FirstChild; n != nil; n = n.Next { + result += dumpR(n, depth+1) + } + return result +} + +func dumpString(ast *Node) string { + return dumpR(ast, 0) +} diff --git a/vendor/github.com/russross/blackfriday/v2/smartypants.go b/vendor/github.com/russross/blackfriday/v2/smartypants.go new file mode 100644 index 000000000..3a220e942 --- /dev/null +++ b/vendor/github.com/russross/blackfriday/v2/smartypants.go @@ -0,0 +1,457 @@ +// +// Blackfriday Markdown Processor +// Available at http://github.com/russross/blackfriday +// +// Copyright © 2011 Russ Ross . +// Distributed under the Simplified BSD License. +// See README.md for details. +// + +// +// +// SmartyPants rendering +// +// + +package blackfriday + +import ( + "bytes" + "io" +) + +// SPRenderer is a struct containing state of a Smartypants renderer. +type SPRenderer struct { + inSingleQuote bool + inDoubleQuote bool + callbacks [256]smartCallback +} + +func wordBoundary(c byte) bool { + return c == 0 || isspace(c) || ispunct(c) +} + +func tolower(c byte) byte { + if c >= 'A' && c <= 'Z' { + return c - 'A' + 'a' + } + return c +} + +func isdigit(c byte) bool { + return c >= '0' && c <= '9' +} + +func smartQuoteHelper(out *bytes.Buffer, previousChar byte, nextChar byte, quote byte, isOpen *bool, addNBSP bool) bool { + // edge of the buffer is likely to be a tag that we don't get to see, + // so we treat it like text sometimes + + // enumerate all sixteen possibilities for (previousChar, nextChar) + // each can be one of {0, space, punct, other} + switch { + case previousChar == 0 && nextChar == 0: + // context is not any help here, so toggle + *isOpen = !*isOpen + case isspace(previousChar) && nextChar == 0: + // [ "] might be [ "foo...] + *isOpen = true + case ispunct(previousChar) && nextChar == 0: + // [!"] hmm... could be [Run!"] or [("...] + *isOpen = false + case /* isnormal(previousChar) && */ nextChar == 0: + // [a"] is probably a close + *isOpen = false + case previousChar == 0 && isspace(nextChar): + // [" ] might be [...foo" ] + *isOpen = false + case isspace(previousChar) && isspace(nextChar): + // [ " ] context is not any help here, so toggle + *isOpen = !*isOpen + case ispunct(previousChar) && isspace(nextChar): + // [!" 
] is probably a close + *isOpen = false + case /* isnormal(previousChar) && */ isspace(nextChar): + // [a" ] this is one of the easy cases + *isOpen = false + case previousChar == 0 && ispunct(nextChar): + // ["!] hmm... could be ["$1.95] or ["!...] + *isOpen = false + case isspace(previousChar) && ispunct(nextChar): + // [ "!] looks more like [ "$1.95] + *isOpen = true + case ispunct(previousChar) && ispunct(nextChar): + // [!"!] context is not any help here, so toggle + *isOpen = !*isOpen + case /* isnormal(previousChar) && */ ispunct(nextChar): + // [a"!] is probably a close + *isOpen = false + case previousChar == 0 /* && isnormal(nextChar) */ : + // ["a] is probably an open + *isOpen = true + case isspace(previousChar) /* && isnormal(nextChar) */ : + // [ "a] this is one of the easy cases + *isOpen = true + case ispunct(previousChar) /* && isnormal(nextChar) */ : + // [!"a] is probably an open + *isOpen = true + default: + // [a'b] maybe a contraction? + *isOpen = false + } + + // Note that with the limited lookahead, this non-breaking + // space will also be appended to single double quotes. + if addNBSP && !*isOpen { + out.WriteString(" ") + } + + out.WriteByte('&') + if *isOpen { + out.WriteByte('l') + } else { + out.WriteByte('r') + } + out.WriteByte(quote) + out.WriteString("quo;") + + if addNBSP && *isOpen { + out.WriteString(" ") + } + + return true +} + +func (r *SPRenderer) smartSingleQuote(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 2 { + t1 := tolower(text[1]) + + if t1 == '\'' { + nextChar := byte(0) + if len(text) >= 3 { + nextChar = text[2] + } + if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) { + return 1 + } + } + + if (t1 == 's' || t1 == 't' || t1 == 'm' || t1 == 'd') && (len(text) < 3 || wordBoundary(text[2])) { + out.WriteString("’") + return 0 + } + + if len(text) >= 3 { + t2 := tolower(text[2]) + + if ((t1 == 'r' && t2 == 'e') || (t1 == 'l' && t2 == 'l') || (t1 == 'v' && t2 == 'e')) && + (len(text) < 4 || wordBoundary(text[3])) { + out.WriteString("’") + return 0 + } + } + } + + nextChar := byte(0) + if len(text) > 1 { + nextChar = text[1] + } + if smartQuoteHelper(out, previousChar, nextChar, 's', &r.inSingleQuote, false) { + return 0 + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartParens(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 3 { + t1 := tolower(text[1]) + t2 := tolower(text[2]) + + if t1 == 'c' && t2 == ')' { + out.WriteString("©") + return 2 + } + + if t1 == 'r' && t2 == ')' { + out.WriteString("®") + return 2 + } + + if len(text) >= 4 && t1 == 't' && t2 == 'm' && text[3] == ')' { + out.WriteString("™") + return 3 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartDash(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 2 { + if text[1] == '-' { + out.WriteString("—") + return 1 + } + + if wordBoundary(previousChar) && wordBoundary(text[1]) { + out.WriteString("–") + return 0 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartDashLatex(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 3 && text[1] == '-' && text[2] == '-' { + out.WriteString("—") + return 2 + } + if len(text) >= 2 && text[1] == '-' { + out.WriteString("–") + return 1 + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartAmpVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte, addNBSP bool) int { + if bytes.HasPrefix(text, []byte(""")) 
{ + nextChar := byte(0) + if len(text) >= 7 { + nextChar = text[6] + } + if smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, addNBSP) { + return 5 + } + } + + if bytes.HasPrefix(text, []byte("�")) { + return 3 + } + + out.WriteByte('&') + return 0 +} + +func (r *SPRenderer) smartAmp(angledQuotes, addNBSP bool) func(*bytes.Buffer, byte, []byte) int { + var quote byte = 'd' + if angledQuotes { + quote = 'a' + } + + return func(out *bytes.Buffer, previousChar byte, text []byte) int { + return r.smartAmpVariant(out, previousChar, text, quote, addNBSP) + } +} + +func (r *SPRenderer) smartPeriod(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 3 && text[1] == '.' && text[2] == '.' { + out.WriteString("…") + return 2 + } + + if len(text) >= 5 && text[1] == ' ' && text[2] == '.' && text[3] == ' ' && text[4] == '.' { + out.WriteString("…") + return 4 + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartBacktick(out *bytes.Buffer, previousChar byte, text []byte) int { + if len(text) >= 2 && text[1] == '`' { + nextChar := byte(0) + if len(text) >= 3 { + nextChar = text[2] + } + if smartQuoteHelper(out, previousChar, nextChar, 'd', &r.inDoubleQuote, false) { + return 1 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartNumberGeneric(out *bytes.Buffer, previousChar byte, text []byte) int { + if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 { + // is it of the form digits/digits(word boundary)?, i.e., \d+/\d+\b + // note: check for regular slash (/) or fraction slash (⁄, 0x2044, or 0xe2 81 84 in utf-8) + // and avoid changing dates like 1/23/2005 into fractions. + numEnd := 0 + for len(text) > numEnd && isdigit(text[numEnd]) { + numEnd++ + } + if numEnd == 0 { + out.WriteByte(text[0]) + return 0 + } + denStart := numEnd + 1 + if len(text) > numEnd+3 && text[numEnd] == 0xe2 && text[numEnd+1] == 0x81 && text[numEnd+2] == 0x84 { + denStart = numEnd + 3 + } else if len(text) < numEnd+2 || text[numEnd] != '/' { + out.WriteByte(text[0]) + return 0 + } + denEnd := denStart + for len(text) > denEnd && isdigit(text[denEnd]) { + denEnd++ + } + if denEnd == denStart { + out.WriteByte(text[0]) + return 0 + } + if len(text) == denEnd || wordBoundary(text[denEnd]) && text[denEnd] != '/' { + out.WriteString("") + out.Write(text[:numEnd]) + out.WriteString("") + out.Write(text[denStart:denEnd]) + out.WriteString("") + return denEnd - 1 + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartNumber(out *bytes.Buffer, previousChar byte, text []byte) int { + if wordBoundary(previousChar) && previousChar != '/' && len(text) >= 3 { + if text[0] == '1' && text[1] == '/' && text[2] == '2' { + if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' { + out.WriteString("½") + return 2 + } + } + + if text[0] == '1' && text[1] == '/' && text[2] == '4' { + if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 5 && tolower(text[3]) == 't' && tolower(text[4]) == 'h') { + out.WriteString("¼") + return 2 + } + } + + if text[0] == '3' && text[1] == '/' && text[2] == '4' { + if len(text) < 4 || wordBoundary(text[3]) && text[3] != '/' || (len(text) >= 6 && tolower(text[3]) == 't' && tolower(text[4]) == 'h' && tolower(text[5]) == 's') { + out.WriteString("¾") + return 2 + } + } + } + + out.WriteByte(text[0]) + return 0 +} + +func (r *SPRenderer) smartDoubleQuoteVariant(out *bytes.Buffer, previousChar byte, text []byte, quote byte) int { + nextChar := byte(0) + if len(text) > 
1 { + nextChar = text[1] + } + if !smartQuoteHelper(out, previousChar, nextChar, quote, &r.inDoubleQuote, false) { + out.WriteString(""") + } + + return 0 +} + +func (r *SPRenderer) smartDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int { + return r.smartDoubleQuoteVariant(out, previousChar, text, 'd') +} + +func (r *SPRenderer) smartAngledDoubleQuote(out *bytes.Buffer, previousChar byte, text []byte) int { + return r.smartDoubleQuoteVariant(out, previousChar, text, 'a') +} + +func (r *SPRenderer) smartLeftAngle(out *bytes.Buffer, previousChar byte, text []byte) int { + i := 0 + + for i < len(text) && text[i] != '>' { + i++ + } + + out.Write(text[:i+1]) + return i +} + +type smartCallback func(out *bytes.Buffer, previousChar byte, text []byte) int + +// NewSmartypantsRenderer constructs a Smartypants renderer object. +func NewSmartypantsRenderer(flags HTMLFlags) *SPRenderer { + var ( + r SPRenderer + + smartAmpAngled = r.smartAmp(true, false) + smartAmpAngledNBSP = r.smartAmp(true, true) + smartAmpRegular = r.smartAmp(false, false) + smartAmpRegularNBSP = r.smartAmp(false, true) + + addNBSP = flags&SmartypantsQuotesNBSP != 0 + ) + + if flags&SmartypantsAngledQuotes == 0 { + r.callbacks['"'] = r.smartDoubleQuote + if !addNBSP { + r.callbacks['&'] = smartAmpRegular + } else { + r.callbacks['&'] = smartAmpRegularNBSP + } + } else { + r.callbacks['"'] = r.smartAngledDoubleQuote + if !addNBSP { + r.callbacks['&'] = smartAmpAngled + } else { + r.callbacks['&'] = smartAmpAngledNBSP + } + } + r.callbacks['\''] = r.smartSingleQuote + r.callbacks['('] = r.smartParens + if flags&SmartypantsDashes != 0 { + if flags&SmartypantsLatexDashes == 0 { + r.callbacks['-'] = r.smartDash + } else { + r.callbacks['-'] = r.smartDashLatex + } + } + r.callbacks['.'] = r.smartPeriod + if flags&SmartypantsFractions == 0 { + r.callbacks['1'] = r.smartNumber + r.callbacks['3'] = r.smartNumber + } else { + for ch := '1'; ch <= '9'; ch++ { + r.callbacks[ch] = r.smartNumberGeneric + } + } + r.callbacks['<'] = r.smartLeftAngle + r.callbacks['`'] = r.smartBacktick + return &r +} + +// Process is the entry point of the Smartypants renderer. +func (r *SPRenderer) Process(w io.Writer, text []byte) { + mark := 0 + for i := 0; i < len(text); i++ { + if action := r.callbacks[text[i]]; action != nil { + if i > mark { + w.Write(text[mark:i]) + } + previousChar := byte(0) + if i > 0 { + previousChar = text[i-1] + } + var tmp bytes.Buffer + i += action(&tmp, previousChar, text[i:]) + w.Write(tmp.Bytes()) + mark = i + 1 + } + } + if mark < len(text) { + w.Write(text[mark:]) + } +} diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml b/vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml new file mode 100644 index 000000000..93b1fcdb3 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/.travis.yml @@ -0,0 +1,16 @@ +sudo: false +language: go +go: + - 1.x + - master +matrix: + allow_failures: + - go: master + fast_finish: true +install: + - # Do nothing. This is needed to prevent default install action "go get -t -v ./..." from happening here (we want it to happen inside script step). +script: + - go get -t -v ./... + - diff -u <(echo -n) <(gofmt -d -s .) + - go tool vet . + - go test -v -race ./... 
diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE new file mode 100644 index 000000000..c35c17af9 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2015 Dmitri Shuralyov + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/README.md b/vendor/github.com/shurcooL/sanitized_anchor_name/README.md new file mode 100644 index 000000000..670bf0fe6 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/README.md @@ -0,0 +1,36 @@ +sanitized_anchor_name +===================== + +[![Build Status](https://travis-ci.org/shurcooL/sanitized_anchor_name.svg?branch=master)](https://travis-ci.org/shurcooL/sanitized_anchor_name) [![GoDoc](https://godoc.org/github.com/shurcooL/sanitized_anchor_name?status.svg)](https://godoc.org/github.com/shurcooL/sanitized_anchor_name) + +Package sanitized_anchor_name provides a func to create sanitized anchor names. + +Its logic can be reused by multiple packages to create interoperable anchor names +and links to those anchors. + +At this time, it does not try to ensure that generated anchor names +are unique, that responsibility falls on the caller. + +Installation +------------ + +```bash +go get -u github.com/shurcooL/sanitized_anchor_name +``` + +Example +------- + +```Go +anchorName := sanitized_anchor_name.Create("This is a header") + +fmt.Println(anchorName) + +// Output: +// this-is-a-header +``` + +License +------- + +- [MIT License](LICENSE) diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/go.mod b/vendor/github.com/shurcooL/sanitized_anchor_name/go.mod new file mode 100644 index 000000000..1e2553475 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/go.mod @@ -0,0 +1 @@ +module github.com/shurcooL/sanitized_anchor_name diff --git a/vendor/github.com/shurcooL/sanitized_anchor_name/main.go b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go new file mode 100644 index 000000000..6a77d1243 --- /dev/null +++ b/vendor/github.com/shurcooL/sanitized_anchor_name/main.go @@ -0,0 +1,29 @@ +// Package sanitized_anchor_name provides a func to create sanitized anchor names. +// +// Its logic can be reused by multiple packages to create interoperable anchor names +// and links to those anchors. 
+// +// At this time, it does not try to ensure that generated anchor names +// are unique, that responsibility falls on the caller. +package sanitized_anchor_name // import "github.com/shurcooL/sanitized_anchor_name" + +import "unicode" + +// Create returns a sanitized anchor name for the given text. +func Create(text string) string { + var anchorName []rune + var futureDash = false + for _, r := range text { + switch { + case unicode.IsLetter(r) || unicode.IsNumber(r): + if futureDash && len(anchorName) > 0 { + anchorName = append(anchorName, '-') + } + futureDash = false + anchorName = append(anchorName, unicode.ToLower(r)) + default: + futureDash = true + } + } + return string(anchorName) +} diff --git a/vendor/github.com/spf13/cobra/doc/README.md b/vendor/github.com/spf13/cobra/doc/README.md new file mode 100644 index 000000000..6ea4eb662 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/README.md @@ -0,0 +1,12 @@ +# Documentation generation + +- [Man page docs](./man_docs.md) +- [Markdown docs](./md_docs.md) +- [Rest docs](./rest_docs.md) +- [Yaml docs](./yaml_docs.md) + +## Options +### `DisableAutoGenTag` +You may set `cmd.DisableAutoGenTag = true` +to _entirely_ remove the auto generated string "Auto generated by spf13/cobra..." +from any documentation source. diff --git a/vendor/github.com/spf13/cobra/doc/man_docs.go b/vendor/github.com/spf13/cobra/doc/man_docs.go new file mode 100644 index 000000000..916e36144 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/man_docs.go @@ -0,0 +1,245 @@ +// Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strconv" + "strings" + "time" + + "github.com/cpuguy83/go-md2man/v2/md2man" + "github.com/spf13/cobra" + "github.com/spf13/pflag" +) + +// GenManTree will generate a man page for this command and all descendants +// in the directory given. The header may be nil. This function may not work +// correctly if your command names have `-` in them. If you have `cmd` with two +// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third` +// it is undefined which help output will be in the file `cmd-sub-third.1`. +func GenManTree(cmd *cobra.Command, header *GenManHeader, dir string) error { + return GenManTreeFromOpts(cmd, GenManTreeOptions{ + Header: header, + Path: dir, + CommandSeparator: "-", + }) +} + +// GenManTreeFromOpts generates a man page for the command and all descendants. +// The pages are written to the opts.Path directory. 
+func GenManTreeFromOpts(cmd *cobra.Command, opts GenManTreeOptions) error { + header := opts.Header + if header == nil { + header = &GenManHeader{} + } + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenManTreeFromOpts(c, opts); err != nil { + return err + } + } + section := "1" + if header.Section != "" { + section = header.Section + } + + separator := "_" + if opts.CommandSeparator != "" { + separator = opts.CommandSeparator + } + basename := strings.Replace(cmd.CommandPath(), " ", separator, -1) + filename := filepath.Join(opts.Path, basename+"."+section) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + headerCopy := *header + return GenMan(cmd, &headerCopy, f) +} + +// GenManTreeOptions is the options for generating the man pages. +// Used only in GenManTreeFromOpts. +type GenManTreeOptions struct { + Header *GenManHeader + Path string + CommandSeparator string +} + +// GenManHeader is a lot like the .TH header at the start of man pages. These +// include the title, section, date, source, and manual. We will use the +// current time if Date is unset and will use "Auto generated by spf13/cobra" +// if the Source is unset. +type GenManHeader struct { + Title string + Section string + Date *time.Time + date string + Source string + Manual string +} + +// GenMan will generate a man page for the given command and write it to +// w. The header argument may be nil, however obviously w may not. +func GenMan(cmd *cobra.Command, header *GenManHeader, w io.Writer) error { + if header == nil { + header = &GenManHeader{} + } + if err := fillHeader(header, cmd.CommandPath(), cmd.DisableAutoGenTag); err != nil { + return err + } + + b := genMan(cmd, header) + _, err := w.Write(md2man.Render(b)) + return err +} + +func fillHeader(header *GenManHeader, name string, disableAutoGen bool) error { + if header.Title == "" { + header.Title = strings.ToUpper(strings.Replace(name, " ", "\\-", -1)) + } + if header.Section == "" { + header.Section = "1" + } + if header.Date == nil { + now := time.Now() + if epoch := os.Getenv("SOURCE_DATE_EPOCH"); epoch != "" { + unixEpoch, err := strconv.ParseInt(epoch, 10, 64) + if err != nil { + return fmt.Errorf("invalid SOURCE_DATE_EPOCH: %v", err) + } + now = time.Unix(unixEpoch, 0) + } + header.Date = &now + } + header.date = (*header.Date).Format("Jan 2006") + if header.Source == "" && !disableAutoGen { + header.Source = "Auto generated by spf13/cobra" + } + return nil +} + +func manPreamble(buf io.StringWriter, header *GenManHeader, cmd *cobra.Command, dashedName string) { + description := cmd.Long + if len(description) == 0 { + description = cmd.Short + } + + cobra.WriteStringAndCheck(buf, fmt.Sprintf(`%% "%s" "%s" "%s" "%s" "%s" +# NAME +`, header.Title, header.Section, header.date, header.Source, header.Manual)) + cobra.WriteStringAndCheck(buf, fmt.Sprintf("%s \\- %s\n\n", dashedName, cmd.Short)) + cobra.WriteStringAndCheck(buf, "# SYNOPSIS\n") + cobra.WriteStringAndCheck(buf, fmt.Sprintf("**%s**\n\n", cmd.UseLine())) + cobra.WriteStringAndCheck(buf, "# DESCRIPTION\n") + cobra.WriteStringAndCheck(buf, description+"\n\n") +} + +func manPrintFlags(buf io.StringWriter, flags *pflag.FlagSet) { + flags.VisitAll(func(flag *pflag.Flag) { + if len(flag.Deprecated) > 0 || flag.Hidden { + return + } + format := "" + if len(flag.Shorthand) > 0 && len(flag.ShorthandDeprecated) == 0 { + format = fmt.Sprintf("**-%s**, **--%s**", flag.Shorthand, flag.Name) + } 
else { + format = fmt.Sprintf("**--%s**", flag.Name) + } + if len(flag.NoOptDefVal) > 0 { + format += "[" + } + if flag.Value.Type() == "string" { + // put quotes on the value + format += "=%q" + } else { + format += "=%s" + } + if len(flag.NoOptDefVal) > 0 { + format += "]" + } + format += "\n\t%s\n\n" + cobra.WriteStringAndCheck(buf, fmt.Sprintf(format, flag.DefValue, flag.Usage)) + }) +} + +func manPrintOptions(buf io.StringWriter, command *cobra.Command) { + flags := command.NonInheritedFlags() + if flags.HasAvailableFlags() { + cobra.WriteStringAndCheck(buf, "# OPTIONS\n") + manPrintFlags(buf, flags) + cobra.WriteStringAndCheck(buf, "\n") + } + flags = command.InheritedFlags() + if flags.HasAvailableFlags() { + cobra.WriteStringAndCheck(buf, "# OPTIONS INHERITED FROM PARENT COMMANDS\n") + manPrintFlags(buf, flags) + cobra.WriteStringAndCheck(buf, "\n") + } +} + +func genMan(cmd *cobra.Command, header *GenManHeader) []byte { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + // something like `rootcmd-subcmd1-subcmd2` + dashCommandName := strings.Replace(cmd.CommandPath(), " ", "-", -1) + + buf := new(bytes.Buffer) + + manPreamble(buf, header, cmd, dashCommandName) + manPrintOptions(buf, cmd) + if len(cmd.Example) > 0 { + buf.WriteString("# EXAMPLE\n") + buf.WriteString(fmt.Sprintf("```\n%s\n```\n", cmd.Example)) + } + if hasSeeAlso(cmd) { + buf.WriteString("# SEE ALSO\n") + seealsos := make([]string, 0) + if cmd.HasParent() { + parentPath := cmd.Parent().CommandPath() + dashParentPath := strings.Replace(parentPath, " ", "-", -1) + seealso := fmt.Sprintf("**%s(%s)**", dashParentPath, header.Section) + seealsos = append(seealsos, seealso) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + children := cmd.Commands() + sort.Sort(byName(children)) + for _, c := range children { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + seealso := fmt.Sprintf("**%s-%s(%s)**", dashCommandName, c.Name(), header.Section) + seealsos = append(seealsos, seealso) + } + buf.WriteString(strings.Join(seealsos, ", ") + "\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString(fmt.Sprintf("# HISTORY\n%s Auto generated by spf13/cobra\n", header.Date.Format("2-Jan-2006"))) + } + return buf.Bytes() +} diff --git a/vendor/github.com/spf13/cobra/doc/man_docs.md b/vendor/github.com/spf13/cobra/doc/man_docs.md new file mode 100644 index 000000000..3709160f3 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/man_docs.md @@ -0,0 +1,31 @@ +# Generating Man Pages For Your Own cobra.Command + +Generating man pages from a cobra command is incredibly easy. An example is as follows: + +```go +package main + +import ( + "log" + + "github.com/spf13/cobra" + "github.com/spf13/cobra/doc" +) + +func main() { + cmd := &cobra.Command{ + Use: "test", + Short: "my test program", + } + header := &doc.GenManHeader{ + Title: "MINE", + Section: "3", + } + err := doc.GenManTree(cmd, header, "/tmp") + if err != nil { + log.Fatal(err) + } +} +``` + +That will get you a man page `/tmp/test.3` diff --git a/vendor/github.com/spf13/cobra/doc/md_docs.go b/vendor/github.com/spf13/cobra/doc/md_docs.go new file mode 100644 index 000000000..5181af8dc --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/md_docs.go @@ -0,0 +1,155 @@ +//Copyright 2015 Red Hat Inc. All rights reserved. 
+// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/spf13/cobra" +) + +func printOptions(buf *bytes.Buffer, cmd *cobra.Command, name string) error { + flags := cmd.NonInheritedFlags() + flags.SetOutput(buf) + if flags.HasAvailableFlags() { + buf.WriteString("### Options\n\n```\n") + flags.PrintDefaults() + buf.WriteString("```\n\n") + } + + parentFlags := cmd.InheritedFlags() + parentFlags.SetOutput(buf) + if parentFlags.HasAvailableFlags() { + buf.WriteString("### Options inherited from parent commands\n\n```\n") + parentFlags.PrintDefaults() + buf.WriteString("```\n\n") + } + return nil +} + +// GenMarkdown creates markdown output. +func GenMarkdown(cmd *cobra.Command, w io.Writer) error { + return GenMarkdownCustom(cmd, w, func(s string) string { return s }) +} + +// GenMarkdownCustom creates custom markdown output. +func GenMarkdownCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + buf := new(bytes.Buffer) + name := cmd.CommandPath() + + buf.WriteString("## " + name + "\n\n") + buf.WriteString(cmd.Short + "\n\n") + if len(cmd.Long) > 0 { + buf.WriteString("### Synopsis\n\n") + buf.WriteString(cmd.Long + "\n\n") + } + + if cmd.Runnable() { + buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.UseLine())) + } + + if len(cmd.Example) > 0 { + buf.WriteString("### Examples\n\n") + buf.WriteString(fmt.Sprintf("```\n%s\n```\n\n", cmd.Example)) + } + + if err := printOptions(buf, cmd, name); err != nil { + return err + } + if hasSeeAlso(cmd) { + buf.WriteString("### SEE ALSO\n\n") + if cmd.HasParent() { + parent := cmd.Parent() + pname := parent.CommandPath() + link := pname + ".md" + link = strings.Replace(link, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* [%s](%s)\t - %s\n", pname, linkHandler(link), parent.Short)) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + + children := cmd.Commands() + sort.Sort(byName(children)) + + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + cname := name + " " + child.Name() + link := cname + ".md" + link = strings.Replace(link, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* [%s](%s)\t - %s\n", cname, linkHandler(link), child.Short)) + } + buf.WriteString("\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString("###### Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "\n") + } + _, err := buf.WriteTo(w) + return err +} + +// GenMarkdownTree will generate a markdown page for this command and all +// descendants in the directory given. The header may be nil. +// This function may not work correctly if your command names have `-` in them. 
+// If you have `cmd` with two subcmds, `sub` and `sub-third`, +// and `sub` has a subcommand called `third`, it is undefined which +// help output will be in the file `cmd-sub-third.1`. +func GenMarkdownTree(cmd *cobra.Command, dir string) error { + identity := func(s string) string { return s } + emptyStr := func(s string) string { return "" } + return GenMarkdownTreeCustom(cmd, dir, emptyStr, identity) +} + +// GenMarkdownTreeCustom is the the same as GenMarkdownTree, but +// with custom filePrepender and linkHandler. +func GenMarkdownTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenMarkdownTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".md" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenMarkdownCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} diff --git a/vendor/github.com/spf13/cobra/doc/md_docs.md b/vendor/github.com/spf13/cobra/doc/md_docs.md new file mode 100644 index 000000000..5c870625f --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/md_docs.md @@ -0,0 +1,115 @@ +# Generating Markdown Docs For Your Own cobra.Command + +Generating Markdown pages from a cobra command is incredibly easy. An example is as follows: + +```go +package main + +import ( + "log" + + "github.com/spf13/cobra" + "github.com/spf13/cobra/doc" +) + +func main() { + cmd := &cobra.Command{ + Use: "test", + Short: "my test program", + } + err := doc.GenMarkdownTree(cmd, "/tmp") + if err != nil { + log.Fatal(err) + } +} +``` + +That will get you a Markdown document `/tmp/test.md` + +## Generate markdown docs for the entire command tree + +This program can actually generate docs for the kubectl command in the kubernetes project + +```go +package main + +import ( + "log" + "io/ioutil" + "os" + + "k8s.io/kubernetes/pkg/kubectl/cmd" + cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util" + + "github.com/spf13/cobra/doc" +) + +func main() { + kubectl := cmd.NewKubectlCommand(cmdutil.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard) + err := doc.GenMarkdownTree(kubectl, "./") + if err != nil { + log.Fatal(err) + } +} +``` + +This will generate a whole series of files, one for each command in the tree, in the directory specified (in this case "./") + +## Generate markdown docs for a single command + +You may wish to have more control over the output, or only generate for a single command, instead of the entire command tree. If this is the case you may prefer to `GenMarkdown` instead of `GenMarkdownTree` + +```go + out := new(bytes.Buffer) + err := doc.GenMarkdown(cmd, out) + if err != nil { + log.Fatal(err) + } +``` + +This will write the markdown doc for ONLY "cmd" into the out, buffer. + +## Customize the output + +Both `GenMarkdown` and `GenMarkdownTree` have alternate versions with callbacks to get some control of the output: + +```go +func GenMarkdownTreeCustom(cmd *Command, dir string, filePrepender, linkHandler func(string) string) error { + //... +} +``` + +```go +func GenMarkdownCustom(cmd *Command, out *bytes.Buffer, linkHandler func(string) string) error { + //... 
+} +``` + +The `filePrepender` will prepend the return value given the full filepath to the rendered Markdown file. A common use case is to add front matter to use the generated documentation with [Hugo](http://gohugo.io/): + +```go +const fmTemplate = `--- +date: %s +title: "%s" +slug: %s +url: %s +--- +` + +filePrepender := func(filename string) string { + now := time.Now().Format(time.RFC3339) + name := filepath.Base(filename) + base := strings.TrimSuffix(name, path.Ext(name)) + url := "/commands/" + strings.ToLower(base) + "/" + return fmt.Sprintf(fmTemplate, now, strings.Replace(base, "_", " ", -1), base, url) +} +``` + +The `linkHandler` can be used to customize the rendered internal links to the commands, given a filename: + +```go +linkHandler := func(name string) string { + base := strings.TrimSuffix(name, path.Ext(name)) + return "/commands/" + strings.ToLower(base) + "/" +} +``` diff --git a/vendor/github.com/spf13/cobra/doc/rest_docs.go b/vendor/github.com/spf13/cobra/doc/rest_docs.go new file mode 100644 index 000000000..051d8dc83 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/rest_docs.go @@ -0,0 +1,185 @@ +//Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "bytes" + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + "time" + + "github.com/spf13/cobra" +) + +func printOptionsReST(buf *bytes.Buffer, cmd *cobra.Command, name string) error { + flags := cmd.NonInheritedFlags() + flags.SetOutput(buf) + if flags.HasAvailableFlags() { + buf.WriteString("Options\n") + buf.WriteString("~~~~~~~\n\n::\n\n") + flags.PrintDefaults() + buf.WriteString("\n") + } + + parentFlags := cmd.InheritedFlags() + parentFlags.SetOutput(buf) + if parentFlags.HasAvailableFlags() { + buf.WriteString("Options inherited from parent commands\n") + buf.WriteString("~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n\n::\n\n") + parentFlags.PrintDefaults() + buf.WriteString("\n") + } + return nil +} + +// linkHandler for default ReST hyperlink markup +func defaultLinkHandler(name, ref string) string { + return fmt.Sprintf("`%s <%s.rst>`_", name, ref) +} + +// GenReST creates reStructured Text output. +func GenReST(cmd *cobra.Command, w io.Writer) error { + return GenReSTCustom(cmd, w, defaultLinkHandler) +} + +// GenReSTCustom creates custom reStructured Text output. +func GenReSTCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string, string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + buf := new(bytes.Buffer) + name := cmd.CommandPath() + + short := cmd.Short + long := cmd.Long + if len(long) == 0 { + long = short + } + ref := strings.Replace(name, " ", "_", -1) + + buf.WriteString(".. 
_" + ref + ":\n\n") + buf.WriteString(name + "\n") + buf.WriteString(strings.Repeat("-", len(name)) + "\n\n") + buf.WriteString(short + "\n\n") + buf.WriteString("Synopsis\n") + buf.WriteString("~~~~~~~~\n\n") + buf.WriteString("\n" + long + "\n\n") + + if cmd.Runnable() { + buf.WriteString(fmt.Sprintf("::\n\n %s\n\n", cmd.UseLine())) + } + + if len(cmd.Example) > 0 { + buf.WriteString("Examples\n") + buf.WriteString("~~~~~~~~\n\n") + buf.WriteString(fmt.Sprintf("::\n\n%s\n\n", indentString(cmd.Example, " "))) + } + + if err := printOptionsReST(buf, cmd, name); err != nil { + return err + } + if hasSeeAlso(cmd) { + buf.WriteString("SEE ALSO\n") + buf.WriteString("~~~~~~~~\n\n") + if cmd.HasParent() { + parent := cmd.Parent() + pname := parent.CommandPath() + ref = strings.Replace(pname, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(pname, ref), parent.Short)) + cmd.VisitParents(func(c *cobra.Command) { + if c.DisableAutoGenTag { + cmd.DisableAutoGenTag = c.DisableAutoGenTag + } + }) + } + + children := cmd.Commands() + sort.Sort(byName(children)) + + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + cname := name + " " + child.Name() + ref = strings.Replace(cname, " ", "_", -1) + buf.WriteString(fmt.Sprintf("* %s \t - %s\n", linkHandler(cname, ref), child.Short)) + } + buf.WriteString("\n") + } + if !cmd.DisableAutoGenTag { + buf.WriteString("*Auto generated by spf13/cobra on " + time.Now().Format("2-Jan-2006") + "*\n") + } + _, err := buf.WriteTo(w) + return err +} + +// GenReSTTree will generate a ReST page for this command and all +// descendants in the directory given. +// This function may not work correctly if your command names have `-` in them. +// If you have `cmd` with two subcmds, `sub` and `sub-third`, +// and `sub` has a subcommand called `third`, it is undefined which +// help output will be in the file `cmd-sub-third.1`. +func GenReSTTree(cmd *cobra.Command, dir string) error { + emptyStr := func(s string) string { return "" } + return GenReSTTreeCustom(cmd, dir, emptyStr, defaultLinkHandler) +} + +// GenReSTTreeCustom is the the same as GenReSTTree, but +// with custom filePrepender and linkHandler. +func GenReSTTreeCustom(cmd *cobra.Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenReSTTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".rst" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenReSTCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} + +// adapted from: https://github.com/kr/text/blob/main/indent.go +func indentString(s, p string) string { + var res []byte + b := []byte(s) + prefix := []byte(p) + bol := true + for _, c := range b { + if bol && c != '\n' { + res = append(res, prefix...) 
+		}
+		res = append(res, c)
+		bol = c == '\n'
+	}
+	return string(res)
+}
diff --git a/vendor/github.com/spf13/cobra/doc/rest_docs.md b/vendor/github.com/spf13/cobra/doc/rest_docs.md
new file mode 100644
index 000000000..6098430ef
--- /dev/null
+++ b/vendor/github.com/spf13/cobra/doc/rest_docs.md
@@ -0,0 +1,114 @@
+# Generating ReStructured Text Docs For Your Own cobra.Command
+
+Generating ReST pages from a cobra command is incredibly easy. An example is as follows:
+
+```go
+package main
+
+import (
+	"log"
+
+	"github.com/spf13/cobra"
+	"github.com/spf13/cobra/doc"
+)
+
+func main() {
+	cmd := &cobra.Command{
+		Use:   "test",
+		Short: "my test program",
+	}
+	err := doc.GenReSTTree(cmd, "/tmp")
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+```
+
+That will get you a ReST document `/tmp/test.rst`.
+
+## Generate ReST docs for the entire command tree
+
+This program can actually generate docs for the kubectl command in the kubernetes project.
+
+```go
+package main
+
+import (
+	"log"
+	"io/ioutil"
+	"os"
+
+	"k8s.io/kubernetes/pkg/kubectl/cmd"
+	cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
+
+	"github.com/spf13/cobra/doc"
+)
+
+func main() {
+	kubectl := cmd.NewKubectlCommand(cmdutil.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard)
+	err := doc.GenReSTTree(kubectl, "./")
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+```
+
+This will generate a whole series of files, one for each command in the tree, in the directory specified (in this case "./").
+
+## Generate ReST docs for a single command
+
+You may wish to have more control over the output, or only generate for a single command, instead of the entire command tree. If this is the case you may prefer to use `GenReST` instead of `GenReSTTree`:
+
+```go
+	out := new(bytes.Buffer)
+	err := doc.GenReST(cmd, out)
+	if err != nil {
+		log.Fatal(err)
+	}
+```
+
+This will write the ReST doc for ONLY "cmd" into the `out` buffer.
+
+## Customize the output
+
+Both `GenReST` and `GenReSTTree` have alternate versions with callbacks to get some control of the output:
+
+```go
+func GenReSTTreeCustom(cmd *Command, dir string, filePrepender func(string) string, linkHandler func(string, string) string) error {
+	//...
+}
+```
+
+```go
+func GenReSTCustom(cmd *Command, out *bytes.Buffer, linkHandler func(string, string) string) error {
+	//...
+}
+```
+
+The `filePrepender` will prepend the return value given the full filepath to the rendered ReST file. A common use case is to add front matter to use the generated documentation with [Hugo](http://gohugo.io/):
+
+```go
+const fmTemplate = `---
+date: %s
+title: "%s"
+slug: %s
+url: %s
+---
+`
+filePrepender := func(filename string) string {
+	now := time.Now().Format(time.RFC3339)
+	name := filepath.Base(filename)
+	base := strings.TrimSuffix(name, path.Ext(name))
+	url := "/commands/" + strings.ToLower(base) + "/"
+	return fmt.Sprintf(fmTemplate, now, strings.Replace(base, "_", " ", -1), base, url)
+}
+```
+
+The `linkHandler` can be used to customize the rendered links to the commands, given a command name and reference.
This is useful while converting rst to html or while generating documentation with tools like Sphinx where `:ref:` is used: + +```go +// Sphinx cross-referencing format +linkHandler := func(name, ref string) string { + return fmt.Sprintf(":ref:`%s <%s>`", name, ref) +} +``` diff --git a/vendor/github.com/spf13/cobra/doc/util.go b/vendor/github.com/spf13/cobra/doc/util.go new file mode 100644 index 000000000..bffde94d5 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/util.go @@ -0,0 +1,51 @@ +// Copyright 2015 Red Hat Inc. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package doc + +import ( + "strings" + + "github.com/spf13/cobra" +) + +// Test to see if we have a reason to print See Also information in docs +// Basically this is a test for a parent command or a subcommand which is +// both not deprecated and not the autogenerated help command. +func hasSeeAlso(cmd *cobra.Command) bool { + if cmd.HasParent() { + return true + } + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + return true + } + return false +} + +// Temporary workaround for yaml lib generating incorrect yaml with long strings +// that do not contain \n. +func forceMultiLine(s string) string { + if len(s) > 60 && !strings.Contains(s, "\n") { + s = s + "\n" + } + return s +} + +type byName []*cobra.Command + +func (s byName) Len() int { return len(s) } +func (s byName) Swap(i, j int) { s[i], s[j] = s[j], s[i] } +func (s byName) Less(i, j int) bool { return s[i].Name() < s[j].Name() } diff --git a/vendor/github.com/spf13/cobra/doc/yaml_docs.go b/vendor/github.com/spf13/cobra/doc/yaml_docs.go new file mode 100644 index 000000000..96e6ad721 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/yaml_docs.go @@ -0,0 +1,174 @@ +// Copyright 2016 French Ben. All rights reserved. +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. 
+ +package doc + +import ( + "fmt" + "io" + "os" + "path/filepath" + "sort" + "strings" + + "github.com/spf13/cobra" + "github.com/spf13/pflag" + "gopkg.in/yaml.v2" +) + +type cmdOption struct { + Name string + Shorthand string `yaml:",omitempty"` + DefaultValue string `yaml:"default_value,omitempty"` + Usage string `yaml:",omitempty"` +} + +type cmdDoc struct { + Name string + Synopsis string `yaml:",omitempty"` + Description string `yaml:",omitempty"` + Usage string `yaml:",omitempty"` + Options []cmdOption `yaml:",omitempty"` + InheritedOptions []cmdOption `yaml:"inherited_options,omitempty"` + Example string `yaml:",omitempty"` + SeeAlso []string `yaml:"see_also,omitempty"` +} + +// GenYamlTree creates yaml structured ref files for this command and all descendants +// in the directory given. This function may not work +// correctly if your command names have `-` in them. If you have `cmd` with two +// subcmds, `sub` and `sub-third`, and `sub` has a subcommand called `third` +// it is undefined which help output will be in the file `cmd-sub-third.1`. +func GenYamlTree(cmd *cobra.Command, dir string) error { + identity := func(s string) string { return s } + emptyStr := func(s string) string { return "" } + return GenYamlTreeCustom(cmd, dir, emptyStr, identity) +} + +// GenYamlTreeCustom creates yaml structured ref files. +func GenYamlTreeCustom(cmd *cobra.Command, dir string, filePrepender, linkHandler func(string) string) error { + for _, c := range cmd.Commands() { + if !c.IsAvailableCommand() || c.IsAdditionalHelpTopicCommand() { + continue + } + if err := GenYamlTreeCustom(c, dir, filePrepender, linkHandler); err != nil { + return err + } + } + + basename := strings.Replace(cmd.CommandPath(), " ", "_", -1) + ".yaml" + filename := filepath.Join(dir, basename) + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + if _, err := io.WriteString(f, filePrepender(filename)); err != nil { + return err + } + if err := GenYamlCustom(cmd, f, linkHandler); err != nil { + return err + } + return nil +} + +// GenYaml creates yaml output. +func GenYaml(cmd *cobra.Command, w io.Writer) error { + return GenYamlCustom(cmd, w, func(s string) string { return s }) +} + +// GenYamlCustom creates custom yaml output. 
+func GenYamlCustom(cmd *cobra.Command, w io.Writer, linkHandler func(string) string) error { + cmd.InitDefaultHelpCmd() + cmd.InitDefaultHelpFlag() + + yamlDoc := cmdDoc{} + yamlDoc.Name = cmd.CommandPath() + + yamlDoc.Synopsis = forceMultiLine(cmd.Short) + yamlDoc.Description = forceMultiLine(cmd.Long) + + if cmd.Runnable() { + yamlDoc.Usage = cmd.UseLine() + } + + if len(cmd.Example) > 0 { + yamlDoc.Example = cmd.Example + } + + flags := cmd.NonInheritedFlags() + if flags.HasFlags() { + yamlDoc.Options = genFlagResult(flags) + } + flags = cmd.InheritedFlags() + if flags.HasFlags() { + yamlDoc.InheritedOptions = genFlagResult(flags) + } + + if hasSeeAlso(cmd) { + result := []string{} + if cmd.HasParent() { + parent := cmd.Parent() + result = append(result, parent.CommandPath()+" - "+parent.Short) + } + children := cmd.Commands() + sort.Sort(byName(children)) + for _, child := range children { + if !child.IsAvailableCommand() || child.IsAdditionalHelpTopicCommand() { + continue + } + result = append(result, child.Name()+" - "+child.Short) + } + yamlDoc.SeeAlso = result + } + + final, err := yaml.Marshal(&yamlDoc) + if err != nil { + fmt.Println(err) + os.Exit(1) + } + + if _, err := w.Write(final); err != nil { + return err + } + return nil +} + +func genFlagResult(flags *pflag.FlagSet) []cmdOption { + var result []cmdOption + + flags.VisitAll(func(flag *pflag.Flag) { + // Todo, when we mark a shorthand is deprecated, but specify an empty message. + // The flag.ShorthandDeprecated is empty as the shorthand is deprecated. + // Using len(flag.ShorthandDeprecated) > 0 can't handle this, others are ok. + if !(len(flag.ShorthandDeprecated) > 0) && len(flag.Shorthand) > 0 { + opt := cmdOption{ + flag.Name, + flag.Shorthand, + flag.DefValue, + forceMultiLine(flag.Usage), + } + result = append(result, opt) + } else { + opt := cmdOption{ + Name: flag.Name, + DefaultValue: forceMultiLine(flag.DefValue), + Usage: forceMultiLine(flag.Usage), + } + result = append(result, opt) + } + }) + + return result +} diff --git a/vendor/github.com/spf13/cobra/doc/yaml_docs.md b/vendor/github.com/spf13/cobra/doc/yaml_docs.md new file mode 100644 index 000000000..1a9b7c6a3 --- /dev/null +++ b/vendor/github.com/spf13/cobra/doc/yaml_docs.md @@ -0,0 +1,112 @@ +# Generating Yaml Docs For Your Own cobra.Command + +Generating yaml files from a cobra command is incredibly easy. 
An example is as follows:
+
+```go
+package main
+
+import (
+	"log"
+
+	"github.com/spf13/cobra"
+	"github.com/spf13/cobra/doc"
+)
+
+func main() {
+	cmd := &cobra.Command{
+		Use:   "test",
+		Short: "my test program",
+	}
+	err := doc.GenYamlTree(cmd, "/tmp")
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+```
+
+That will get you a Yaml document `/tmp/test.yaml`.
+
+## Generate yaml docs for the entire command tree
+
+This program can actually generate docs for the kubectl command in the kubernetes project.
+
+```go
+package main
+
+import (
+	"io/ioutil"
+	"log"
+	"os"
+
+	"k8s.io/kubernetes/pkg/kubectl/cmd"
+	cmdutil "k8s.io/kubernetes/pkg/kubectl/cmd/util"
+
+	"github.com/spf13/cobra/doc"
+)
+
+func main() {
+	kubectl := cmd.NewKubectlCommand(cmdutil.NewFactory(nil), os.Stdin, ioutil.Discard, ioutil.Discard)
+	err := doc.GenYamlTree(kubectl, "./")
+	if err != nil {
+		log.Fatal(err)
+	}
+}
+```
+
+This will generate a whole series of files, one for each command in the tree, in the directory specified (in this case "./").
+
+## Generate yaml docs for a single command
+
+You may wish to have more control over the output, or only generate for a single command, instead of the entire command tree. If this is the case you may prefer to use `GenYaml` instead of `GenYamlTree`:
+
+```go
+	out := new(bytes.Buffer)
+	doc.GenYaml(cmd, out)
+```
+
+This will write the yaml doc for ONLY "cmd" into the `out` buffer.
+
+## Customize the output
+
+Both `GenYaml` and `GenYamlTree` have alternate versions with callbacks to get some control of the output:
+
+```go
+func GenYamlTreeCustom(cmd *Command, dir string, filePrepender, linkHandler func(string) string) error {
+	//...
+}
+```
+
+```go
+func GenYamlCustom(cmd *Command, out *bytes.Buffer, linkHandler func(string) string) error {
+	//...
+}
+```
+
+The `filePrepender` will prepend the return value given the full filepath to the rendered Yaml file.
A common use case is to add front matter to use the generated documentation with [Hugo](http://gohugo.io/): + +```go +const fmTemplate = `--- +date: %s +title: "%s" +slug: %s +url: %s +--- +` + +filePrepender := func(filename string) string { + now := time.Now().Format(time.RFC3339) + name := filepath.Base(filename) + base := strings.TrimSuffix(name, path.Ext(name)) + url := "/commands/" + strings.ToLower(base) + "/" + return fmt.Sprintf(fmTemplate, now, strings.Replace(base, "_", " ", -1), base, url) +} +``` + +The `linkHandler` can be used to customize the rendered internal links to the commands, given a filename: + +```go +linkHandler := func(name string) string { + base := strings.TrimSuffix(name, path.Ext(name)) + return "/commands/" + strings.ToLower(base) + "/" +} +``` diff --git a/vendor/modules.txt b/vendor/modules.txt index 9787675a3..8bb2a7ac0 100644 --- a/vendor/modules.txt +++ b/vendor/modules.txt @@ -51,6 +51,8 @@ github.com/briandowns/spinner ## explicit github.com/charmbracelet/glamour github.com/charmbracelet/glamour/ansi +# github.com/cpuguy83/go-md2man/v2 v2.0.0 +github.com/cpuguy83/go-md2man/v2/md2man # github.com/danieljoos/wincred v1.1.0 github.com/danieljoos/wincred # github.com/danwakefield/fnmatch v0.0.0-20160403171240-cbb64ac3d964 @@ -123,6 +125,9 @@ github.com/hashicorp/hcl/json/scanner github.com/hashicorp/hcl/json/token # github.com/inconshreveable/mousetrap v1.0.0 github.com/inconshreveable/mousetrap +# github.com/joeshaw/envdecode v0.0.0-20200121155833-099f1fc765bd +## explicit +github.com/joeshaw/envdecode # github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 ## explicit github.com/kballard/go-shellquote @@ -216,6 +221,10 @@ github.com/pmezard/go-difflib/difflib # github.com/rivo/uniseg v0.2.0 ## explicit github.com/rivo/uniseg +# github.com/russross/blackfriday/v2 v2.0.1 +github.com/russross/blackfriday/v2 +# github.com/shurcooL/sanitized_anchor_name v1.0.0 +github.com/shurcooL/sanitized_anchor_name # github.com/spf13/afero v1.1.2 github.com/spf13/afero github.com/spf13/afero/mem @@ -224,6 +233,7 @@ github.com/spf13/cast # github.com/spf13/cobra v1.1.3 ## explicit github.com/spf13/cobra +github.com/spf13/cobra/doc # github.com/spf13/jwalterweatherman v1.0.0 github.com/spf13/jwalterweatherman # github.com/spf13/pflag v1.0.5
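For reference, a minimal sketch of how the vendored `doc` package above can be wired up end to end — it simply combines `GenYamlTreeCustom` with the Hugo-style `filePrepender` and the `linkHandler` shown in these docs. The `example` command and the `./` output directory are illustrative assumptions, not something this change prescribes:

```go
package main

import (
	"fmt"
	"log"
	"path"
	"path/filepath"
	"strings"
	"time"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

const fmTemplate = `---
date: %s
title: "%s"
slug: %s
url: %s
---
`

func main() {
	// Placeholder command; a real CLI would pass its root command here.
	cmd := &cobra.Command{
		Use:   "example",
		Short: "an example program",
	}

	// Prepend Hugo-style front matter to every generated file.
	filePrepender := func(filename string) string {
		now := time.Now().Format(time.RFC3339)
		name := filepath.Base(filename)
		base := strings.TrimSuffix(name, path.Ext(name))
		url := "/commands/" + strings.ToLower(base) + "/"
		return fmt.Sprintf(fmTemplate, now, strings.Replace(base, "_", " ", -1), base, url)
	}

	// Map a generated filename to the pretty URL used in the front matter.
	linkHandler := func(name string) string {
		base := strings.TrimSuffix(name, path.Ext(name))
		return "/commands/" + strings.ToLower(base) + "/"
	}

	if err := doc.GenYamlTreeCustom(cmd, "./", filePrepender, linkHandler); err != nil {
		log.Fatal(err)
	}
}
```

The same shape works for the other generators; `GenReSTTreeCustom` differs only in that its `linkHandler` also receives the target reference (`func(name, ref string) string`).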