update chroma to v0.8.0 (#12337)
parent 4315e313d1
commit bfb25e4be1
19 changed files with 469 additions and 97 deletions

go.mod  (2 changes)

@@ -20,7 +20,7 @@ require (
 github.com/BurntSushi/toml v0.3.1
 github.com/PuerkitoBio/goquery v1.5.1
 github.com/RoaringBitmap/roaring v0.4.23 // indirect
-github.com/alecthomas/chroma v0.7.3
+github.com/alecthomas/chroma v0.8.0
 github.com/bgentry/speakeasy v0.1.0 // indirect
 github.com/blevesearch/bleve v1.0.7
 github.com/couchbase/gomemcached v0.0.0-20191004160342-7b5da2ec40b2 // indirect

go.sum  (4 changes)

@@ -69,8 +69,8 @@ github.com/alcortesm/tgz v0.0.0-20161220082320-9c5fe88206d7/go.mod h1:6zEj6s6u/g
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38 h1:smF2tmSOzy2Mm+0dGI2AIUHY+w0BUc+4tn40djz7+6U=
 github.com/alecthomas/assert v0.0.0-20170929043011-405dbfeb8e38/go.mod h1:r7bzyVFMNntcxPZXK3/+KdruV1H5KSlyVY0gc+NgInI=
 github.com/alecthomas/chroma v0.7.2-0.20200305040604-4f3623dce67a/go.mod h1:fv5SzZPFJbwp2NXJWpFIX7DZS4HgV1K4ew4Pc2OZD9s=
-github.com/alecthomas/chroma v0.7.3 h1:NfdAERMy+esYQs8OXk0I868/qDxxCEo7FMz1WIqMAeI=
-github.com/alecthomas/chroma v0.7.3/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
+github.com/alecthomas/chroma v0.8.0 h1:HS+HE97sgcqjQGu5uVr8jIE55Mmh5UeQ7kckAhHg2pY=
+github.com/alecthomas/chroma v0.8.0/go.mod h1:sko8vR34/90zvl5QdcUdvzL3J8NKjAUx9va9jPuFNoM=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721 h1:JHZL0hZKJ1VENNfmXvHbgYlbUOvpzYzvy2aZU5gXVeo=
 github.com/alecthomas/colour v0.0.0-20160524082231-60882d9e2721/go.mod h1:QO9JBoKquHd+jz9nshCh40fOfO+JzsoXy8qTHF68zU0=
 github.com/alecthomas/kong v0.1.17-0.20190424132513-439c674f7ae0/go.mod h1:+inYUSluD+p4L8KdviBSgzcqEjUQOfC5fQDRFuc36lI=

vendor/github.com/alecthomas/chroma/.golangci.yml  (5 changes, generated, vendored)

@@ -20,6 +20,11 @@ linters:
 - wsl
 - gomnd
 - gocognit
+- goerr113
+- nolintlint
+- testpackage
+- godot
+- nestif
 
 linters-settings:
 govet:

vendor/github.com/alecthomas/chroma/.travis.yml  (2 changes, generated, vendored)

@@ -4,7 +4,7 @@ go:
 - "1.13.x"
 script:
 - go test -v ./...
-- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.22.2
+- curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | bash -s v1.26.0
 - ./bin/golangci-lint run
 - git clean -fdx .
 after_success:

vendor/github.com/alecthomas/chroma/lexers/b/bash.go  (2 changes, generated, vendored)

@@ -36,7 +36,7 @@ var Bash = internal.Register(MustNewLexer(
 {`\b(if|fi|else|while|do|done|for|then|return|function|case|select|continue|until|esac|elif)(\s*)\b`, ByGroups(Keyword, Text), nil},
 {"\\b(alias|bg|bind|break|builtin|caller|cd|command|compgen|complete|declare|dirs|disown|echo|enable|eval|exec|exit|export|false|fc|fg|getopts|hash|help|history|jobs|kill|let|local|logout|popd|printf|pushd|pwd|read|readonly|set|shift|shopt|source|suspend|test|time|times|trap|true|type|typeset|ulimit|umask|unalias|unset|wait)(?=[\\s)`])", NameBuiltin, nil},
 {`\A#!.+\n`, CommentPreproc, nil},
-{`#.*\S`, CommentSingle, nil},
+{`#.*(\S|$)`, CommentSingle, nil},
 {`\\[\w\W]`, LiteralStringEscape, nil},
 {`(\b\w+)(\s*)(\+?=)`, ByGroups(NameVariable, Text, Operator), nil},
 {`[\[\]{}()=]`, Operator, nil},

vendor/github.com/alecthomas/chroma/lexers/c/caddyfile.go  (new file, 206 lines, generated, vendored)

@@ -0,0 +1,206 @@
+package c
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// caddyfileCommon are the rules common to both of the lexer variants
+var caddyfileCommon = Rules{
+	"site_block_common": {
+		// Import keyword
+		{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
+		// Matcher definition
+		{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
+		// Matcher token stub for docs
+		{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
+		// These cannot have matchers but may have things that look like
+		// matchers in their arguments, so we just parse as a subdirective.
+		{`try_files`, Keyword, Push("subdirective")},
+		// These are special, they can nest more directives
+		{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
+		// Any other directive
+		{`[^\s#]+`, Keyword, Push("directive")},
+		Include("base"),
+	},
+	"matcher": {
+		{`\{`, Punctuation, Push("block")},
+		// Not can be one-liner
+		{`not`, Keyword, Push("deep_not_matcher")},
+		// Any other same-line matcher
+		{`[^\s#]+`, Keyword, Push("arguments")},
+		// Terminators
+		{`\n`, Text, Pop(1)},
+		{`\}`, Punctuation, Pop(1)},
+		Include("base"),
+	},
+	"block": {
+		{`\}`, Punctuation, Pop(2)},
+		// Not can be one-liner
+		{`not`, Keyword, Push("not_matcher")},
+		// Any other subdirective
+		{`[^\s#]+`, Keyword, Push("subdirective")},
+		Include("base"),
+	},
+	"nested_block": {
+		{`\}`, Punctuation, Pop(2)},
+		// Matcher definition
+		{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
+		// Something that starts with literally < is probably a docs stub
+		{`\<[^#]+\>`, Keyword, Push("nested_directive")},
+		// Any other directive
+		{`[^\s#]+`, Keyword, Push("nested_directive")},
+		Include("base"),
+	},
+	"not_matcher": {
+		{`\}`, Punctuation, Pop(2)},
+		{`\{(?=\s)`, Punctuation, Push("block")},
+		{`[^\s#]+`, Keyword, Push("arguments")},
+		{`\s+`, Text, nil},
+	},
+	"deep_not_matcher": {
+		{`\}`, Punctuation, Pop(2)},
+		{`\{(?=\s)`, Punctuation, Push("block")},
+		{`[^\s#]+`, Keyword, Push("deep_subdirective")},
+		{`\s+`, Text, nil},
+	},
+	"directive": {
+		{`\{(?=\s)`, Punctuation, Push("block")},
+		Include("matcher_token"),
+		Include("comments_pop_1"),
+		{`\n`, Text, Pop(1)},
+		Include("base"),
+	},
+	"nested_directive": {
+		{`\{(?=\s)`, Punctuation, Push("nested_block")},
+		Include("matcher_token"),
+		Include("comments_pop_1"),
+		{`\n`, Text, Pop(1)},
+		Include("base"),
+	},
+	"subdirective": {
+		{`\{(?=\s)`, Punctuation, Push("block")},
+		Include("comments_pop_1"),
+		{`\n`, Text, Pop(1)},
+		Include("base"),
+	},
+	"arguments": {
+		{`\{(?=\s)`, Punctuation, Push("block")},
+		Include("comments_pop_2"),
+		{`\\\n`, Text, nil}, // Skip escaped newlines
+		{`\n`, Text, Pop(2)},
+		Include("base"),
+	},
+	"deep_subdirective": {
+		{`\{(?=\s)`, Punctuation, Push("block")},
+		Include("comments_pop_3"),
+		{`\n`, Text, Pop(3)},
+		Include("base"),
+	},
+	"matcher_token": {
+		{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
+		{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
+		{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
+		{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
+	},
+	"comments": {
+		{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
+		{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
+	},
+	"comments_pop_1": {
+		{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
+		{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
+	},
+	"comments_pop_2": {
+		{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
+		{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
+	},
+	"comments_pop_3": {
+		{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
+		{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
+	},
+	"base": {
+		Include("comments"),
+		{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
+		{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
+		{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
+		{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
+		{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
+		{`\[(?=[^#{}$]+\])`, Punctuation, nil},
+		{`\]|\|`, Punctuation, nil},
+		{`[^\s#{}$\]]+`, LiteralString, nil},
+		{`/[^\s#]*`, Name, nil},
+		{`\s+`, Text, nil},
+	},
+}
+
+// Caddyfile lexer.
+var Caddyfile = internal.Register(MustNewLexer(
+	&Config{
+		Name: "Caddyfile",
+		Aliases: []string{"caddyfile", "caddy"},
+		Filenames: []string{"Caddyfile*"},
+		MimeTypes: []string{},
+	},
+	Rules{
+		"root": {
+			Include("comments"),
+			// Global options block
+			{`^\s*(\{)\s*$`, ByGroups(Punctuation), Push("globals")},
+			// Snippets
+			{`(\([^\s#]+\))(\s*)(\{)`, ByGroups(NameVariableAnonymous, Text, Punctuation), Push("snippet")},
+			// Site label
+			{`[^#{(\s,]+`, GenericHeading, Push("label")},
+			// Site label with placeholder
+			{`\{[\w+.\$-]+\}`, LiteralStringEscape, Push("label")},
+			{`\s+`, Text, nil},
+		},
+		"globals": {
+			{`\}`, Punctuation, Pop(1)},
+			{`[^\s#]+`, Keyword, Push("directive")},
+			Include("base"),
+		},
+		"snippet": {
+			{`\}`, Punctuation, Pop(1)},
+			// Matcher definition
+			{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
+			// Any directive
+			{`[^\s#]+`, Keyword, Push("directive")},
+			Include("base"),
+		},
+		"label": {
+			// Allow multiple labels, comma separated, newlines after
+			// a comma means another label is coming
+			{`,\s*\n?`, Text, nil},
+			{` `, Text, nil},
+			// Site label with placeholder
+			{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil},
+			// Site label
+			{`[^#{(\s,]+`, GenericHeading, nil},
+			// Comment after non-block label (hack because comments end in \n)
+			{`#.*\n`, CommentSingle, Push("site_block")},
+			// Note: if \n, we'll never pop out of the site_block, it's valid
+			{`\{(?=\s)|\n`, Punctuation, Push("site_block")},
+		},
+		"site_block": {
+			{`\}`, Punctuation, Pop(2)},
+			Include("site_block_common"),
+		},
+	}.Merge(caddyfileCommon),
+))
+
+// Caddyfile directive-only lexer.
+var CaddyfileDirectives = internal.Register(MustNewLexer(
+	&Config{
+		Name: "Caddyfile Directives",
+		Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
+		Filenames: []string{},
+		MimeTypes: []string{},
+	},
+	Rules{
+		// Same as "site_block" in Caddyfile
+		"root": {
+			Include("site_block_common"),
+		},
+	}.Merge(caddyfileCommon),
+))

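The new Caddyfile lexer registers itself under the aliases shown above, so it can be looked up and used like any other chroma lexer. A minimal usage sketch, not part of this diff, assuming chroma's `lexers` and `quick` packages and the stock "terminal" formatter and "monokai" style:

package main

import (
	"os"

	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/quick"
)

func main() {
	src := `example.com {
	reverse_proxy localhost:9000
}`

	// The lexer added in this commit is found by alias ("caddyfile" or "caddy").
	if lexers.Get("caddyfile") == nil {
		panic("caddyfile lexer not registered")
	}

	// It can then be used through the normal highlighting entry points.
	if err := quick.Highlight(os.Stdout, src, "caddyfile", "terminal", "monokai"); err != nil {
		panic(err)
	}
}
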
vendor/github.com/alecthomas/chroma/lexers/circular/php.go  (137 changes, generated, vendored)

@@ -1,15 +1,12 @@
 package circular
 
 import (
-	"strings"
-
 	. "github.com/alecthomas/chroma" // nolint
-	"github.com/alecthomas/chroma/lexers/h"
 	"github.com/alecthomas/chroma/lexers/internal"
 )
 
-// PHP lexer.
-var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
+// PHP lexer for pure PHP code (not embedded in HTML).
+var PHP = internal.Register(MustNewLexer(
 	&Config{
 		Name: "PHP",
 		Aliases: []string{"php", "php3", "php4", "php5"},

@@ -19,73 +16,65 @@ var PHP = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
 CaseInsensitive: true,
 EnsureNL: true,
 },
-Rules{
-"root": {
-{`<\?(php)?`, CommentPreproc, Push("php")},
-{`[^<]+`, Other, nil},
-{`<`, Other, nil},
-},
-"php": {
-{`\?>`, CommentPreproc, Pop(1)},
-{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
-{`\s+`, Text, nil},
-{`#.*?\n`, CommentSingle, nil},
-{`//.*?\n`, CommentSingle, nil},
-{`/\*\*/`, CommentMultiline, nil},
-{`/\*\*.*?\*/`, LiteralStringDoc, nil},
-{`/\*.*?\*/`, CommentMultiline, nil},
-{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
-{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
-{`\?`, Operator, nil},
-{`[\[\]{}();,]+`, Punctuation, nil},
-{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
-{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
-{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
-{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
-{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
-{`(true|false|null)\b`, KeywordConstant, nil},
-Include("magicconstants"),
-{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
-{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
-{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
-{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
-{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
-{`0[0-7]+`, LiteralNumberOct, nil},
-{`0x[a-f0-9]+`, LiteralNumberHex, nil},
-{`\d+`, LiteralNumberInteger, nil},
-{`0b[01]+`, LiteralNumberBin, nil},
-{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
-{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
-{`"`, LiteralStringDouble, Push("string")},
-},
-"magicfuncs": {
-{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
-},
-"magicconstants": {
-{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
-},
-"classname": {
-{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
-},
-"functionname": {
-Include("magicfuncs"),
-{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
-Default(Pop(1)),
-},
-"string": {
-{`"`, LiteralStringDouble, Pop(1)},
-{`[^{$"\\]+`, LiteralStringDouble, nil},
-{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
-{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
-{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
-{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
-{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
-{`[${\\]`, LiteralStringDouble, nil},
-},
+phpCommonRules.Rename("php", "root"),
+))
+
+var phpCommonRules = Rules{
+"php": {
+{`\?>`, CommentPreproc, Pop(1)},
+{`(<<<)([\'"]?)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)(\2\n.*?\n\s*)(\3)(;?)(\n)`, ByGroups(LiteralString, LiteralString, LiteralStringDelimiter, LiteralString, LiteralStringDelimiter, Punctuation, Text), nil},
+{`\s+`, Text, nil},
+{`#.*?\n`, CommentSingle, nil},
+{`//.*?\n`, CommentSingle, nil},
+{`/\*\*/`, CommentMultiline, nil},
+{`/\*\*.*?\*/`, LiteralStringDoc, nil},
+{`/\*.*?\*/`, CommentMultiline, nil},
+{`(->|::)(\s*)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Operator, Text, NameAttribute), nil},
+{`[~!%^&*+=|:.<>/@-]+`, Operator, nil},
+{`\?`, Operator, nil},
+{`[\[\]{}();,]+`, Punctuation, nil},
+{`(class)(\s+)`, ByGroups(Keyword, Text), Push("classname")},
+{`(function)(\s*)(?=\()`, ByGroups(Keyword, Text), nil},
+{`(function)(\s+)(&?)(\s*)`, ByGroups(Keyword, Text, Operator, Text), Push("functionname")},
+{`(const)(\s+)((?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)`, ByGroups(Keyword, Text, NameConstant), nil},
+{`(and|E_PARSE|old_function|E_ERROR|or|as|E_WARNING|parent|eval|PHP_OS|break|exit|case|extends|PHP_VERSION|cfunction|FALSE|print|for|require|continue|foreach|require_once|declare|return|default|static|do|switch|die|stdClass|echo|else|TRUE|elseif|var|empty|if|xor|enddeclare|include|virtual|endfor|include_once|while|endforeach|global|endif|list|endswitch|new|endwhile|not|array|E_ALL|NULL|final|php_user_filter|interface|implements|public|private|protected|abstract|clone|try|catch|throw|this|use|namespace|trait|yield|finally)\b`, Keyword, nil},
+{`(true|false|null)\b`, KeywordConstant, nil},
+Include("magicconstants"),
+{`\$\{\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*\}`, NameVariable, nil},
+{`\$+(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameVariable, nil},
+{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameOther, nil},
+{`(\d+\.\d*|\d*\.\d+)(e[+-]?[0-9]+)?`, LiteralNumberFloat, nil},
+{`\d+e[+-]?[0-9]+`, LiteralNumberFloat, nil},
+{`0[0-7]+`, LiteralNumberOct, nil},
+{`0x[a-f0-9]+`, LiteralNumberHex, nil},
+{`\d+`, LiteralNumberInteger, nil},
+{`0b[01]+`, LiteralNumberBin, nil},
+{`'([^'\\]*(?:\\.[^'\\]*)*)'`, LiteralStringSingle, nil},
+{"`([^`\\\\]*(?:\\\\.[^`\\\\]*)*)`", LiteralStringBacktick, nil},
+{`"`, LiteralStringDouble, Push("string")},
 },
-).SetAnalyser(func(text string) float32 {
-if strings.Contains(text, "<?php") {
-return 0.5
-}
-return 0.0
-})))
+"magicfuncs": {
+{Words(``, `\b`, `__construct`, `__destruct`, `__call`, `__callStatic`, `__get`, `__set`, `__isset`, `__unset`, `__sleep`, `__wakeup`, `__toString`, `__invoke`, `__set_state`, `__clone`, `__debugInfo`), NameFunctionMagic, nil},
+},
+"magicconstants": {
+{Words(``, `\b`, `__LINE__`, `__FILE__`, `__DIR__`, `__FUNCTION__`, `__CLASS__`, `__TRAIT__`, `__METHOD__`, `__NAMESPACE__`), NameConstant, nil},
+},
+"classname": {
+{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameClass, Pop(1)},
+},
+"functionname": {
+Include("magicfuncs"),
+{`(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*`, NameFunction, Pop(1)},
+Default(Pop(1)),
+},
+"string": {
+{`"`, LiteralStringDouble, Pop(1)},
+{`[^{$"\\]+`, LiteralStringDouble, nil},
+{`\\([nrt"$\\]|[0-7]{1,3}|x[0-9a-f]{1,2})`, LiteralStringEscape, nil},
+{`\$(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*(\[\S+?\]|->(?:[\\_a-z]|[^\x00-\x7f])(?:[\\\w]|[^\x00-\x7f])*)?`, LiteralStringInterpol, nil},
+{`(\{\$\{)(.*?)(\}\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
+{`(\{)(\$.*?)(\})`, ByGroups(LiteralStringInterpol, UsingSelf("root"), LiteralStringInterpol), nil},
+{`(\$\{)(\S+)(\})`, ByGroups(LiteralStringInterpol, NameVariable, LiteralStringInterpol), nil},
+{`[${\\]`, LiteralStringDouble, nil},
+},
+}

vendor/github.com/alecthomas/chroma/lexers/circular/phtml.go  (new file, 34 lines, generated, vendored)

@@ -0,0 +1,34 @@
+package circular
+
+import (
+	"strings"
+
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/h"
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// PHTML lexer is PHP in HTML.
+var PHTML = internal.Register(DelegatingLexer(h.HTML, MustNewLexer(
+	&Config{
+		Name: "PHTML",
+		Aliases: []string{"phtml"},
+		Filenames: []string{"*.phtml"},
+		MimeTypes: []string{"application/x-php", "application/x-httpd-php", "application/x-httpd-php3", "application/x-httpd-php4", "application/x-httpd-php5"},
+		DotAll: true,
+		CaseInsensitive: true,
+		EnsureNL: true,
+	},
+	Rules{
+		"root": {
+			{`<\?(php)?`, CommentPreproc, Push("php")},
+			{`[^<]+`, Other, nil},
+			{`<`, Other, nil},
+		},
+	}.Merge(phpCommonRules),
+).SetAnalyser(func(text string) float32 {
+	if strings.Contains(text, "<?php") {
+		return 0.5
+	}
+	return 0.0
+})))

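With this split, the PHP lexer handles bare PHP source directly, while the new PHTML lexer keeps the previous behaviour of delegating everything outside `<?php ... ?>` to the HTML lexer. A rough sketch of how the two are now selected, assuming chroma's `lexers.Match` and `lexers.Get` helpers (the printed names follow the Configs above):

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// "*.phtml" files match the new delegating PHTML lexer.
	fmt.Println(lexers.Match("index.phtml").Config().Name) // PHTML

	// Asking for "php" by alias returns the pure-PHP lexer,
	// which no longer wraps the input in the HTML lexer.
	fmt.Println(lexers.Get("php").Config().Name) // PHP
}
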
vendor/github.com/alecthomas/chroma/lexers/g/go.go  (1 change, generated, vendored)

@@ -15,6 +15,7 @@ var Go = internal.Register(MustNewLexer(
 Aliases: []string{"go", "golang"},
 Filenames: []string{"*.go"},
 MimeTypes: []string{"text/x-gosrc"},
+EnsureNL: true,
 },
 Rules{
 "root": {

vendor/github.com/alecthomas/chroma/lexers/h/http.go  (4 changes, generated, vendored)

@@ -19,8 +19,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
 },
 Rules{
 "root": {
-{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
-{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
+{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
+{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
 },
 "headers": {
 {`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},

vendor/github.com/alecthomas/chroma/lexers/i/ini.go  (2 changes, generated, vendored)

@@ -10,7 +10,7 @@ var Ini = internal.Register(MustNewLexer(
 &Config{
 Name: "INI",
 Aliases: []string{"ini", "cfg", "dosini"},
-Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig"},
+Filenames: []string{"*.ini", "*.cfg", "*.inf", ".gitconfig", ".editorconfig"},
 MimeTypes: []string{"text/x-ini", "text/inf"},
 },
 Rules{

vendor/github.com/alecthomas/chroma/lexers/lexers.go  (1 change, generated, vendored)

@@ -32,6 +32,7 @@ import (
 _ "github.com/alecthomas/chroma/lexers/w"
 _ "github.com/alecthomas/chroma/lexers/x"
 _ "github.com/alecthomas/chroma/lexers/y"
+_ "github.com/alecthomas/chroma/lexers/z"
 )
 
 // Registry of Lexers.

vendor/github.com/alecthomas/chroma/lexers/p/pony.go  (new file, 59 lines, generated, vendored)

@@ -0,0 +1,59 @@
+package p
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Pony lexer.
+var Pony = internal.Register(MustNewLexer(
+	&Config{
+		Name: "Pony",
+		Aliases: []string{"pony"},
+		Filenames: []string{"*.pony"},
+		MimeTypes: []string{},
+	},
+	Rules{
+		"root": {
+			{`\n`, Text, nil},
+			{`[^\S\n]+`, Text, nil},
+			{`//.*\n`, CommentSingle, nil},
+			{`/\*`, CommentMultiline, Push("nested_comment")},
+			{`"""(?:.|\n)*?"""`, LiteralStringDoc, nil},
+			{`"`, LiteralString, Push("string")},
+			{`\'.*\'`, LiteralStringChar, nil},
+			{`=>|[]{}:().~;,|&!^?[]`, Punctuation, nil},
+			{Words(``, `\b`, `addressof`, `and`, `as`, `consume`, `digestof`, `is`, `isnt`, `not`, `or`), OperatorWord, nil},
+			{`!=|==|<<|>>|[-+/*%=<>]`, Operator, nil},
+			{Words(``, `\b`, `box`, `break`, `compile_error`, `compile_intrinsic`, `continue`, `do`, `else`, `elseif`, `embed`, `end`, `error`, `for`, `if`, `ifdef`, `in`, `iso`, `lambda`, `let`, `match`, `object`, `recover`, `ref`, `repeat`, `return`, `tag`, `then`, `this`, `trn`, `try`, `until`, `use`, `var`, `val`, `where`, `while`, `with`, `#any`, `#read`, `#send`, `#share`), Keyword, nil},
+			{`(actor|class|struct|primitive|interface|trait|type)((?:\s)+)`, ByGroups(Keyword, Text), Push("typename")},
+			{`(new|fun|be)((?:\s)+)`, ByGroups(Keyword, Text), Push("methodname")},
+			{Words(``, `\b`, `U8`, `U16`, `U32`, `U64`, `ULong`, `USize`, `U128`, `Unsigned`, `Stringable`, `String`, `StringBytes`, `StringRunes`, `InputNotify`, `InputStream`, `Stdin`, `ByteSeq`, `ByteSeqIter`, `OutStream`, `StdStream`, `SourceLoc`, `I8`, `I16`, `I32`, `I64`, `ILong`, `ISize`, `I128`, `Signed`, `Seq`, `RuntimeOptions`, `Real`, `Integer`, `SignedInteger`, `UnsignedInteger`, `FloatingPoint`, `Number`, `Int`, `ReadSeq`, `ReadElement`, `Pointer`, `Platform`, `NullablePointer`, `None`, `Iterator`, `F32`, `F64`, `Float`, `Env`, `DoNotOptimise`, `DisposableActor`, `Less`, `Equal`, `Greater`, `Compare`, `HasEq`, `Equatable`, `Comparable`, `Bool`, `AsioEventID`, `AsioEventNotify`, `AsioEvent`, `Array`, `ArrayKeys`, `ArrayValues`, `ArrayPairs`, `Any`, `AmbientAuth`), KeywordType, nil},
+			{`_?[A-Z]\w*`, NameClass, nil},
+			{`string\(\)`, NameOther, nil},
+			{`(\d+\.\d*|\.\d+|\d+)[eE][+-]?\d+`, LiteralNumberFloat, nil},
+			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
+			{`\d+`, LiteralNumberInteger, nil},
+			{`(true|false)\b`, Keyword, nil},
+			{`_\d*`, Name, nil},
+			{`_?[a-z][\w\'_]*`, Name, nil},
+		},
+		"typename": {
+			{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[A-Z]\w*)`, ByGroups(Keyword, Text, NameClass), Pop(1)},
+		},
+		"methodname": {
+			{`(iso|trn|ref|val|box|tag)?((?:\s)*)(_?[a-z]\w*)`, ByGroups(Keyword, Text, NameFunction), Pop(1)},
+		},
+		"nested_comment": {
+			{`[^*/]+`, CommentMultiline, nil},
+			{`/\*`, CommentMultiline, Push()},
+			{`\*/`, CommentMultiline, Pop(1)},
+			{`[*/]`, CommentMultiline, nil},
+		},
+		"string": {
+			{`"`, LiteralString, Pop(1)},
+			{`\\"`, LiteralString, nil},
+			{`[^\\"]+`, LiteralString, nil},
+		},
+	},
+))

vendor/github.com/alecthomas/chroma/lexers/t/toml.go  (2 changes, generated, vendored)

@@ -22,7 +22,7 @@ var TOML = internal.Register(MustNewLexer(
 {`[+-]?[0-9](_?\d)*`, LiteralNumberInteger, nil},
 {`"(\\\\|\\"|[^"])*"`, StringDouble, nil},
 {`'(\\\\|\\'|[^'])*'`, StringSingle, nil},
-{`[.,=\[\]]`, Punctuation, nil},
+{`[.,=\[\]{}]`, Punctuation, nil},
 {`[^\W\d]\w*`, NameOther, nil},
 },
 },

vendor/github.com/alecthomas/chroma/lexers/t/typescript.go  (10 changes, generated, vendored)

@@ -38,14 +38,14 @@ var TypeScript = internal.Register(MustNewLexer(
 {`\+\+|--|~|&&|\?|:|\|\||\\(?=\n)|(<<|>>>?|==?|!=?|[-<>+*%&|^/])=?`, Operator, Push("slashstartsregex")},
 {`[{(\[;,]`, Punctuation, Push("slashstartsregex")},
 {`[})\].]`, Punctuation, nil},
-{`(for|in|while|do|break|return|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|void|this)\b`, Keyword, Push("slashstartsregex")},
+{`(for|in|of|while|do|break|return|yield|continue|switch|case|default|if|else|throw|try|catch|finally|new|delete|typeof|instanceof|keyof|asserts|is|infer|await|void|this)\b`, Keyword, Push("slashstartsregex")},
 {`(var|let|with|function)\b`, KeywordDeclaration, Push("slashstartsregex")},
-{`(abstract|boolean|byte|char|class|const|debugger|double|enum|export|extends|final|float|goto|implements|import|int|interface|long|native|package|private|protected|public|short|static|super|synchronized|throws|transient|volatile)\b`, KeywordReserved, nil},
+{`(abstract|async|boolean|class|const|debugger|enum|export|extends|from|get|global|goto|implements|import|interface|namespace|package|private|protected|public|readonly|require|set|static|super|type)\b`, KeywordReserved, nil},
 {`(true|false|null|NaN|Infinity|undefined)\b`, KeywordConstant, nil},
-{`(Array|Boolean|Date|Error|Function|Math|netscape|Number|Object|Packages|RegExp|String|sun|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|Error|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
+{`(Array|Boolean|Date|Error|Function|Math|Number|Object|Packages|RegExp|String|decodeURI|decodeURIComponent|encodeURI|encodeURIComponent|eval|isFinite|isNaN|parseFloat|parseInt|document|this|window)\b`, NameBuiltin, nil},
 {`\b(module)(\s*)(\s*[\w?.$][\w?.$]*)(\s*)`, ByGroups(KeywordReserved, Text, NameOther, Text), Push("slashstartsregex")},
-{`\b(string|bool|number)\b`, KeywordType, nil},
-{`\b(constructor|declare|interface|as|AS)\b`, KeywordReserved, nil},
+{`\b(string|bool|number|any|never|object|symbol|unique|unknown|bigint)\b`, KeywordType, nil},
+{`\b(constructor|declare|interface|as)\b`, KeywordReserved, nil},
 {`(super)(\s*)(\([\w,?.$\s]+\s*\))`, ByGroups(KeywordReserved, Text), Push("slashstartsregex")},
 {`([a-zA-Z_?.$][\w?.$]*)\(\) \{`, NameOther, Push("slashstartsregex")},
 {`([\w?.$][\w?.$]*)(\s*:\s*)([\w?.$][\w?.$]*)`, ByGroups(NameOther, Text, KeywordType), nil},

vendor/github.com/alecthomas/chroma/lexers/y/yaml.go  (19 changes, generated, vendored)

@@ -15,32 +15,35 @@ var YAML = internal.Register(MustNewLexer(
 Rules{
 "root": {
 Include("whitespace"),
-{`^---`, Text, nil},
+{`^---`, NameNamespace, nil},
+{`^\.\.\.`, NameNamespace, nil},
 {`[\n?]?\s*- `, Text, nil},
 {`#.*$`, Comment, nil},
 {`!![^\s]+`, CommentPreproc, nil},
 {`&[^\s]+`, CommentPreproc, nil},
 {`\*[^\s]+`, CommentPreproc, nil},
 {`^%include\s+[^\n\r]+`, CommentPreproc, nil},
-{`([>|+-]\s+)(\s+)((?:(?:.*?$)(?:[\n\r]*?)?)*)`, ByGroups(StringDoc, StringDoc, StringDoc), nil},
+{`[>|](?:[+-])?\s(?:^(?:[ \n]{1})+.*\n?)*$`, StringDoc, nil},
 Include("key"),
 Include("value"),
 {`[?:,\[\]]`, Punctuation, nil},
 {`.`, Text, nil},
 },
 "value": {
-{Words(``, `\b`, "true", "false", "null"), KeywordConstant, nil},
+{Words(``, `\b`, "true", "True", "TRUE", "false", "False", "FALSE", "null",
+"y", "Y", "yes", "Yes", "YES", "n", "N", "no", "No", "NO",
+"on", "On", "ON", "off", "Off", "OFF"), KeywordConstant, nil},
 {`"(?:\\.|[^"])*"`, StringDouble, nil},
 {`'(?:\\.|[^'])*'`, StringSingle, nil},
 {`\d\d\d\d-\d\d-\d\d([T ]\d\d:\d\d:\d\d(\.\d+)?(Z|\s+[-+]\d+)?)?`, LiteralDate, nil},
 {`\b[+\-]?(0x[\da-f]+|0o[0-7]+|(\d+\.?\d*|\.?\d+)(e[\+\-]?\d+)?|\.inf|\.nan)\b`, Number, nil},
-{`\b[\w]+\b`, Text, nil},
+{`[^\{\}\[\]\?,\:\!\-\*&\@].*`, Literal, nil},
 },
 "key": {
-{`"[^"\n].*": `, Keyword, nil},
-{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, Keyword, Punctuation, Whitespace), nil},
-{`([^"\n{]*)(:)( )`, ByGroups(Keyword, Punctuation, Whitespace), nil},
-{`([^"\n{]*)(:)(\n)`, ByGroups(Keyword, Punctuation, Whitespace), nil},
+{`"[^"\n].*": `, NameTag, nil},
+{`(-)( )([^"\n{]*)(:)( )`, ByGroups(Punctuation, Whitespace, NameTag, Punctuation, Whitespace), nil},
+{`([^"\n{]*)(:)( )`, ByGroups(NameTag, Punctuation, Whitespace), nil},
+{`([^"\n{]*)(:)(\n)`, ByGroups(NameTag, Punctuation, Whitespace), nil},
 },
 "whitespace": {
 {`\s+`, Whitespace, nil},

vendor/github.com/alecthomas/chroma/lexers/z/zig.go  (new file, 54 lines, generated, vendored)

@@ -0,0 +1,54 @@
+package z
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// Zig lexer.
+var Zig = internal.Register(MustNewLexer(
+	&Config{
+		Name: "Zig",
+		Aliases: []string{"zig"},
+		Filenames: []string{"*.zig"},
+		MimeTypes: []string{"text/zig"},
+	},
+	Rules{
+		"root": {
+			{`\n`, TextWhitespace, nil},
+			{`\s+`, TextWhitespace, nil},
+			{`//.*?\n`, CommentSingle, nil},
+			{Words(``, `\b`, `break`, `return`, `continue`, `asm`, `defer`, `errdefer`, `unreachable`, `try`, `catch`, `async`, `await`, `suspend`, `resume`, `cancel`), Keyword, nil},
+			{Words(``, `\b`, `const`, `var`, `extern`, `packed`, `export`, `pub`, `noalias`, `inline`, `comptime`, `nakedcc`, `stdcallcc`, `volatile`, `allowzero`, `align`, `linksection`, `threadlocal`), KeywordReserved, nil},
+			{Words(``, `\b`, `struct`, `enum`, `union`, `error`), Keyword, nil},
+			{Words(``, `\b`, `while`, `for`), Keyword, nil},
+			{Words(``, `\b`, `bool`, `f16`, `f32`, `f64`, `f128`, `void`, `noreturn`, `type`, `anyerror`, `promise`, `i0`, `u0`, `isize`, `usize`, `comptime_int`, `comptime_float`, `c_short`, `c_ushort`, `c_int`, `c_uint`, `c_long`, `c_ulong`, `c_longlong`, `c_ulonglong`, `c_longdouble`, `c_voidi8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`), KeywordType, nil},
+			{Words(``, `\b`, `true`, `false`, `null`, `undefined`), KeywordConstant, nil},
+			{Words(``, `\b`, `if`, `else`, `switch`, `and`, `or`, `orelse`), Keyword, nil},
+			{Words(``, `\b`, `fn`, `usingnamespace`, `test`), Keyword, nil},
+			{`0x[0-9a-fA-F]+\.[0-9a-fA-F]+([pP][\-+]?[0-9a-fA-F]+)?`, LiteralNumberFloat, nil},
+			{`0x[0-9a-fA-F]+\.?[pP][\-+]?[0-9a-fA-F]+`, LiteralNumberFloat, nil},
+			{`[0-9]+\.[0-9]+([eE][-+]?[0-9]+)?`, LiteralNumberFloat, nil},
+			{`[0-9]+\.?[eE][-+]?[0-9]+`, LiteralNumberFloat, nil},
+			{`0b[01]+`, LiteralNumberBin, nil},
+			{`0o[0-7]+`, LiteralNumberOct, nil},
+			{`0x[0-9a-fA-F]+`, LiteralNumberHex, nil},
+			{`[0-9]+`, LiteralNumberInteger, nil},
+			{`@[a-zA-Z_]\w*`, NameBuiltin, nil},
+			{`[a-zA-Z_]\w*`, Name, nil},
+			{`\'\\\'\'`, LiteralStringEscape, nil},
+			{`\'\\(|x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])\'`, LiteralStringEscape, nil},
+			{`\'[^\\\']\'`, LiteralString, nil},
+			{`\\\\[^\n]*`, LiteralStringHeredoc, nil},
+			{`c\\\\[^\n]*`, LiteralStringHeredoc, nil},
+			{`c?"`, LiteralString, Push("string")},
+			{`[+%=><|^!?/\-*&~:]`, Operator, nil},
+			{`[{}()\[\],.;]`, Punctuation, nil},
+		},
+		"string": {
+			{`\\(x[a-fA-F0-9]{2}|u[a-fA-F0-9]{4}|U[a-fA-F0-9]{6}|[nr\\t\'"])`, LiteralStringEscape, nil},
+			{`[^\\"\n]+`, LiteralString, nil},
+			{`"`, LiteralString, Pop(1)},
+		},
+	},
+))

vendor/github.com/alecthomas/chroma/regexp.go  (19 changes, generated, vendored)

@@ -6,6 +6,7 @@ import (
 "regexp"
 "strings"
 "sync"
+"time"
 "unicode/utf8"
 
 "github.com/dlclark/regexp2"

@@ -160,6 +161,14 @@ func Tokenise(lexer Lexer, options *TokeniseOptions, text string) ([]Token, erro
 // Rules maps from state to a sequence of Rules.
 type Rules map[string][]Rule
 
+// Rename clones rules then a rule.
+func (r Rules) Rename(old, new string) Rules {
+	r = r.Clone()
+	r[new] = r[old]
+	delete(r, old)
+	return r
+}
+
 // Clone returns a clone of the Rules.
 func (r Rules) Clone() Rules {
 out := map[string][]Rule{}

@@ -170,6 +179,15 @@ func (r Rules) Clone() Rules {
 return out
 }
 
+// Merge creates a clone of "r" then merges "rules" into the clone.
+func (r Rules) Merge(rules Rules) Rules {
+	out := r.Clone()
+	for k, v := range rules.Clone() {
+		out[k] = v
+	}
+	return out
+}
+
 // MustNewLexer creates a new Lexer or panics.
 func MustNewLexer(config *Config, rules Rules) *RegexLexer {
 lexer, err := NewLexer(config, rules)

@@ -376,6 +394,7 @@ func (r *RegexLexer) maybeCompile() (err error) {
 if err != nil {
 return fmt.Errorf("failed to compile rule %s.%d: %s", state, i, err)
 }
+rule.Regexp.MatchTimeout = time.Millisecond * 250
 }
 }
 }

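The new Rules.Rename and Rules.Merge helpers are what the PHP/PHTML and Caddyfile changes above rely on to share one rule set between several lexers. A small illustrative sketch under assumed names (the ExampleA/ExampleB lexers and the "shared" state are hypothetical, not part of this commit):

package example

import (
	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// A rule set defined once and reused by two lexers.
var commonRules = Rules{
	"shared": {
		{`\s+`, Text, nil},
		{`\w+`, Name, nil},
	},
}

// One lexer adopts the shared state directly as its "root" state,
// mirroring phpCommonRules.Rename("php", "root") above.
var ExampleA = internal.Register(MustNewLexer(
	&Config{Name: "ExampleA", Aliases: []string{"example-a"}},
	commonRules.Rename("shared", "root"),
))

// Another keeps its own "root" and pulls the shared states in via Merge + Include,
// mirroring the }.Merge(caddyfileCommon) pattern above.
var ExampleB = internal.Register(MustNewLexer(
	&Config{Name: "ExampleB", Aliases: []string{"example-b"}},
	Rules{
		"root": {
			Include("shared"),
		},
	}.Merge(commonRules),
))
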
vendor/modules.txt  (3 changes, vendored)

@@ -62,7 +62,7 @@ github.com/PuerkitoBio/urlesc
 # github.com/RoaringBitmap/roaring v0.4.23
 ## explicit
 github.com/RoaringBitmap/roaring
-# github.com/alecthomas/chroma v0.7.3
+# github.com/alecthomas/chroma v0.8.0
 ## explicit
 github.com/alecthomas/chroma
 github.com/alecthomas/chroma/formatters/html

@@ -93,6 +93,7 @@ github.com/alecthomas/chroma/lexers/v
 github.com/alecthomas/chroma/lexers/w
 github.com/alecthomas/chroma/lexers/x
 github.com/alecthomas/chroma/lexers/y
+github.com/alecthomas/chroma/lexers/z
 github.com/alecthomas/chroma/styles
 # github.com/andybalholm/brotli v0.0.0-20190621154722-5f990b63d2d6
 github.com/andybalholm/brotli