1
0
Fork 0
forked from forgejo/forgejo

Vendor Update (#16121)

* update github.com/PuerkitoBio/goquery

* update github.com/alecthomas/chroma

* update github.com/blevesearch/bleve/v2

* update github.com/caddyserver/certmagic

* update github.com/go-enry/go-enry/v2

* update github.com/go-git/go-billy/v5

* update github.com/go-git/go-git/v5

* update github.com/go-redis/redis/v8

* update github.com/go-testfixtures/testfixtures/v3

* update github.com/jaytaylor/html2text

* update github.com/json-iterator/go

* update github.com/klauspost/compress

* update github.com/markbates/goth

* update github.com/mattn/go-isatty

* update github.com/mholt/archiver/v3

* update github.com/microcosm-cc/bluemonday

* update github.com/minio/minio-go/v7

* update github.com/prometheus/client_golang

* update github.com/unrolled/render

* update github.com/xanzy/go-gitlab

* update github.com/yuin/goldmark

* update github.com/yuin/goldmark-highlighting

Co-authored-by: techknowlogick <techknowlogick@gitea.io>
This commit is contained in:
6543 2021-06-10 16:44:25 +02:00 committed by GitHub
parent f088dc4ea1
commit 86e2789960
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
819 changed files with 38072 additions and 34969 deletions

View file

@ -6,14 +6,19 @@ import (
)
// C lexer.
var C = internal.Register(MustNewLexer(
var C = internal.Register(MustNewLazyLexer(
&Config{
Name: "C",
Aliases: []string{"c"},
Filenames: []string{"*.c", "*.h", "*.idc"},
MimeTypes: []string{"text/x-chdr", "text/x-csrc"},
EnsureNL: true,
},
Rules{
cRules,
))
func cRules() Rules {
return Rules{
"whitespace": {
{`^#if\s+0`, CommentPreproc, Push("if0")},
{`^#`, CommentPreproc, Push("macro")},
@ -87,5 +92,5 @@ var C = internal.Register(MustNewLexer(
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
},
))
}
}

View file

@ -6,143 +6,149 @@ import (
)
// caddyfileCommon are the rules common to both of the lexer variants
var caddyfileCommon = Rules{
"site_block_common": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
Include("base"),
},
"not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("arguments")},
{`\s+`, Text, nil},
},
"deep_not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`\s+`, Text, nil},
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
func caddyfileCommonRules() Rules {
return Rules{
"site_block_common": {
// Import keyword
{`(import)(\s+)([^\s]+)`, ByGroups(Keyword, Text, NameVariableMagic), nil},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Matcher token stub for docs
{`\[\<matcher\>\]`, NameDecorator, Push("matcher")},
// These cannot have matchers but may have things that look like
// matchers in their arguments, so we just parse as a subdirective.
{`try_files`, Keyword, Push("subdirective")},
// These are special, they can nest more directives
{`handle_errors|handle|route|handle_path|not`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("directive")},
Include("base"),
},
"matcher": {
{`\{`, Punctuation, Push("block")},
// Not can be one-liner
{`not`, Keyword, Push("deep_not_matcher")},
// Any other same-line matcher
{`[^\s#]+`, Keyword, Push("arguments")},
// Terminators
{`\n`, Text, Pop(1)},
{`\}`, Punctuation, Pop(1)},
Include("base"),
},
"block": {
{`\}`, Punctuation, Pop(2)},
// Not can be one-liner
{`not`, Keyword, Push("not_matcher")},
// Any other subdirective
{`[^\s#]+`, Keyword, Push("subdirective")},
Include("base"),
},
"nested_block": {
{`\}`, Punctuation, Pop(2)},
// Matcher definition
{`@[^\s]+(?=\s)`, NameDecorator, Push("matcher")},
// Something that starts with literally < is probably a docs stub
{`\<[^#]+\>`, Keyword, Push("nested_directive")},
// Any other directive
{`[^\s#]+`, Keyword, Push("nested_directive")},
Include("base"),
},
"not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("arguments")},
{`\s+`, Text, nil},
},
"deep_not_matcher": {
{`\}`, Punctuation, Pop(2)},
{`\{(?=\s)`, Punctuation, Push("block")},
{`[^\s#]+`, Keyword, Push("deep_subdirective")},
{`\s+`, Text, nil},
},
"directive": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"nested_directive": {
{`\{(?=\s)`, Punctuation, Push("nested_block")},
Include("matcher_token"),
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_1"),
{`\n`, Text, Pop(1)},
Include("base"),
},
"arguments": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_2"),
{`\\\n`, Text, nil}, // Skip escaped newlines
{`\n`, Text, Pop(2)},
Include("base"),
},
"deep_subdirective": {
{`\{(?=\s)`, Punctuation, Push("block")},
Include("comments_pop_3"),
{`\n`, Text, Pop(3)},
Include("base"),
},
"matcher_token": {
{`@[^\s]+`, NameDecorator, Push("arguments")}, // Named matcher
{`/[^\s]+`, NameDecorator, Push("arguments")}, // Path matcher
{`\*`, NameDecorator, Push("arguments")}, // Wildcard path matcher
{`\[\<matcher\>\]`, NameDecorator, Push("arguments")}, // Matcher token stub for docs
},
"comments": {
{`^#.*\n`, CommentSingle, nil}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, nil}, // Comment preceded by whitespace
},
"comments_pop_1": {
{`^#.*\n`, CommentSingle, Pop(1)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(1)}, // Comment preceded by whitespace
},
"comments_pop_2": {
{`^#.*\n`, CommentSingle, Pop(2)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(2)}, // Comment preceded by whitespace
},
"comments_pop_3": {
{`^#.*\n`, CommentSingle, Pop(3)}, // Comment at start of line
{`\s+#.*\n`, CommentSingle, Pop(3)}, // Comment preceded by whitespace
},
"base": {
Include("comments"),
{`(on|off|first|last|before|after|internal|strip_prefix|strip_suffix|replace)\b`, NameConstant, nil},
{`(https?://)?([a-z0-9.-]+)(:)([0-9]+)`, ByGroups(Name, Name, Punctuation, LiteralNumberInteger), nil},
{`[a-z-]+/[a-z-+]+`, LiteralString, nil},
{`[0-9]+[km]?\b`, LiteralNumberInteger, nil},
{`\{[\w+.\$-]+\}`, LiteralStringEscape, nil}, // Placeholder
{`\[(?=[^#{}$]+\])`, Punctuation, nil},
{`\]|\|`, Punctuation, nil},
{`[^\s#{}$\]]+`, LiteralString, nil},
{`/[^\s#]*`, Name, nil},
{`\s+`, Text, nil},
},
}
}
// Caddyfile lexer.
var Caddyfile = internal.Register(MustNewLexer(
var Caddyfile = internal.Register(MustNewLazyLexer(
&Config{
Name: "Caddyfile",
Aliases: []string{"caddyfile", "caddy"},
Filenames: []string{"Caddyfile*"},
MimeTypes: []string{},
},
Rules{
caddyfileRules,
))
func caddyfileRules() Rules {
return Rules{
"root": {
Include("comments"),
// Global options block
@ -186,21 +192,25 @@ var Caddyfile = internal.Register(MustNewLexer(
{`\}`, Punctuation, Pop(2)},
Include("site_block_common"),
},
}.Merge(caddyfileCommon),
))
}.Merge(caddyfileCommonRules())
}
// Caddyfile directive-only lexer.
var CaddyfileDirectives = internal.Register(MustNewLexer(
var CaddyfileDirectives = internal.Register(MustNewLazyLexer(
&Config{
Name: "Caddyfile Directives",
Aliases: []string{"caddyfile-directives", "caddyfile-d", "caddy-d"},
Filenames: []string{},
MimeTypes: []string{},
},
Rules{
caddyfileDirectivesRules,
))
func caddyfileDirectivesRules() Rules {
return Rules{
// Same as "site_block" in Caddyfile
"root": {
Include("site_block_common"),
},
}.Merge(caddyfileCommon),
))
}.Merge(caddyfileCommonRules())
}

View file

@ -6,14 +6,18 @@ import (
)
// Cap'n Proto lexer.
var CapNProto = internal.Register(MustNewLexer(
var CapNProto = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cap'n Proto",
Aliases: []string{"capnp"},
Filenames: []string{"*.capnp"},
MimeTypes: []string{},
},
Rules{
capNProtoRules,
))
func capNProtoRules() Rules {
return Rules{
"root": {
{`#.*?$`, CommentSingle, nil},
{`@[0-9a-zA-Z]*`, NameDecorator, nil},
@ -57,5 +61,5 @@ var CapNProto = internal.Register(MustNewLexer(
{`[])]`, NameAttribute, Pop(1)},
Default(Pop(1)),
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// Ceylon lexer.
var Ceylon = internal.Register(MustNewLexer(
var Ceylon = internal.Register(MustNewLazyLexer(
&Config{
Name: "Ceylon",
Aliases: []string{"ceylon"},
@ -14,7 +14,11 @@ var Ceylon = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-ceylon"},
DotAll: true,
},
Rules{
ceylonRules,
))
func ceylonRules() Rules {
return Rules{
"root": {
{`^(\s*(?:[a-zA-Z_][\w.\[\]]*\s+)+?)([a-zA-Z_]\w*)(\s*)(\()`, ByGroups(UsingSelf("root"), NameFunction, Text, Operator), nil},
{`[^\S\n]+`, Text, nil},
@ -59,5 +63,5 @@ var Ceylon = internal.Register(MustNewLexer(
{`\*/`, CommentMultiline, Pop(1)},
{`[*/]`, CommentMultiline, nil},
},
},
))
}
}

View file

@ -6,14 +6,18 @@ import (
)
// Cfengine3 lexer.
var Cfengine3 = internal.Register(MustNewLexer(
var Cfengine3 = internal.Register(MustNewLazyLexer(
&Config{
Name: "CFEngine3",
Aliases: []string{"cfengine3", "cf3"},
Filenames: []string{"*.cf"},
MimeTypes: []string{},
},
Rules{
cfengine3Rules,
))
func cfengine3Rules() Rules {
return Rules{
"root": {
{`#.*?\n`, Comment, nil},
{`(body)(\s+)(\S+)(\s+)(control)`, ByGroups(Keyword, Text, Keyword, Text, Keyword), nil},
@ -52,5 +56,5 @@ var Cfengine3 = internal.Register(MustNewLexer(
{`\w+`, NameVariable, nil},
{`\s+`, Text, nil},
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// Chaiscript lexer.
var Chaiscript = internal.Register(MustNewLexer(
var Chaiscript = internal.Register(MustNewLazyLexer(
&Config{
Name: "ChaiScript",
Aliases: []string{"chai", "chaiscript"},
@ -14,7 +14,11 @@ var Chaiscript = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-chaiscript", "application/x-chaiscript"},
DotAll: true,
},
Rules{
chaiscriptRules,
))
func chaiscriptRules() Rules {
return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`//.*?\n`, CommentSingle, nil},
@ -59,5 +63,5 @@ var Chaiscript = internal.Register(MustNewLexer(
{`[^\\"$]+`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
},
))
}
}

View file

@ -7,14 +7,18 @@ import (
)
// Cheetah lexer.
var Cheetah = internal.Register(MustNewLexer(
var Cheetah = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cheetah",
Aliases: []string{"cheetah", "spitfire"},
Filenames: []string{"*.tmpl", "*.spt"},
MimeTypes: []string{"application/x-cheetah", "application/x-spitfire"},
},
Rules{
cheetahRules,
))
func cheetahRules() Rules {
return Rules{
"root": {
{`(##[^\n]*)$`, ByGroups(Comment), nil},
{`#[*](.|\n)*?[*]#`, Comment, nil},
@ -33,5 +37,5 @@ var Cheetah = internal.Register(MustNewLexer(
`, Other, nil},
{`\s+`, Text, nil},
},
},
))
}
}

View file

@ -230,7 +230,7 @@ var (
)
// Common Lisp lexer.
var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLazyLexer(
&Config{
Name: "Common Lisp",
Aliases: []string{"common-lisp", "cl", "lisp"},
@ -238,7 +238,19 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
MimeTypes: []string{"text/x-common-lisp"},
CaseInsensitive: true,
},
Rules{
commonLispRules,
), TypeMapping{
{NameVariable, NameFunction, clBuiltinFunctions},
{NameVariable, Keyword, clSpecialForms},
{NameVariable, NameBuiltin, clMacros},
{NameVariable, Keyword, clLambdaListKeywords},
{NameVariable, Keyword, clDeclarations},
{NameVariable, KeywordType, clBuiltinTypes},
{NameVariable, NameClass, clBuiltinClasses},
}))
func commonLispRules() Rules {
return Rules{
"root": {
Default(Push("body")),
},
@ -294,13 +306,5 @@ var CommonLisp = internal.Register(TypeRemappingLexer(MustNewLexer(
{`\(`, Punctuation, Push("body")},
{`\)`, Punctuation, Pop(1)},
},
},
), TypeMapping{
{NameVariable, NameFunction, clBuiltinFunctions},
{NameVariable, Keyword, clSpecialForms},
{NameVariable, NameBuiltin, clMacros},
{NameVariable, Keyword, clLambdaListKeywords},
{NameVariable, Keyword, clDeclarations},
{NameVariable, KeywordType, clBuiltinTypes},
{NameVariable, NameClass, clBuiltinClasses},
}))
}
}

View file

@ -6,14 +6,18 @@ import (
)
// Clojure lexer.
var Clojure = internal.Register(MustNewLexer(
var Clojure = internal.Register(MustNewLazyLexer(
&Config{
Name: "Clojure",
Aliases: []string{"clojure", "clj"},
Filenames: []string{"*.clj"},
MimeTypes: []string{"text/x-clojure", "application/x-clojure"},
},
Rules{
clojureRules,
))
func clojureRules() Rules {
return Rules{
"root": {
{`;.*$`, CommentSingle, nil},
{`[,\s]+`, Text, nil},
@ -34,5 +38,5 @@ var Clojure = internal.Register(MustNewLexer(
{`(\{|\})`, Punctuation, nil},
{`(\(|\))`, Punctuation, nil},
},
},
))
}
}

View file

@ -6,14 +6,18 @@ import (
)
// Cmake lexer.
var Cmake = internal.Register(MustNewLexer(
var Cmake = internal.Register(MustNewLazyLexer(
&Config{
Name: "CMake",
Aliases: []string{"cmake"},
Filenames: []string{"*.cmake", "CMakeLists.txt"},
MimeTypes: []string{"text/x-cmake"},
},
Rules{
cmakeRules,
))
func cmakeRules() Rules {
return Rules{
"root": {
{`\b(\w+)([ \t]*)(\()`, ByGroups(NameBuiltin, Text, Punctuation), Push("args")},
Include("keywords"),
@ -40,5 +44,5 @@ var Cmake = internal.Register(MustNewLexer(
{`[ \t]+`, Text, nil},
{`#.*\n`, Comment, nil},
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// Cobol lexer.
var Cobol = internal.Register(MustNewLexer(
var Cobol = internal.Register(MustNewLazyLexer(
&Config{
Name: "COBOL",
Aliases: []string{"cobol"},
@ -14,7 +14,11 @@ var Cobol = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-cobol"},
CaseInsensitive: true,
},
Rules{
cobolRules,
))
func cobolRules() Rules {
return Rules{
"root": {
Include("comment"),
Include("strings"),
@ -47,5 +51,5 @@ var Cobol = internal.Register(MustNewLexer(
{`[+-]?\d*\.\d+(E[-+]?\d+)?`, LiteralNumberFloat, nil},
{`[+-]?\d+\.\d*(E[-+]?\d+)?`, LiteralNumberFloat, nil},
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// Coffeescript lexer.
var Coffeescript = internal.Register(MustNewLexer(
var Coffeescript = internal.Register(MustNewLazyLexer(
&Config{
Name: "CoffeeScript",
Aliases: []string{"coffee-script", "coffeescript", "coffee"},
@ -15,7 +15,11 @@ var Coffeescript = internal.Register(MustNewLexer(
NotMultiline: true,
DotAll: true,
},
Rules{
coffeescriptRules,
))
func coffeescriptRules() Rules {
return Rules{
"commentsandwhitespace": {
{`\s+`, Text, nil},
{`###[^#].*?###`, CommentMultiline, nil},
@ -87,5 +91,5 @@ var Coffeescript = internal.Register(MustNewLexer(
{`#|\\.|\'|"`, LiteralString, nil},
Include("strings"),
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// Cfstatement lexer.
var Cfstatement = internal.Register(MustNewLexer(
var Cfstatement = internal.Register(MustNewLazyLexer(
&Config{
Name: "cfstatement",
Aliases: []string{"cfs"},
@ -15,7 +15,11 @@ var Cfstatement = internal.Register(MustNewLexer(
NotMultiline: true,
CaseInsensitive: true,
},
Rules{
cfstatementRules,
))
func cfstatementRules() Rules {
return Rules{
"root": {
{`//.*?\n`, CommentSingle, nil},
{`/\*(?:.|\n)*?\*/`, CommentMultiline, nil},
@ -44,5 +48,5 @@ var Cfstatement = internal.Register(MustNewLexer(
{`#`, LiteralStringDouble, nil},
{`"`, LiteralStringDouble, Pop(1)},
},
},
))
}
}

View file

@ -6,14 +6,18 @@ import (
)
// Coq lexer.
var Coq = internal.Register(MustNewLexer(
var Coq = internal.Register(MustNewLazyLexer(
&Config{
Name: "Coq",
Aliases: []string{"coq"},
Filenames: []string{"*.v"},
MimeTypes: []string{"text/x-coq"},
},
Rules{
coqRules,
))
func coqRules() Rules {
return Rules{
"root": {
{`\s+`, Text, nil},
{`false|true|\(\)|\[\]`, NameBuiltinPseudo, nil},
@ -59,5 +63,5 @@ var Coq = internal.Register(MustNewLexer(
{`[a-z][a-z0-9_\']*`, Name, Pop(1)},
Default(Pop(1)),
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// CPP lexer.
var CPP = internal.Register(MustNewLexer(
var CPP = internal.Register(MustNewLazyLexer(
&Config{
Name: "C++",
Aliases: []string{"cpp", "c++"},
@ -14,7 +14,11 @@ var CPP = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-c++hdr", "text/x-c++src"},
EnsureNL: true,
},
Rules{
cppRules,
))
func cppRules() Rules {
return Rules{
"statements": {
{Words(``, `\b`, `catch`, `const_cast`, `delete`, `dynamic_cast`, `explicit`, `export`, `friend`, `mutable`, `namespace`, `new`, `operator`, `private`, `protected`, `public`, `reinterpret_cast`, `restrict`, `static_cast`, `template`, `this`, `throw`, `throws`, `try`, `typeid`, `typename`, `using`, `virtual`, `constexpr`, `nullptr`, `decltype`, `thread_local`, `alignas`, `alignof`, `static_assert`, `noexcept`, `override`, `final`, `concept`, `requires`, `consteval`, `co_await`, `co_return`, `co_yield`), Keyword, nil},
{`(enum)\b(\s+)(class)\b(\s*)`, ByGroups(Keyword, Text, Keyword, Text), Push("classname")},
@ -102,5 +106,5 @@ var CPP = internal.Register(MustNewLexer(
{`^\s*#endif.*?(?<!\\)\n`, CommentPreproc, Pop(1)},
{`.*?\n`, Comment, nil},
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// CassandraCQL lexer.
var CassandraCQL = internal.Register(MustNewLexer(
var CassandraCQL = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cassandra CQL",
Aliases: []string{"cassandra", "cql"},
@ -15,7 +15,11 @@ var CassandraCQL = internal.Register(MustNewLexer(
NotMultiline: true,
CaseInsensitive: true,
},
Rules{
cassandraCQLRules,
))
func cassandraCQLRules() Rules {
return Rules{
"root": {
{`\s+`, TextWhitespace, nil},
{`(--|\/\/).*\n?`, CommentSingle, nil},
@ -23,7 +27,8 @@ var CassandraCQL = internal.Register(MustNewLexer(
{`(ascii|bigint|blob|boolean|counter|date|decimal|double|float|frozen|inet|int|list|map|set|smallint|text|time|timestamp|timeuuid|tinyint|tuple|uuid|varchar|varint)\b`, NameBuiltin, nil},
{Words(``, `\b`, `ADD`, `AGGREGATE`, `ALL`, `ALLOW`, `ALTER`, `AND`, `ANY`, `APPLY`, `AS`, `ASC`, `AUTHORIZE`, `BATCH`, `BEGIN`, `BY`, `CLUSTERING`, `COLUMNFAMILY`, `COMPACT`, `CONSISTENCY`, `COUNT`, `CREATE`, `CUSTOM`, `DELETE`, `DESC`, `DISTINCT`, `DROP`, `EACH_QUORUM`, `ENTRIES`, `EXISTS`, `FILTERING`, `FROM`, `FULL`, `GRANT`, `IF`, `IN`, `INDEX`, `INFINITY`, `INSERT`, `INTO`, `KEY`, `KEYS`, `KEYSPACE`, `KEYSPACES`, `LEVEL`, `LIMIT`, `LOCAL_ONE`, `LOCAL_QUORUM`, `MATERIALIZED`, `MODIFY`, `NAN`, `NORECURSIVE`, `NOSUPERUSER`, `NOT`, `OF`, `ON`, `ONE`, `ORDER`, `PARTITION`, `PASSWORD`, `PER`, `PERMISSION`, `PERMISSIONS`, `PRIMARY`, `QUORUM`, `RENAME`, `REVOKE`, `SCHEMA`, `SELECT`, `STATIC`, `STORAGE`, `SUPERUSER`, `TABLE`, `THREE`, `TO`, `TOKEN`, `TRUNCATE`, `TTL`, `TWO`, `TYPE`, `UNLOGGED`, `UPDATE`, `USE`, `USER`, `USERS`, `USING`, `VALUES`, `VIEW`, `WHERE`, `WITH`, `WRITETIME`, `REPLICATION`, `OR`, `REPLACE`, `FUNCTION`, `CALLED`, `INPUT`, `RETURNS`, `LANGUAGE`, `ROLE`, `ROLES`, `TRIGGER`, `DURABLE_WRITES`, `LOGIN`, `OPTIONS`, `LOGGED`, `SFUNC`, `STYPE`, `FINALFUNC`, `INITCOND`, `IS`, `CONTAINS`, `JSON`, `PAGING`, `OFF`), Keyword, nil},
{"[+*/<>=~!@#%^&|`?-]+", Operator, nil},
{`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
{
`(?s)(java|javascript)(\s+)(AS)(\s+)('|\$\$)(.*?)(\5)`,
UsingByGroup(
internal.Get,
1, 6,
@ -65,5 +70,5 @@ var CassandraCQL = internal.Register(MustNewLexer(
{`[^\$]+`, LiteralStringHeredoc, nil},
{`\$\$`, LiteralStringHeredoc, Pop(1)},
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// Crystal lexer.
var Crystal = internal.Register(MustNewLexer(
var Crystal = internal.Register(MustNewLazyLexer(
&Config{
Name: "Crystal",
Aliases: []string{"cr", "crystal"},
@ -14,7 +14,11 @@ var Crystal = internal.Register(MustNewLexer(
MimeTypes: []string{"text/x-crystal"},
DotAll: true,
},
Rules{
crystalRules,
))
func crystalRules() Rules {
return Rules{
"root": {
{`#.*?$`, CommentSingle, nil},
{Words(``, `\b`, `abstract`, `asm`, `as`, `begin`, `break`, `case`, `do`, `else`, `elsif`, `end`, `ensure`, `extend`, `ifdef`, `if`, `include`, `instance_sizeof`, `next`, `of`, `pointerof`, `private`, `protected`, `rescue`, `return`, `require`, `sizeof`, `super`, `then`, `typeof`, `unless`, `until`, `when`, `while`, `with`, `yield`), Keyword, nil},
@ -258,5 +262,5 @@ var Crystal = internal.Register(MustNewLexer(
{`[\\#<>]`, LiteralStringRegex, nil},
{`[^\\#<>]+`, LiteralStringRegex, nil},
},
},
))
}
}

View file

@ -6,7 +6,7 @@ import (
)
// CSharp lexer.
var CSharp = internal.Register(MustNewLexer(
var CSharp = internal.Register(MustNewLazyLexer(
&Config{
Name: "C#",
Aliases: []string{"csharp", "c#"},
@ -15,7 +15,11 @@ var CSharp = internal.Register(MustNewLexer(
DotAll: true,
EnsureNL: true,
},
Rules{
cSharpRules,
))
func cSharpRules() Rules {
return Rules{
"root": {
{`^\s*\[.*?\]`, NameAttribute, nil},
{`[^\S\n]+`, Text, nil},
@ -29,7 +33,7 @@ var CSharp = internal.Register(MustNewLexer(
{`\$@?"(""|[^"])*"`, LiteralString, nil},
{`"(\\\\|\\"|[^"\n])*["\n]`, LiteralString, nil},
{`'\\.'|'[^\\]'`, LiteralStringChar, nil},
{`[0-9](\.[0-9]*)?([eE][+-][0-9]+)?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil},
{`0[xX][0-9a-fA-F]+[Ll]?|[0-9_](\.[0-9]*)?([eE][+-]?[0-9]+)?[flFLdD]?`, LiteralNumber, nil},
{`#[ \t]*(if|endif|else|elif|define|undef|line|error|warning|region|endregion|pragma)\b.*?\n`, CommentPreproc, nil},
{`\b(extern)(\s+)(alias)\b`, ByGroups(Keyword, Text, Keyword), nil},
{`(abstract|as|async|await|base|break|by|case|catch|checked|const|continue|default|delegate|do|else|enum|event|explicit|extern|false|finally|fixed|for|foreach|goto|if|implicit|in|interface|internal|is|let|lock|new|null|on|operator|out|override|params|private|protected|public|readonly|ref|return|sealed|sizeof|stackalloc|static|switch|this|throw|true|try|typeof|unchecked|unsafe|virtual|void|while|get|set|new|partial|yield|add|remove|value|alias|ascending|descending|from|group|into|orderby|select|thenby|where|join|equals)\b`, Keyword, nil},
@ -47,5 +51,5 @@ var CSharp = internal.Register(MustNewLexer(
{`(?=\()`, Text, Pop(1)},
{`(@?[_a-zA-Z]\w*|\.)+`, NameNamespace, Pop(1)},
},
},
))
}
}

View file

@ -6,14 +6,18 @@ import (
)
// CSS lexer.
var CSS = internal.Register(MustNewLexer(
var CSS = internal.Register(MustNewLazyLexer(
&Config{
Name: "CSS",
Aliases: []string{"css"},
Filenames: []string{"*.css"},
MimeTypes: []string{"text/css"},
},
Rules{
cssRules,
))
func cssRules() Rules {
return Rules{
"root": {
Include("basics"),
},
@ -39,6 +43,18 @@ var CSS = internal.Register(MustNewLexer(
Include("basics"),
{`\}`, Punctuation, Pop(2)},
},
"atparenthesis": {
Include("common-values"),
{`/\*(?:.|\n)*?\*/`, Comment, nil},
Include("numeric-values"),
{`[*+/-]`, Operator, nil},
{`[,]`, Punctuation, nil},
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
{`[a-zA-Z_-]\w*`, Name, nil},
{`\(`, Punctuation, Push("atparenthesis")},
{`\)`, Punctuation, Pop(1)},
},
"content": {
{`\s+`, Text, nil},
{`\}`, Punctuation, Pop(1)},
@ -73,6 +89,7 @@ var CSS = internal.Register(MustNewLexer(
{`"(\\\\|\\"|[^"])*"`, LiteralStringDouble, nil},
{`'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
{`[a-zA-Z_-]\w*`, Name, nil},
{`\(`, Punctuation, Push("atparenthesis")},
{`\)`, Punctuation, Pop(1)},
},
"common-values": {
@ -100,5 +117,5 @@ var CSS = internal.Register(MustNewLexer(
{`%`, KeywordType, nil},
Default(Pop(1)),
},
},
))
}
}

View file

@ -6,14 +6,18 @@ import (
)
// Cython lexer.
var Cython = internal.Register(MustNewLexer(
var Cython = internal.Register(MustNewLazyLexer(
&Config{
Name: "Cython",
Aliases: []string{"cython", "pyx", "pyrex"},
Filenames: []string{"*.pyx", "*.pxd", "*.pxi"},
MimeTypes: []string{"text/x-cython", "application/x-cython"},
},
Rules{
cythonRules,
))
func cythonRules() Rules {
return Rules{
"root": {
{`\n`, Text, nil},
{`^(\s*)("""(?:.|\n)*?""")`, ByGroups(Text, LiteralStringDoc), nil},
@ -131,5 +135,5 @@ var Cython = internal.Register(MustNewLexer(
Include("strings"),
Include("nl"),
},
},
))
}
}