Vendor Update (#16121)

* update github.com/PuerkitoBio/goquery

* update github.com/alecthomas/chroma

* update github.com/blevesearch/bleve/v2

* update github.com/caddyserver/certmagic

* update github.com/go-enry/go-enry/v2

* update github.com/go-git/go-billy/v5

* update github.com/go-git/go-git/v5

* update github.com/go-redis/redis/v8

* update github.com/go-testfixtures/testfixtures/v3

* update github.com/jaytaylor/html2text

* update github.com/json-iterator/go

* update github.com/klauspost/compress

* update github.com/markbates/goth

* update github.com/mattn/go-isatty

* update github.com/mholt/archiver/v3

* update github.com/microcosm-cc/bluemonday

* update github.com/minio/minio-go/v7

* update github.com/prometheus/client_golang

* update github.com/unrolled/render

* update github.com/xanzy/go-gitlab

* update github.com/yuin/goldmark

* update github.com/yuin/goldmark-highlighting

Co-authored-by: techknowlogick <techknowlogick@gitea.io>
Authored by 6543 on 2021-06-10 16:44:25 +02:00, committed by GitHub
parent f088dc4ea1
commit 86e2789960
GPG key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
819 changed files with 38072 additions and 34969 deletions

vendor/github.com/alecthomas/chroma/lexers/h/handlebars.go

@@ -6,14 +6,18 @@ import (
 )
 
 // Handlebars lexer.
-var Handlebars = internal.Register(MustNewLexer(
+var Handlebars = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "Handlebars",
-		Aliases: []string{"handlebars"},
-		Filenames: []string{"*.handlebars"},
+		Aliases: []string{"handlebars", "hbs"},
+		Filenames: []string{"*.handlebars", "*.hbs"},
 		MimeTypes: []string{},
 	},
-	Rules{
+	handlebarsRules,
+))
+
+func handlebarsRules() Rules {
+	return Rules{
 		"root": {
 			{`[^{]+`, Other, nil},
 			{`\{\{!.*\}\}`, Comment, nil},
@@ -52,5 +56,5 @@ var Handlebars = internal.Register(MustNewLexer(
 			{`:?'(\\\\|\\'|[^'])*'`, LiteralStringSingle, nil},
 			{`[0-9](\.[0-9]*)?(eE[+-][0-9])?[flFLdD]?|0[xX][0-9a-fA-F]+[Ll]?`, LiteralNumber, nil},
 		},
-	},
-))
+	}
+}
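Every chroma lexer touched by this update gets the same mechanical treatment: the Rules{...} literal that used to be passed to MustNewLexer is wrapped in a func() Rules and handed to MustNewLazyLexer instead, so the rule set is only built the first time the lexer is actually used rather than at package init. A minimal sketch of the pattern, assuming chroma's v0.9-era API; the Example lexer, its alias, and its single rule are invented for illustration and are not part of this diff:

package h

import (
	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// Before (eager): the Rules literal is built as soon as the package initialises.
//
//	var Example = internal.Register(MustNewLexer(
//		&Config{Name: "Example", Aliases: []string{"example"}},
//		Rules{"root": {{`.+`, Text, nil}}},
//	))

// After (lazy): the identical rules live in a func() Rules, and
// MustNewLazyLexer defers calling it until the lexer is first needed.
var Example = internal.Register(MustNewLazyLexer(
	&Config{Name: "Example", Aliases: []string{"example"}},
	exampleRules,
))

func exampleRules() Rules {
	return Rules{
		"root": {
			{`.+`, Text, nil},
		},
	}
}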

vendor/github.com/alecthomas/chroma/lexers/h/haskell.go

@@ -6,14 +6,18 @@ import (
 )
 
 // Haskell lexer.
-var Haskell = internal.Register(MustNewLexer(
+var Haskell = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "Haskell",
 		Aliases: []string{"haskell", "hs"},
 		Filenames: []string{"*.hs"},
 		MimeTypes: []string{"text/x-haskell"},
 	},
-	Rules{
+	haskellRules,
+))
+
+func haskellRules() Rules {
+	return Rules{
 		"root": {
 			{`\s+`, Text, nil},
 			{`--(?![!#$%&*+./<=>?@^|_~:\\]).*?$`, CommentSingle, nil},
@@ -95,5 +99,5 @@ var Haskell = internal.Register(MustNewLexer(
 			{`\d+`, LiteralStringEscape, Pop(1)},
 			{`\s+\\`, LiteralStringEscape, Pop(1)},
 		},
-	},
-))
+	}
+}

vendor/github.com/alecthomas/chroma/lexers/h/haxe.go

@@ -6,7 +6,7 @@ import (
 )
 
 // Haxe lexer.
-var Haxe = internal.Register(MustNewLexer(
+var Haxe = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "Haxe",
 		Aliases: []string{"hx", "haxe", "hxsl"},
@@ -14,7 +14,11 @@ var Haxe = internal.Register(MustNewLexer(
 		MimeTypes: []string{"text/haxe", "text/x-haxe", "text/x-hx"},
 		DotAll: true,
 	},
-	Rules{
+	haxeRules,
+))
+
+func haxeRules() Rules {
+	return Rules{
 		"root": {
 			Include("spaces"),
 			Include("meta"),
@@ -609,8 +613,8 @@ var Haxe = internal.Register(MustNewLexer(
 			{`\}`, Punctuation, Pop(1)},
 			{`,`, Punctuation, Push("#pop", "object")},
 		},
-	},
-))
+	}
+}
 
 func haxePreProcMutator(state *LexerState) error {
 	stack, ok := state.Get("haxe-pre-proc").([][]string)

vendor/github.com/alecthomas/chroma/lexers/h/hcl.go

@@ -6,14 +6,18 @@ import (
 )
 
 // HCL lexer.
-var HCL = internal.Register(MustNewLexer(
+var HCL = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "HCL",
 		Aliases: []string{"hcl"},
 		Filenames: []string{"*.hcl"},
 		MimeTypes: []string{"application/x-hcl"},
 	},
-	Rules{
+	hclRules,
+))
+
+func hclRules() Rules {
+	return Rules{
 		"root": {
 			Include("string"),
 			Include("punctuation"),
@@ -65,5 +69,5 @@ var HCL = internal.Register(MustNewLexer(
 			{`\s+`, Text, nil},
 			{`\\\n`, Text, nil},
 		},
-	},
-))
+	}
+}

vendor/github.com/alecthomas/chroma/lexers/h/hexdump.go

@@ -6,14 +6,18 @@ import (
 )
 
 // Hexdump lexer.
-var Hexdump = internal.Register(MustNewLexer(
+var Hexdump = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "Hexdump",
 		Aliases: []string{"hexdump"},
 		Filenames: []string{},
 		MimeTypes: []string{},
 	},
-	Rules{
+	hexdumpRules,
+))
+
+func hexdumpRules() Rules {
+	return Rules{
 		"root": {
 			{`\n`, Text, nil},
 			Include("offset"),
@@ -63,5 +67,5 @@ var Hexdump = internal.Register(MustNewLexer(
 			{`\s`, Text, nil},
 			{`^\*`, Punctuation, nil},
 		},
-	},
-))
+	}
+}

vendor/github.com/alecthomas/chroma/lexers/h/hlb.go (new file, 58 lines, generated/vendored)

@@ -0,0 +1,58 @@
+package h
+
+import (
+	. "github.com/alecthomas/chroma" // nolint
+	"github.com/alecthomas/chroma/lexers/internal"
+)
+
+// HLB lexer.
+var HLB = internal.Register(MustNewLazyLexer(
+	&Config{
+		Name: "HLB",
+		Aliases: []string{"hlb"},
+		Filenames: []string{"*.hlb"},
+		MimeTypes: []string{},
+	},
+	hlbRules,
+))
+
+func hlbRules() Rules {
+	return Rules{
+		"root": {
+			{`(#.*)`, ByGroups(CommentSingle), nil},
+			{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
+			{`((\b(true|false)\b))`, ByGroups(NameBuiltin), nil},
+			{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
+			{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)(\()`, ByGroups(NameFunction, Punctuation), Push("params")},
+			{`(\{)`, ByGroups(Punctuation), Push("block")},
+			{`(\n|\r|\r\n)`, Text, nil},
+			{`.`, Text, nil},
+		},
+		"string": {
+			{`"`, LiteralString, Pop(1)},
+			{`\\"`, LiteralString, nil},
+			{`[^\\"]+`, LiteralString, nil},
+		},
+		"block": {
+			{`(\})`, ByGroups(Punctuation), Pop(1)},
+			{`(#.*)`, ByGroups(CommentSingle), nil},
+			{`((\b(0(b|B|o|O|x|X)[a-fA-F0-9]+)\b)|(\b(0|[1-9][0-9]*)\b))`, ByGroups(LiteralNumber), nil},
+			{`((\b(true|false)\b))`, ByGroups(KeywordConstant), nil},
+			{`"`, LiteralString, Push("string")},
+			{`(with)`, ByGroups(KeywordReserved), nil},
+			{`(as)([\t ]+)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(KeywordReserved, Text, NameFunction), nil},
+			{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)([\t ]+)(\{)`, ByGroups(KeywordType, Text, Punctuation), Push("block")},
+			{`(?!\b(?:scratch|image|resolve|http|checksum|chmod|filename|git|keepGitDir|local|includePatterns|excludePatterns|followPaths|generate|frontendInput|shell|run|readonlyRootfs|env|dir|user|network|security|host|ssh|secret|mount|target|localPath|uid|gid|mode|readonly|tmpfs|sourcePath|cache|mkdir|createParents|chown|createdTime|mkfile|rm|allowNotFound|allowWildcards|copy|followSymlinks|contentsOnly|unpack|createDestPath)\b)(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
+			{`(\n|\r|\r\n)`, Text, nil},
+			{`.`, Text, nil},
+		},
+		"params": {
+			{`(\))`, ByGroups(Punctuation), Pop(1)},
+			{`(variadic)`, ByGroups(Keyword), nil},
+			{`(\bstring\b|\bint\b|\bbool\b|\bfs\b|\boption\b)`, ByGroups(KeywordType), nil},
+			{`(\b[a-zA-Z_][a-zA-Z0-9]*\b)`, ByGroups(NameOther), nil},
+			{`(\n|\r|\r\n)`, Text, nil},
+			{`.`, Text, nil},
+		},
+	}
+}
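Once a lexer is registered this way it can be looked up by any of its configured names or filename globs through chroma's public API. A rough usage sketch, assuming the chroma quick helper package; the HLB source snippet below is invented for illustration:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/quick"
)

func main() {
	// Invented HLB source, just to have something to highlight.
	src := `fs build() {
	image "alpine"
	run "echo hello"
}`

	// "hlb" resolves to the lexer registered above (via its Config aliases);
	// quick.Highlight tokenises src and writes ANSI-coloured output to stdout.
	if err := quick.Highlight(os.Stdout, src, "hlb", "terminal", "monokai"); err != nil {
		log.Fatal(err)
	}
}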

vendor/github.com/alecthomas/chroma/lexers/h/html.go

@@ -8,7 +8,7 @@ import (
 )
 
 // HTML lexer.
-var HTML = internal.Register(MustNewLexer(
+var HTML = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "HTML",
 		Aliases: []string{"html"},
@@ -18,7 +18,11 @@ var HTML = internal.Register(MustNewLexer(
 		DotAll: true,
 		CaseInsensitive: true,
 	},
-	Rules{
+	htmlRules,
+))
+
+func htmlRules() Rules {
+	return Rules{
 		"root": {
 			{`[^<&]+`, Text, nil},
 			{`&\S*?;`, NameEntity, nil},
@@ -55,5 +59,5 @@ var HTML = internal.Register(MustNewLexer(
 			{`'.*?'`, LiteralString, Pop(1)},
 			{`[^\s>]+`, LiteralString, Pop(1)},
 		},
-	},
-))
+	}
+}

vendor/github.com/alecthomas/chroma/lexers/h/http.go

@@ -8,7 +8,7 @@ import (
 )
 
 // HTTP lexer.
-var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
+var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLazyLexer(
 	&Config{
 		Name: "HTTP",
 		Aliases: []string{"http"},
@@ -17,7 +17,11 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
 		NotMultiline: true,
 		DotAll: true,
 	},
-	Rules{
+	httpRules,
+)))
+
+func httpRules() Rules {
+	return Rules{
 		"root": {
 			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
 			{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
@@ -30,8 +34,8 @@ var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
 		"content": {
 			{`.+`, EmitterFunc(httpContentBlock), nil},
 		},
-	},
-)))
+	}
+}
 
 func httpContentBlock(groups []string, lexer Lexer) Iterator {
 	tokens := []Token{

vendor/github.com/alecthomas/chroma/lexers/h/hy.go

@@ -6,14 +6,18 @@ import (
 )
 
 // Hy lexer.
-var Hy = internal.Register(MustNewLexer(
+var Hy = internal.Register(MustNewLazyLexer(
 	&Config{
 		Name: "Hy",
 		Aliases: []string{"hylang"},
 		Filenames: []string{"*.hy"},
 		MimeTypes: []string{"text/x-hy", "application/x-hy"},
 	},
-	Rules{
+	hyRules,
+))
+
+func hyRules() Rules {
+	return Rules{
 		"root": {
 			{`;.*$`, CommentSingle, nil},
 			{`[,\s]+`, Text, nil},
@@ -47,5 +51,5 @@ var Hy = internal.Register(MustNewLexer(
 			{`(?<!\.)(self|None|Ellipsis|NotImplemented|False|True|cls)\b`, NameBuiltinPseudo, nil},
 			{Words(`(?<!\.)`, `\b`, `ArithmeticError`, `AssertionError`, `AttributeError`, `BaseException`, `DeprecationWarning`, `EOFError`, `EnvironmentError`, `Exception`, `FloatingPointError`, `FutureWarning`, `GeneratorExit`, `IOError`, `ImportError`, `ImportWarning`, `IndentationError`, `IndexError`, `KeyError`, `KeyboardInterrupt`, `LookupError`, `MemoryError`, `NameError`, `NotImplemented`, `NotImplementedError`, `OSError`, `OverflowError`, `OverflowWarning`, `PendingDeprecationWarning`, `ReferenceError`, `RuntimeError`, `RuntimeWarning`, `StandardError`, `StopIteration`, `SyntaxError`, `SyntaxWarning`, `SystemError`, `SystemExit`, `TabError`, `TypeError`, `UnboundLocalError`, `UnicodeDecodeError`, `UnicodeEncodeError`, `UnicodeError`, `UnicodeTranslateError`, `UnicodeWarning`, `UserWarning`, `ValueError`, `VMSError`, `Warning`, `WindowsError`, `ZeroDivisionError`), NameException, nil},
 		},
-	},
-))
+	}
+}