Server-side syntax highlighting for all code (#12047)
This PR does a few things:

* Remove all traces of highlight.js
* Use the chroma library to provide fast syntax highlighting directly on the server
* Provide syntax highlighting for diffs
* Re-style both the unified and split diff views
* Add custom syntax-highlighting styles for both the regular and arc-green themes

Fixes #7729
Fixes #10157
Fixes #11825
Fixes #7728
Fixes #3872
Fixes #3682
And perhaps gets closer to #9553

Follow-up fixes squashed into this commit:

* fix line marker
* fix repo search
* fix single-line select
* properly load settings
* npm uninstall highlight.js
* review suggestion
* code review
* forgot to call function
* fix test
* apply suggestions from code review (suggestions from @silverwind, thanks)
* code review
* copy/paste error
* use a const for the highlight size limit
* update web_src/less/_repository.less
* update size limit to 1MB and other styling tweaks
* fix highlighting for certain diff sections
* fix test
* add worker back as suggested

Co-authored-by: silverwind <me@silverwind.io>
Co-authored-by: Lauris BH <lauris@nix.lv>
This commit is contained in:
parent
ce5f2b9845
commit
af7ffaa279
336 changed files with 37293 additions and 769 deletions
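The heart of the change is running chroma on the server instead of shipping highlight.js to the browser. Below is a minimal, hedged sketch of that flow, not Gitea's actual code: highlightFile is an invented helper and the "github" style is an arbitrary choice. It picks a lexer by file name, falls back to plain text when nothing matches, and renders highlighted HTML. The real integration differs (custom CSS for the regular and arc-green themes, and the 1MB size limit mentioned in the commit message).

package main

import (
	"os"

	"github.com/alecthomas/chroma/formatters/html"
	"github.com/alecthomas/chroma/lexers"
	"github.com/alecthomas/chroma/styles"
)

// highlightFile is a hypothetical helper: pick a lexer from the file
// name, fall back to plain text, and write highlighted HTML to stdout.
func highlightFile(filename, source string) error {
	lexer := lexers.Match(filename)
	if lexer == nil {
		lexer = lexers.Fallback // plain-text lexer when nothing matches
	}
	it, err := lexer.Tokenise(nil, source)
	if err != nil {
		return err
	}
	// Default formatter options emit inline styles; Gitea's integration
	// instead styles tokens via its own CSS (regular and arc-green).
	return html.New().Format(os.Stdout, styles.Get("github"), it)
}

func main() {
	_ = highlightFile("main.go", "package main\n\nfunc main() {}\n")
}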
vendor/github.com/alecthomas/chroma/lexers/h/http.go: 128 additions (generated, vendored, normal file)
@@ -0,0 +1,128 @@
package h

import (
	"strings"

	. "github.com/alecthomas/chroma" // nolint
	"github.com/alecthomas/chroma/lexers/internal"
)

// HTTP lexer.
var HTTP = internal.Register(httpBodyContentTypeLexer(MustNewLexer(
	&Config{
		Name:         "HTTP",
		Aliases:      []string{"http"},
		Filenames:    []string{},
		MimeTypes:    []string{},
		NotMultiline: true,
		DotAll:       true,
	},
	Rules{
		"root": {
			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)(1\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
			{`(HTTP)(/)(1\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
		},
		"headers": {
			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
			{`\r?\n`, Text, Push("content")},
		},
		"content": {
			{`.+`, EmitterFunc(httpContentBlock), nil},
		},
	},
)))

func httpContentBlock(groups []string, lexer Lexer) Iterator {
	tokens := []Token{
		{Generic, groups[0]},
	}
	return Literator(tokens...)
}

func httpHeaderBlock(groups []string, lexer Lexer) Iterator {
	tokens := []Token{
		{Name, groups[1]},
		{Text, groups[2]},
		{Operator, groups[3]},
		{Text, groups[4]},
		{Literal, groups[5]},
		{Text, groups[6]},
	}
	return Literator(tokens...)
}

func httpContinuousHeaderBlock(groups []string, lexer Lexer) Iterator {
	tokens := []Token{
		{Text, groups[1]},
		{Literal, groups[2]},
		{Text, groups[3]},
	}
	return Literator(tokens...)
}

func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }

type httpBodyContentTyper struct{ Lexer }

func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	var contentType string
	var isContentType bool
	var subIterator Iterator

	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}

	return func() Token {
		token := it()

		if token == EOF {
			if subIterator != nil {
				return subIterator()
			}
			return EOF
		}

		switch {
		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
			{
				isContentType = true
			}
		case token.Type == Literal && isContentType:
			{
				isContentType = false
				contentType = strings.TrimSpace(token.Value)
				pos := strings.Index(contentType, ";")
				if pos > 0 {
					contentType = strings.TrimSpace(contentType[:pos])
				}
			}
		case token.Type == Generic && contentType != "":
			{
				lexer := internal.MatchMimeType(contentType)

				// application/calendar+xml can be treated as application/xml
				// if there's not a better match.
				if lexer == nil && strings.Contains(contentType, "+") {
					slashPos := strings.Index(contentType, "/")
					plusPos := strings.LastIndex(contentType, "+")
					contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
					lexer = internal.MatchMimeType(contentType)
				}

				if lexer == nil {
					token.Type = Text
				} else {
					subIterator, err = lexer.Tokenise(nil, token.Value)
					if err != nil {
						panic(err)
					}
					return EOF
				}
			}
		}
		return token
	}, nil
}
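For a sense of what this lexer emits, here is a small usage sketch; the sample request is invented for illustration. lexers.Get("http") resolves the "http" alias registered in Config above, the request line pushes the "headers" state, and httpHeaderBlock splits each header line into name, colon, and value tokens. A request with a Content-Type header and a body would additionally route the body through httpBodyContentTyper, which swaps in a sub-lexer matched by MIME type via internal.MatchMimeType.

package main

import (
	"fmt"

	"github.com/alecthomas/chroma/lexers"
)

func main() {
	// An invented request, headers only, to keep the token stream simple.
	req := "GET /index.html HTTP/1.1\r\n" +
		"Host: example.com\r\n" +
		"Accept: text/html\r\n"

	lexer := lexers.Get("http") // resolved via the alias registered in Config
	it, err := lexer.Tokenise(nil, req)
	if err != nil {
		panic(err)
	}
	for _, tok := range it.Tokens() {
		fmt.Printf("%-20v %q\n", tok.Type, tok.Value)
	}
}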