summaryrefslogtreecommitdiff
path: root/vendor/github.com/alecthomas/chroma/v2/lexers/http.go
diff options
context:
space:
mode:
authorMitja Felicijan <mitja.felicijan@gmail.com>2024-10-25 00:47:47 +0200
committerMitja Felicijan <mitja.felicijan@gmail.com>2024-10-25 00:47:47 +0200
commitc6cc0108ca7738023b45e0eeac0fa2390532dd93 (patch)
tree36890e6cd3091bbab8efbe686cc56f467f645bfd /vendor/github.com/alecthomas/chroma/v2/lexers/http.go
parent0130404a1dc663d4aa68d780c9bcb23a4243e68d (diff)
downloadjbmafp-master.tar.gz
Added vendor lock on depsHEADmaster
Diffstat (limited to 'vendor/github.com/alecthomas/chroma/v2/lexers/http.go')
-rw-r--r--vendor/github.com/alecthomas/chroma/v2/lexers/http.go131
1 files changed, 131 insertions, 0 deletions
diff --git a/vendor/github.com/alecthomas/chroma/v2/lexers/http.go b/vendor/github.com/alecthomas/chroma/v2/lexers/http.go
new file mode 100644
index 0000000..e756202
--- /dev/null
+++ b/vendor/github.com/alecthomas/chroma/v2/lexers/http.go
@@ -0,0 +1,131 @@
+package lexers
+
+import (
+ "strings"
+
+ . "github.com/alecthomas/chroma/v2" // nolint
+)
+
// HTTP lexer. The base lexer produced by httpRules is wrapped with
// httpBodyContentTypeLexer so that message bodies are re-lexed according
// to the Content-Type header when a matching sub-lexer exists.
var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
	&Config{
		Name:      "HTTP",
		Aliases:   []string{"http"},
		Filenames: []string{},
		MimeTypes: []string{},
		// NotMultiline/DotAll: `.` matches newlines and `^`/`$` are not
		// per-line, so the "content" state's `.+` can swallow the whole body.
		NotMultiline: true,
		DotAll:       true,
	},
	httpRules,
)))
+
// httpRules returns the lexer state machine for HTTP messages: a request
// line or status line ("root"), then the header block ("headers"), then the
// raw body ("content"). Rule order within each state is significant.
func httpRules() Rules {
	return Rules{
		"root": {
			// Request line: METHOD SP request-target SP HTTP/1.x, then headers.
			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
			// Status line: HTTP/1.x SP status-code SP reason-phrase, then headers.
			{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
		},
		"headers": {
			// "Name: value" header line.
			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
			// Obsolete line folding: a continuation line starts with whitespace.
			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
			// Blank line terminates the header block; what follows is the body.
			{`\r?\n`, Text, Push("content")},
		},
		"content": {
			// With DotAll set, `.+` captures the entire remaining body at once.
			{`.+`, EmitterFunc(httpContentBlock), nil},
		},
	}
}
+
+func httpContentBlock(groups []string, state *LexerState) Iterator {
+ tokens := []Token{
+ {Generic, groups[0]},
+ }
+ return Literator(tokens...)
+}
+
+func httpHeaderBlock(groups []string, state *LexerState) Iterator {
+ tokens := []Token{
+ {Name, groups[1]},
+ {Text, groups[2]},
+ {Operator, groups[3]},
+ {Text, groups[4]},
+ {Literal, groups[5]},
+ {Text, groups[6]},
+ }
+ return Literator(tokens...)
+}
+
+func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator {
+ tokens := []Token{
+ {Text, groups[1]},
+ {Literal, groups[2]},
+ {Text, groups[3]},
+ }
+ return Literator(tokens...)
+}
+
+func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }
+
+type httpBodyContentTyper struct{ Lexer }
+
// Tokenise runs the embedded HTTP lexer and post-processes its token
// stream: it watches for a Content-Type header, and when the body (a
// single Generic token) arrives, re-tokenises it with the lexer matching
// that media type. If no matching lexer exists the body is emitted as
// plain Text.
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	var contentType string    // media type extracted from the Content-Type header, e.g. "application/json"
	var isContentType bool    // true while the next Literal token is the Content-Type header's value
	var subIterator Iterator  // body token stream from the matched sub-lexer, drained after the outer stream ends

	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}

	return func() Token {
		token := it()

		if token == EOF {
			// Outer stream exhausted: hand over to the body sub-lexer, if any.
			if subIterator != nil {
				return subIterator()
			}
			return EOF
		}

		switch {
		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
			{
				// Header name matched; the following Literal token is its value.
				isContentType = true
			}
		case token.Type == Literal && isContentType:
			{
				isContentType = false
				contentType = strings.TrimSpace(token.Value)
				// Strip parameters such as "; charset=utf-8".
				pos := strings.Index(contentType, ";")
				if pos > 0 {
					contentType = strings.TrimSpace(contentType[:pos])
				}
			}
		case token.Type == Generic && contentType != "":
			{
				// The Generic token is the entire message body (see the
				// "content" lexer state).
				lexer := MatchMimeType(contentType)

				// application/calendar+xml can be treated as application/xml
				// if there's not a better match.
				if lexer == nil && strings.Contains(contentType, "+") {
					slashPos := strings.Index(contentType, "/")
					plusPos := strings.LastIndex(contentType, "+")
					contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
					lexer = MatchMimeType(contentType)
				}

				if lexer == nil {
					// Unknown media type: fall back to plain text for the body.
					token.Type = Text
				} else {
					subIterator, err = lexer.Tokenise(nil, token.Value)
					if err != nil {
						panic(err)
					}
					// NOTE(review): returning EOF here while subIterator is
					// pending relies on the consumer calling this iterator
					// again after EOF to drain the body tokens — verify
					// against chroma's Iterator contract.
					return EOF
				}
			}
		}
		return token
	}, nil
}