File: http.go

package info (click to toggle)
golang-github-alecthomas-chroma-v2 2.5.0-1
  • links: PTS, VCS
  • area: main
  • in suites:
  • size: 7,980 kB
  • sloc: xml: 33,149; python: 589; javascript: 357; makefile: 36; sh: 36
file content (131 lines) | stat: -rw-r--r-- 3,331 bytes parent folder | download
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
package lexers

import (
	"strings"

	. "github.com/alecthomas/chroma/v2" // nolint
)

// HTTP lexer.
// HTTP lexer.
//
// The base lexer produced by MustNewLexer is wrapped in
// httpBodyContentTypeLexer so that message bodies are re-tokenised with a
// lexer matching the Content-Type header, when one is available.
var HTTP = Register(httpBodyContentTypeLexer(MustNewLexer(
	&Config{
		Name:         "HTTP",
		Aliases:      []string{"http"},
		Filenames:    []string{},
		MimeTypes:    []string{},
		NotMultiline: true,
		DotAll:       true,
	},
	httpRules,
)))

// httpRules returns the state machine for the HTTP lexer.
//
// "root" matches either a request line (METHOD SP target SP HTTP/version)
// or a status line (HTTP/version SP code SP reason), then pushes "headers".
// "headers" consumes "Name: value" lines and folded continuation lines
// (leading whitespace), delegating token emission to EmitterFuncs; a blank
// line pushes "content", where the remainder of the message is matched as
// the body.
func httpRules() Rules {
	return Rules{
		"root": {
			{`(GET|POST|PUT|DELETE|HEAD|OPTIONS|TRACE|PATCH|CONNECT)( +)([^ ]+)( +)(HTTP)(/)([12]\.[01])(\r?\n|\Z)`, ByGroups(NameFunction, Text, NameNamespace, Text, KeywordReserved, Operator, LiteralNumber, Text), Push("headers")},
			{`(HTTP)(/)([12]\.[01])( +)(\d{3})( +)([^\r\n]+)(\r?\n|\Z)`, ByGroups(KeywordReserved, Operator, LiteralNumber, Text, LiteralNumber, Text, NameException, Text), Push("headers")},
		},
		"headers": {
			{`([^\s:]+)( *)(:)( *)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpHeaderBlock), nil},
			{`([\t ]+)([^\r\n]+)(\r?\n|\Z)`, EmitterFunc(httpContinuousHeaderBlock), nil},
			{`\r?\n`, Text, Push("content")},
		},
		"content": {
			// DotAll is set in the Config, so `.+` spans the whole body.
			{`.+`, EmitterFunc(httpContentBlock), nil},
		},
	}
}

// httpContentBlock emits the entire matched body text as one Generic token.
// The Generic type acts as a sentinel that Tokenise later replaces with a
// Content-Type specific sub-lexer's output when possible.
func httpContentBlock(groups []string, state *LexerState) Iterator {
	return Literator(Token{Generic, groups[0]})
}

// httpHeaderBlock tokenises one "Name: value" header line from its regex
// capture groups: name, padding, colon, padding, value, line terminator.
func httpHeaderBlock(groups []string, state *LexerState) Iterator {
	types := []TokenType{Name, Text, Operator, Text, Literal, Text}
	tokens := make([]Token, len(types))
	for i, typ := range types {
		tokens[i] = Token{typ, groups[i+1]}
	}
	return Literator(tokens...)
}

// httpContinuousHeaderBlock tokenises a folded (continuation) header line:
// leading whitespace, the continued value, and the line terminator.
func httpContinuousHeaderBlock(groups []string, state *LexerState) Iterator {
	return Literator(
		Token{Text, groups[1]},
		Token{Literal, groups[2]},
		Token{Text, groups[3]},
	)
}

func httpBodyContentTypeLexer(lexer Lexer) Lexer { return &httpBodyContentTyper{lexer} }

type httpBodyContentTyper struct{ Lexer }

// Tokenise runs the embedded lexer and post-processes its token stream:
// it watches the header tokens for a Content-Type, and when the body
// (a single Generic token) arrives, re-tokenises it with a lexer matched
// to that MIME type. If no matching lexer exists the body is downgraded
// to a plain Text token.
func (d *httpBodyContentTyper) Tokenise(options *TokeniseOptions, text string) (Iterator, error) { // nolint: gocognit
	var contentType string
	var isContentType bool
	var subIterator Iterator

	it, err := d.Lexer.Tokenise(options, text)
	if err != nil {
		return nil, err
	}

	return func() Token {
		token := it()

		if token == EOF {
			// Once the outer stream ends, drain the body sub-lexer
			// (if one was started) before reporting EOF ourselves.
			if subIterator != nil {
				return subIterator()
			}
			return EOF
		}

		switch {
		case token.Type == Name && strings.ToLower(token.Value) == "content-type":
			{
				// Header name seen; the next Literal token is its value.
				isContentType = true
			}
		case token.Type == Literal && isContentType:
			{
				isContentType = false
				contentType = strings.TrimSpace(token.Value)
				// Strip any media-type parameters, e.g. "; charset=utf-8".
				pos := strings.Index(contentType, ";")
				if pos > 0 {
					contentType = strings.TrimSpace(contentType[:pos])
				}
			}
		case token.Type == Generic && contentType != "":
			{
				lexer := MatchMimeType(contentType)

				// application/calendar+xml can be treated as application/xml
				// if there's not a better match.
				if lexer == nil && strings.Contains(contentType, "+") {
					// NOTE(review): assumes the media type contains a "/";
					// if it doesn't, slashPos is -1 and the rewritten type
					// is just the "+" suffix — TODO confirm acceptable.
					slashPos := strings.Index(contentType, "/")
					plusPos := strings.LastIndex(contentType, "+")
					contentType = contentType[:slashPos+1] + contentType[plusPos+1:]
					lexer = MatchMimeType(contentType)
				}

				if lexer == nil {
					// No lexer for this type: keep the body as plain text.
					token.Type = Text
				} else {
					// Replace the Generic body token with the sub-lexer's
					// stream; return EOF here so the caller falls through to
					// subIterator on subsequent calls.
					subIterator, err = lexer.Tokenise(nil, token.Value)
					if err != nil {
						panic(err)
					}
					return EOF
				}
			}
		}
		return token
	}, nil
}