diff --git a/go.mod b/go.mod
index 7df4ce63f..31c77845d 100644
--- a/go.mod
+++ b/go.mod
@@ -119,6 +119,10 @@ require (
 	xorm.io/xorm v1.3.3-0.20221209153726-f1bfc5ce9830
 )
 
+require github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 // indirect
+
+require golang.org/x/image v0.0.0-20220413100746-70e8d0d3baa9 // indirect
+
 require (
 	cloud.google.com/go/compute v1.7.0 // indirect
 	git.sr.ht/~mariusor/go-xsd-duration v0.0.0-20220703122237-02e73435a078 // indirect
@@ -203,7 +207,9 @@ require (
 	github.com/hashicorp/hcl v1.0.0 // indirect
 	github.com/imdario/mergo v0.3.13 // indirect
 	github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 // indirect
+	github.com/jbuchbinder/gg v1.3.1-0.20220522202534-b71f553fdd3d
 	github.com/jessevdk/go-flags v1.5.0 // indirect
+	github.com/jiro4989/textimg/v3 v3.1.8
 	github.com/josharian/intern v1.0.0 // indirect
 	github.com/kevinburke/ssh_config v1.2.0 // indirect
 	github.com/klauspost/pgzip v1.2.5 // indirect
diff --git a/go.sum b/go.sum
index 68c111abd..11d0cbf7b 100644
--- a/go.sum
+++ b/go.sum
@@ -535,6 +535,8 @@ github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0kt
 github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
 github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
 github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
+github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
+github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
 github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFiguSNcXmS6rkKT+X7FdIrTtfo=
 github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
@@ -787,10 +789,16 @@ github.com/jaytaylor/html2text v0.0.0-20211105163654-bc68cce691ba h1:QFQpJdgbON7
 github.com/jaytaylor/html2text v0.0.0-20211105163654-bc68cce691ba/go.mod h1:CVKlgaMiht+LXvHG173ujK6JUhZXKb2u/BQtjPDIvyk=
 github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A=
 github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo=
+github.com/jbuchbinder/gg v1.3.0 h1:nfHEGrrXMCMIlLQIooBwPwn2IqEWhLQQ1s0WPmBpwdw=
+github.com/jbuchbinder/gg v1.3.0/go.mod h1:V0Eu/AInMEKfU25ID6D/0FRIGj+Nz1EXId1Igjq1XI4=
+github.com/jbuchbinder/gg v1.3.1-0.20220522202534-b71f553fdd3d h1:xUaIzSyN5X8wtiiOfxsLG+uFswf7CdSgd3f62t99XhE=
+github.com/jbuchbinder/gg v1.3.1-0.20220522202534-b71f553fdd3d/go.mod h1:Rjxgiu1UOnrvAD1gplSXdFBNpytrSarkIqp98olsi/g=
 github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=
 github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
 github.com/jhillyerd/enmime v0.10.1 h1:3VP8gFhK7R948YJBrna5bOgnTXEuPAoICo79kKkBKfA=
 github.com/jhillyerd/enmime v0.10.1/go.mod h1:Qpe8EEemJMFAF8+NZoWdpXvK2Yb9dRF0k/z6mkcDHsA=
+github.com/jiro4989/textimg/v3 v3.1.8 h1:tfzkegBW59IRkpn/BLMUPTK0XRXW61xQQ4sYTzMCZDI=
+github.com/jiro4989/textimg/v3 v3.1.8/go.mod h1:ohDSZdKqvQXFG9rdAAf/AVNxyvtjh32kj0nbW/zXo3k=
 github.com/jmespath/go-jmespath v0.0.0-20180206201540-c2b33e8439af/go.mod h1:Nht3zPeWKUH0NzdCt2Blrr5ys8VGpn0CEB0cQHVjt7k=
 github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
 github.com/joho/godotenv v1.5.1 h1:7eLL/+HRGLY0ldzfGMeQkb7vMd0as4CfYvUVzLqw0N0=
@@ -1382,6 +1390,8 @@ golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EH
 golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU=
 golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
 golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
+golang.org/x/image v0.0.0-20220413100746-70e8d0d3baa9 h1:LRtI4W37N+KFebI/qV0OFiLUv4GLOWeEW5hn/KEJvxE=
+golang.org/x/image v0.0.0-20220413100746-70e8d0d3baa9/go.mod h1:023OzeP/+EPmXeapQh35lcL3II3LrY8Ic+EFFKVhULM=
 golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
 golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU=
 golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
diff --git a/modules/codeimage/image/fonts/JetBrainsMono-Regular.ttf b/modules/codeimage/image/fonts/JetBrainsMono-Regular.ttf
new file mode 100644
index 000000000..3b75b2690
Binary files /dev/null and b/modules/codeimage/image/fonts/JetBrainsMono-Regular.ttf differ
diff --git a/modules/codeimage/image/image.go b/modules/codeimage/image/image.go
new file mode 100644
index 000000000..bac373284
--- /dev/null
+++ b/modules/codeimage/image/image.go
@@ -0,0 +1,75 @@
+package image
+
+import (
+	"bytes"
+	"embed"
+	"image/color"
+	"strings"
+
+	"github.com/jbuchbinder/gg"
+	"github.com/jiro4989/textimg/v3/token"
+)
+
+//go:embed fonts
+var fonts embed.FS
+
+func Draw(tokens token.Tokens) ([]byte, error) {
+	foreground := color.RGBA{205, 214, 244, 255}
+	background := color.RGBA{30, 30, 46, 255}
+
+	dc := gg.NewContext(1200, 630)
+	fgCol := foreground
+	bgCol := background
+	dc.SetColor(bgCol)
+	if err := dc.LoadFontFaceFS(fonts, "fonts/JetBrainsMono-Regular.ttf", 20); err != nil {
+		return nil, err
+	}
+	dc.Clear()
+	curX, curY := 50.0, 50.0
+
+	for _, t := range tokens {
+		switch t.Kind {
+		case token.KindColor:
+			switch t.ColorType {
+			case token.ColorTypeReset:
+				fgCol = foreground
+				bgCol = background
+			case token.ColorTypeResetForeground:
+				fgCol = foreground
+			case token.ColorTypeResetBackground:
+				bgCol = background
+			case token.ColorTypeReverse:
+				fgCol, bgCol = bgCol, fgCol
+			case token.ColorTypeForeground:
+				fgCol = color.RGBA(t.Color)
+			case token.ColorTypeBackground:
+				bgCol = color.RGBA(t.Color)
+			}
+		case token.KindText:
+			w, h := dc.MeasureMultilineString(t.Text, 1.0)
+			dc.Push()
+			dc.SetColor(bgCol)
+			dc.DrawRectangle(curX, curY, w, h)
+			dc.Fill()
+			dc.Pop()
+			dc.SetColor(fgCol)
+			dc.DrawStringAnchored(strings.ReplaceAll(strings.ReplaceAll(t.Text, "\t", " "), "\n", ""), curX, curY, 0.0, 1.0)
+			curX += w
+			if strings.Contains(t.Text, "\n") {
+				curY += h
+				curX = 50
+			}
+		}
+	}
+	dc.Push()
+	dc.SetColor(background)
+	dc.DrawRectangle(1150, 0, 50, 630)
+	dc.DrawRectangle(0, 580, 1200, 50)
+	dc.Fill()
+	dc.Pop()
+	buffer := new(bytes.Buffer)
+	if err := dc.EncodePNG(buffer); err != nil {
+		return nil, err
+	}
+	return buffer.Bytes(), nil
+}
diff --git a/modules/codeimage/parser/grammer.go b/modules/codeimage/parser/grammer.go
new file mode 100644
index 000000000..ab1073724
--- /dev/null
+++ b/modules/codeimage/parser/grammer.go
@@ -0,0 +1,1307 @@
+package parser
+
+// Code generated by peg parser/grammer.peg DO NOT EDIT.
+ +import ( + "fmt" + "io" + "os" + "sort" + "strconv" + "strings" +) + +const endSymbol rune = 1114112 + +/* The rule types inferred from the grammar are below. */ +type pegRule uint8 + +const ( + ruleUnknown pegRule = iota + ruleroot + ruleignore + rulecolors + ruletext + rulecolor + rulestandard_color + ruleextended_color + ruleextended_color_256 + ruleextended_color_rgb + ruleextended_color_prefix + ruletext_attributes + rulezero + rulenumber + ruleprefix + ruleescape_sequence + rulecolor_suffix + rulenon_color_suffix + ruledelimiter + ruleAction0 + rulePegText + ruleAction1 + ruleAction2 + ruleAction3 + ruleAction4 + ruleAction5 + ruleAction6 + ruleAction7 + ruleAction8 + ruleAction9 + ruleAction10 + ruleAction11 +) + +var rul3s = [...]string{ + "Unknown", + "root", + "ignore", + "colors", + "text", + "color", + "standard_color", + "extended_color", + "extended_color_256", + "extended_color_rgb", + "extended_color_prefix", + "text_attributes", + "zero", + "number", + "prefix", + "escape_sequence", + "color_suffix", + "non_color_suffix", + "delimiter", + "Action0", + "PegText", + "Action1", + "Action2", + "Action3", + "Action4", + "Action5", + "Action6", + "Action7", + "Action8", + "Action9", + "Action10", + "Action11", +} + +type token32 struct { + pegRule + begin, end uint32 +} + +func (t *token32) String() string { + return fmt.Sprintf("\x1B[34m%v\x1B[m %v %v", rul3s[t.pegRule], t.begin, t.end) +} + +type node32 struct { + token32 + up, next *node32 +} + +func (node *node32) print(w io.Writer, pretty bool, buffer string) { + var print func(node *node32, depth int) + print = func(node *node32, depth int) { + for node != nil { + for c := 0; c < depth; c++ { + fmt.Fprintf(w, " ") + } + rule := rul3s[node.pegRule] + quote := strconv.Quote(string(([]rune(buffer)[node.begin:node.end]))) + if !pretty { + fmt.Fprintf(w, "%v %v\n", rule, quote) + } else { + fmt.Fprintf(w, "\x1B[36m%v\x1B[m %v\n", rule, quote) + } + if node.up != nil { + print(node.up, depth+1) + } + node = node.next + } + } + print(node, 0) +} + +func (node *node32) Print(w io.Writer, buffer string) { + node.print(w, false, buffer) +} + +func (node *node32) PrettyPrint(w io.Writer, buffer string) { + node.print(w, true, buffer) +} + +type tokens32 struct { + tree []token32 +} + +func (t *tokens32) Trim(length uint32) { + t.tree = t.tree[:length] +} + +func (t *tokens32) Print() { + for _, token := range t.tree { + fmt.Println(token.String()) + } +} + +func (t *tokens32) AST() *node32 { + type element struct { + node *node32 + down *element + } + tokens := t.Tokens() + var stack *element + for _, token := range tokens { + if token.begin == token.end { + continue + } + node := &node32{token32: token} + for stack != nil && stack.node.begin >= token.begin && stack.node.end <= token.end { + stack.node.next = node.up + node.up = stack.node + stack = stack.down + } + stack = &element{node: node, down: stack} + } + if stack != nil { + return stack.node + } + return nil +} + +func (t *tokens32) PrintSyntaxTree(buffer string) { + t.AST().Print(os.Stdout, buffer) +} + +func (t *tokens32) WriteSyntaxTree(w io.Writer, buffer string) { + t.AST().Print(w, buffer) +} + +func (t *tokens32) PrettyPrintSyntaxTree(buffer string) { + t.AST().PrettyPrint(os.Stdout, buffer) +} + +func (t *tokens32) Add(rule pegRule, begin, end, index uint32) { + tree, i := t.tree, int(index) + if i >= len(tree) { + t.tree = append(tree, token32{pegRule: rule, begin: begin, end: end}) + return + } + tree[i] = token32{pegRule: rule, begin: begin, end: end} +} + 
+func (t *tokens32) Tokens() []token32 { + return t.tree +} + +type Parser struct { + ParserFunc + + Buffer string + buffer []rune + rules [32]func() bool + parse func(rule ...int) error + reset func() + Pretty bool + tokens32 +} + +func (p *Parser) Parse(rule ...int) error { + return p.parse(rule...) +} + +func (p *Parser) Reset() { + p.reset() +} + +type textPosition struct { + line, symbol int +} + +type textPositionMap map[int]textPosition + +func translatePositions(buffer []rune, positions []int) textPositionMap { + length, translations, j, line, symbol := len(positions), make(textPositionMap, len(positions)), 0, 1, 0 + sort.Ints(positions) + +search: + for i, c := range buffer { + if c == '\n' { + line, symbol = line+1, 0 + } else { + symbol++ + } + if i == positions[j] { + translations[positions[j]] = textPosition{line, symbol} + for j++; j < length; j++ { + if i != positions[j] { + continue search + } + } + break search + } + } + + return translations +} + +type parseError struct { + p *Parser + max token32 +} + +func (e *parseError) Error() string { + tokens, err := []token32{e.max}, "\n" + positions, p := make([]int, 2*len(tokens)), 0 + for _, token := range tokens { + positions[p], p = int(token.begin), p+1 + positions[p], p = int(token.end), p+1 + } + translations := translatePositions(e.p.buffer, positions) + format := "parse error near %v (line %v symbol %v - line %v symbol %v):\n%v\n" + if e.p.Pretty { + format = "parse error near \x1B[34m%v\x1B[m (line %v symbol %v - line %v symbol %v):\n%v\n" + } + for _, token := range tokens { + begin, end := int(token.begin), int(token.end) + err += fmt.Sprintf(format, + rul3s[token.pegRule], + translations[begin].line, translations[begin].symbol, + translations[end].line, translations[end].symbol, + strconv.Quote(string(e.p.buffer[begin:end]))) + } + + return err +} + +func (p *Parser) PrintSyntaxTree() { + if p.Pretty { + p.tokens32.PrettyPrintSyntaxTree(p.Buffer) + } else { + p.tokens32.PrintSyntaxTree(p.Buffer) + } +} + +func (p *Parser) WriteSyntaxTree(w io.Writer) { + p.tokens32.WriteSyntaxTree(w, p.Buffer) +} + +func (p *Parser) SprintSyntaxTree() string { + var bldr strings.Builder + p.WriteSyntaxTree(&bldr) + return bldr.String() +} + +func (p *Parser) Execute() { + buffer, _buffer, text, begin, end := p.Buffer, p.buffer, "", 0, 0 + for _, token := range p.Tokens() { + switch token.pegRule { + + case rulePegText: + begin, end = int(token.begin), int(token.end) + text = string(_buffer[begin:end]) + + case ruleAction0: + p.pushResetColor() + case ruleAction1: + p.pushText(text) + case ruleAction2: + p.pushStandardColorWithCategory(text) + case ruleAction3: + p.pushResetForegroundColor() + case ruleAction4: + p.pushResetBackgroundColor() + case ruleAction5: + p.setExtendedColor256(text) + case ruleAction6: + p.setExtendedColorR(text) + case ruleAction7: + p.setExtendedColorG(text) + case ruleAction8: + p.setExtendedColorB(text) + case ruleAction9: + p.pushExtendedColor(text) + case ruleAction10: + p.pushResetColor() + case ruleAction11: + p.pushReverseColor() + + } + } + _, _, _, _, _ = buffer, _buffer, text, begin, end +} + +func Pretty(pretty bool) func(*Parser) error { + return func(p *Parser) error { + p.Pretty = pretty + return nil + } +} + +func Size(size int) func(*Parser) error { + return func(p *Parser) error { + p.tokens32 = tokens32{tree: make([]token32, 0, size)} + return nil + } +} +func (p *Parser) Init(options ...func(*Parser) error) error { + var ( + max token32 + position, tokenIndex uint32 + buffer []rune + ) + 
for _, option := range options { + err := option(p) + if err != nil { + return err + } + } + p.reset = func() { + max = token32{} + position, tokenIndex = 0, 0 + + p.buffer = []rune(p.Buffer) + if len(p.buffer) == 0 || p.buffer[len(p.buffer)-1] != endSymbol { + p.buffer = append(p.buffer, endSymbol) + } + buffer = p.buffer + } + p.reset() + + _rules := p.rules + tree := p.tokens32 + p.parse = func(rule ...int) error { + r := 1 + if len(rule) > 0 { + r = rule[0] + } + matches := p.rules[r]() + p.tokens32 = tree + if matches { + p.Trim(tokenIndex) + return nil + } + return &parseError{p, max} + } + + add := func(rule pegRule, begin uint32) { + tree.Add(rule, begin, position, tokenIndex) + tokenIndex++ + if begin != position && position > max.end { + max = token32{rule, begin, position} + } + } + + matchDot := func() bool { + if buffer[position] != endSymbol { + position++ + return true + } + return false + } + + /*matchChar := func(c byte) bool { + if buffer[position] == c { + position++ + return true + } + return false + }*/ + + /*matchRange := func(lower byte, upper byte) bool { + if c := buffer[position]; c >= lower && c <= upper { + position++ + return true + } + return false + }*/ + + _rules = [...]func() bool{ + nil, + /* 0 root <- <(colors / ignore / text)*> */ + func() bool { + { + position1 := position + l2: + { + position3, tokenIndex3 := position, tokenIndex + { + position4, tokenIndex4 := position, tokenIndex + if !_rules[rulecolors]() { + goto l5 + } + goto l4 + l5: + position, tokenIndex = position4, tokenIndex4 + if !_rules[ruleignore]() { + goto l6 + } + goto l4 + l6: + position, tokenIndex = position4, tokenIndex4 + if !_rules[ruletext]() { + goto l3 + } + } + l4: + goto l2 + l3: + position, tokenIndex = position3, tokenIndex3 + } + add(ruleroot, position1) + } + return true + }, + /* 1 ignore <- <((prefix number? 
non_color_suffix) / escape_sequence)> */ + func() bool { + position7, tokenIndex7 := position, tokenIndex + { + position8 := position + { + position9, tokenIndex9 := position, tokenIndex + if !_rules[ruleprefix]() { + goto l10 + } + { + position11, tokenIndex11 := position, tokenIndex + if !_rules[rulenumber]() { + goto l11 + } + goto l12 + l11: + position, tokenIndex = position11, tokenIndex11 + } + l12: + if !_rules[rulenon_color_suffix]() { + goto l10 + } + goto l9 + l10: + position, tokenIndex = position9, tokenIndex9 + if !_rules[ruleescape_sequence]() { + goto l7 + } + } + l9: + add(ruleignore, position8) + } + return true + l7: + position, tokenIndex = position7, tokenIndex7 + return false + }, + /* 2 colors <- <((prefix color_suffix Action0) / (prefix color (delimiter color)* color_suffix))> */ + func() bool { + position13, tokenIndex13 := position, tokenIndex + { + position14 := position + { + position15, tokenIndex15 := position, tokenIndex + if !_rules[ruleprefix]() { + goto l16 + } + if !_rules[rulecolor_suffix]() { + goto l16 + } + if !_rules[ruleAction0]() { + goto l16 + } + goto l15 + l16: + position, tokenIndex = position15, tokenIndex15 + if !_rules[ruleprefix]() { + goto l13 + } + if !_rules[rulecolor]() { + goto l13 + } + l17: + { + position18, tokenIndex18 := position, tokenIndex + if !_rules[ruledelimiter]() { + goto l18 + } + if !_rules[rulecolor]() { + goto l18 + } + goto l17 + l18: + position, tokenIndex = position18, tokenIndex18 + } + if !_rules[rulecolor_suffix]() { + goto l13 + } + } + l15: + add(rulecolors, position14) + } + return true + l13: + position, tokenIndex = position13, tokenIndex13 + return false + }, + /* 3 text <- <(<(!'\x1b' .)+> Action1)> */ + func() bool { + position19, tokenIndex19 := position, tokenIndex + { + position20 := position + { + position21 := position + { + position24, tokenIndex24 := position, tokenIndex + if buffer[position] != rune('\x1b') { + goto l24 + } + position++ + goto l19 + l24: + position, tokenIndex = position24, tokenIndex24 + } + if !matchDot() { + goto l19 + } + l22: + { + position23, tokenIndex23 := position, tokenIndex + { + position25, tokenIndex25 := position, tokenIndex + if buffer[position] != rune('\x1b') { + goto l25 + } + position++ + goto l23 + l25: + position, tokenIndex = position25, tokenIndex25 + } + if !matchDot() { + goto l23 + } + goto l22 + l23: + position, tokenIndex = position23, tokenIndex23 + } + add(rulePegText, position21) + } + if !_rules[ruleAction1]() { + goto l19 + } + add(ruletext, position20) + } + return true + l19: + position, tokenIndex = position19, tokenIndex19 + return false + }, + /* 4 color <- <(standard_color / extended_color / text_attributes)> */ + func() bool { + position26, tokenIndex26 := position, tokenIndex + { + position27 := position + { + position28, tokenIndex28 := position, tokenIndex + if !_rules[rulestandard_color]() { + goto l29 + } + goto l28 + l29: + position, tokenIndex = position28, tokenIndex28 + if !_rules[ruleextended_color]() { + goto l30 + } + goto l28 + l30: + position, tokenIndex = position28, tokenIndex28 + if !_rules[ruletext_attributes]() { + goto l26 + } + } + l28: + add(rulecolor, position27) + } + return true + l26: + position, tokenIndex = position26, tokenIndex26 + return false + }, + /* 5 standard_color <- <((zero <(('3' / '4' / '9' / ('1' '0')) [0-7])> Action2) / (zero <(('3' / '9') '9')> Action3) / (zero <(('4' / ('1' '0')) '9')> Action4))> */ + func() bool { + position31, tokenIndex31 := position, tokenIndex + { + position32 := position + { + 
position33, tokenIndex33 := position, tokenIndex + if !_rules[rulezero]() { + goto l34 + } + { + position35 := position + { + position36, tokenIndex36 := position, tokenIndex + if buffer[position] != rune('3') { + goto l37 + } + position++ + goto l36 + l37: + position, tokenIndex = position36, tokenIndex36 + if buffer[position] != rune('4') { + goto l38 + } + position++ + goto l36 + l38: + position, tokenIndex = position36, tokenIndex36 + if buffer[position] != rune('9') { + goto l39 + } + position++ + goto l36 + l39: + position, tokenIndex = position36, tokenIndex36 + if buffer[position] != rune('1') { + goto l34 + } + position++ + if buffer[position] != rune('0') { + goto l34 + } + position++ + } + l36: + if c := buffer[position]; c < rune('0') || c > rune('7') { + goto l34 + } + position++ + add(rulePegText, position35) + } + if !_rules[ruleAction2]() { + goto l34 + } + goto l33 + l34: + position, tokenIndex = position33, tokenIndex33 + if !_rules[rulezero]() { + goto l40 + } + { + position41 := position + { + position42, tokenIndex42 := position, tokenIndex + if buffer[position] != rune('3') { + goto l43 + } + position++ + goto l42 + l43: + position, tokenIndex = position42, tokenIndex42 + if buffer[position] != rune('9') { + goto l40 + } + position++ + } + l42: + if buffer[position] != rune('9') { + goto l40 + } + position++ + add(rulePegText, position41) + } + if !_rules[ruleAction3]() { + goto l40 + } + goto l33 + l40: + position, tokenIndex = position33, tokenIndex33 + if !_rules[rulezero]() { + goto l31 + } + { + position44 := position + { + position45, tokenIndex45 := position, tokenIndex + if buffer[position] != rune('4') { + goto l46 + } + position++ + goto l45 + l46: + position, tokenIndex = position45, tokenIndex45 + if buffer[position] != rune('1') { + goto l31 + } + position++ + if buffer[position] != rune('0') { + goto l31 + } + position++ + } + l45: + if buffer[position] != rune('9') { + goto l31 + } + position++ + add(rulePegText, position44) + } + if !_rules[ruleAction4]() { + goto l31 + } + } + l33: + add(rulestandard_color, position32) + } + return true + l31: + position, tokenIndex = position31, tokenIndex31 + return false + }, + /* 6 extended_color <- <(extended_color_256 / extended_color_rgb)> */ + func() bool { + position47, tokenIndex47 := position, tokenIndex + { + position48 := position + { + position49, tokenIndex49 := position, tokenIndex + if !_rules[ruleextended_color_256]() { + goto l50 + } + goto l49 + l50: + position, tokenIndex = position49, tokenIndex49 + if !_rules[ruleextended_color_rgb]() { + goto l47 + } + } + l49: + add(ruleextended_color, position48) + } + return true + l47: + position, tokenIndex = position47, tokenIndex47 + return false + }, + /* 7 extended_color_256 <- <(extended_color_prefix delimiter zero '5' delimiter Action5)> */ + func() bool { + position51, tokenIndex51 := position, tokenIndex + { + position52 := position + if !_rules[ruleextended_color_prefix]() { + goto l51 + } + if !_rules[ruledelimiter]() { + goto l51 + } + if !_rules[rulezero]() { + goto l51 + } + if buffer[position] != rune('5') { + goto l51 + } + position++ + if !_rules[ruledelimiter]() { + goto l51 + } + { + position53 := position + if !_rules[rulenumber]() { + goto l51 + } + add(rulePegText, position53) + } + if !_rules[ruleAction5]() { + goto l51 + } + add(ruleextended_color_256, position52) + } + return true + l51: + position, tokenIndex = position51, tokenIndex51 + return false + }, + /* 8 extended_color_rgb <- <(extended_color_prefix delimiter zero '2' 
delimiter Action6 delimiter Action7 delimiter Action8)> */ + func() bool { + position54, tokenIndex54 := position, tokenIndex + { + position55 := position + if !_rules[ruleextended_color_prefix]() { + goto l54 + } + if !_rules[ruledelimiter]() { + goto l54 + } + if !_rules[rulezero]() { + goto l54 + } + if buffer[position] != rune('2') { + goto l54 + } + position++ + if !_rules[ruledelimiter]() { + goto l54 + } + { + position56 := position + if !_rules[rulenumber]() { + goto l54 + } + add(rulePegText, position56) + } + if !_rules[ruleAction6]() { + goto l54 + } + if !_rules[ruledelimiter]() { + goto l54 + } + { + position57 := position + if !_rules[rulenumber]() { + goto l54 + } + add(rulePegText, position57) + } + if !_rules[ruleAction7]() { + goto l54 + } + if !_rules[ruledelimiter]() { + goto l54 + } + { + position58 := position + if !_rules[rulenumber]() { + goto l54 + } + add(rulePegText, position58) + } + if !_rules[ruleAction8]() { + goto l54 + } + add(ruleextended_color_rgb, position55) + } + return true + l54: + position, tokenIndex = position54, tokenIndex54 + return false + }, + /* 9 extended_color_prefix <- <(zero <(('3' / '4') '8')> Action9)> */ + func() bool { + position59, tokenIndex59 := position, tokenIndex + { + position60 := position + if !_rules[rulezero]() { + goto l59 + } + { + position61 := position + { + position62, tokenIndex62 := position, tokenIndex + if buffer[position] != rune('3') { + goto l63 + } + position++ + goto l62 + l63: + position, tokenIndex = position62, tokenIndex62 + if buffer[position] != rune('4') { + goto l59 + } + position++ + } + l62: + if buffer[position] != rune('8') { + goto l59 + } + position++ + add(rulePegText, position61) + } + if !_rules[ruleAction9]() { + goto l59 + } + add(ruleextended_color_prefix, position60) + } + return true + l59: + position, tokenIndex = position59, tokenIndex59 + return false + }, + /* 10 text_attributes <- <(('0' Action10) / ('7' Action11) / ('1' / '4' / '5' / '8'))+> */ + func() bool { + position64, tokenIndex64 := position, tokenIndex + { + position65 := position + { + position68, tokenIndex68 := position, tokenIndex + if buffer[position] != rune('0') { + goto l69 + } + position++ + if !_rules[ruleAction10]() { + goto l69 + } + goto l68 + l69: + position, tokenIndex = position68, tokenIndex68 + if buffer[position] != rune('7') { + goto l70 + } + position++ + if !_rules[ruleAction11]() { + goto l70 + } + goto l68 + l70: + position, tokenIndex = position68, tokenIndex68 + { + position71, tokenIndex71 := position, tokenIndex + if buffer[position] != rune('1') { + goto l72 + } + position++ + goto l71 + l72: + position, tokenIndex = position71, tokenIndex71 + if buffer[position] != rune('4') { + goto l73 + } + position++ + goto l71 + l73: + position, tokenIndex = position71, tokenIndex71 + if buffer[position] != rune('5') { + goto l74 + } + position++ + goto l71 + l74: + position, tokenIndex = position71, tokenIndex71 + if buffer[position] != rune('8') { + goto l64 + } + position++ + } + l71: + } + l68: + l66: + { + position67, tokenIndex67 := position, tokenIndex + { + position75, tokenIndex75 := position, tokenIndex + if buffer[position] != rune('0') { + goto l76 + } + position++ + if !_rules[ruleAction10]() { + goto l76 + } + goto l75 + l76: + position, tokenIndex = position75, tokenIndex75 + if buffer[position] != rune('7') { + goto l77 + } + position++ + if !_rules[ruleAction11]() { + goto l77 + } + goto l75 + l77: + position, tokenIndex = position75, tokenIndex75 + { + position78, tokenIndex78 := position, 
tokenIndex + if buffer[position] != rune('1') { + goto l79 + } + position++ + goto l78 + l79: + position, tokenIndex = position78, tokenIndex78 + if buffer[position] != rune('4') { + goto l80 + } + position++ + goto l78 + l80: + position, tokenIndex = position78, tokenIndex78 + if buffer[position] != rune('5') { + goto l81 + } + position++ + goto l78 + l81: + position, tokenIndex = position78, tokenIndex78 + if buffer[position] != rune('8') { + goto l67 + } + position++ + } + l78: + } + l75: + goto l66 + l67: + position, tokenIndex = position67, tokenIndex67 + } + add(ruletext_attributes, position65) + } + return true + l64: + position, tokenIndex = position64, tokenIndex64 + return false + }, + /* 11 zero <- <'0'*> */ + func() bool { + { + position83 := position + l84: + { + position85, tokenIndex85 := position, tokenIndex + if buffer[position] != rune('0') { + goto l85 + } + position++ + goto l84 + l85: + position, tokenIndex = position85, tokenIndex85 + } + add(rulezero, position83) + } + return true + }, + /* 12 number <- <[0-9]+> */ + func() bool { + position86, tokenIndex86 := position, tokenIndex + { + position87 := position + if c := buffer[position]; c < rune('0') || c > rune('9') { + goto l86 + } + position++ + l88: + { + position89, tokenIndex89 := position, tokenIndex + if c := buffer[position]; c < rune('0') || c > rune('9') { + goto l89 + } + position++ + goto l88 + l89: + position, tokenIndex = position89, tokenIndex89 + } + add(rulenumber, position87) + } + return true + l86: + position, tokenIndex = position86, tokenIndex86 + return false + }, + /* 13 prefix <- <(escape_sequence '[')> */ + func() bool { + position90, tokenIndex90 := position, tokenIndex + { + position91 := position + if !_rules[ruleescape_sequence]() { + goto l90 + } + if buffer[position] != rune('[') { + goto l90 + } + position++ + add(ruleprefix, position91) + } + return true + l90: + position, tokenIndex = position90, tokenIndex90 + return false + }, + /* 14 escape_sequence <- <'\x1b'> */ + func() bool { + position92, tokenIndex92 := position, tokenIndex + { + position93 := position + if buffer[position] != rune('\x1b') { + goto l92 + } + position++ + add(ruleescape_sequence, position93) + } + return true + l92: + position, tokenIndex = position92, tokenIndex92 + return false + }, + /* 15 color_suffix <- <'m'> */ + func() bool { + position94, tokenIndex94 := position, tokenIndex + { + position95 := position + if buffer[position] != rune('m') { + goto l94 + } + position++ + add(rulecolor_suffix, position95) + } + return true + l94: + position, tokenIndex = position94, tokenIndex94 + return false + }, + /* 16 non_color_suffix <- <([A-H] / 'f' / 'S' / 'T' / 'J' / 'K')> */ + func() bool { + position96, tokenIndex96 := position, tokenIndex + { + position97 := position + { + position98, tokenIndex98 := position, tokenIndex + if c := buffer[position]; c < rune('A') || c > rune('H') { + goto l99 + } + position++ + goto l98 + l99: + position, tokenIndex = position98, tokenIndex98 + if buffer[position] != rune('f') { + goto l100 + } + position++ + goto l98 + l100: + position, tokenIndex = position98, tokenIndex98 + if buffer[position] != rune('S') { + goto l101 + } + position++ + goto l98 + l101: + position, tokenIndex = position98, tokenIndex98 + if buffer[position] != rune('T') { + goto l102 + } + position++ + goto l98 + l102: + position, tokenIndex = position98, tokenIndex98 + if buffer[position] != rune('J') { + goto l103 + } + position++ + goto l98 + l103: + position, tokenIndex = position98, tokenIndex98 + 
if buffer[position] != rune('K') { + goto l96 + } + position++ + } + l98: + add(rulenon_color_suffix, position97) + } + return true + l96: + position, tokenIndex = position96, tokenIndex96 + return false + }, + /* 17 delimiter <- <';'> */ + func() bool { + position104, tokenIndex104 := position, tokenIndex + { + position105 := position + if buffer[position] != rune(';') { + goto l104 + } + position++ + add(ruledelimiter, position105) + } + return true + l104: + position, tokenIndex = position104, tokenIndex104 + return false + }, + /* 19 Action0 <- <{ p.pushResetColor() }> */ + func() bool { + { + add(ruleAction0, position) + } + return true + }, + nil, + /* 21 Action1 <- <{ p.pushText(text) }> */ + func() bool { + { + add(ruleAction1, position) + } + return true + }, + /* 22 Action2 <- <{ p.pushStandardColorWithCategory(text) }> */ + func() bool { + { + add(ruleAction2, position) + } + return true + }, + /* 23 Action3 <- <{ p.pushResetForegroundColor() }> */ + func() bool { + { + add(ruleAction3, position) + } + return true + }, + /* 24 Action4 <- <{ p.pushResetBackgroundColor() }> */ + func() bool { + { + add(ruleAction4, position) + } + return true + }, + /* 25 Action5 <- <{ p.setExtendedColor256(text) }> */ + func() bool { + { + add(ruleAction5, position) + } + return true + }, + /* 26 Action6 <- <{ p.setExtendedColorR(text) }> */ + func() bool { + { + add(ruleAction6, position) + } + return true + }, + /* 27 Action7 <- <{ p.setExtendedColorG(text) }> */ + func() bool { + { + add(ruleAction7, position) + } + return true + }, + /* 28 Action8 <- <{ p.setExtendedColorB(text) }> */ + func() bool { + { + add(ruleAction8, position) + } + return true + }, + /* 29 Action9 <- <{ p.pushExtendedColor(text) }> */ + func() bool { + { + add(ruleAction9, position) + } + return true + }, + /* 30 Action10 <- <{ p.pushResetColor() }> */ + func() bool { + { + add(ruleAction10, position) + } + return true + }, + /* 31 Action11 <- <{ p.pushReverseColor() }> */ + func() bool { + { + add(ruleAction11, position) + } + return true + }, + } + p.rules = _rules + return nil +} diff --git a/modules/codeimage/parser/parser.go b/modules/codeimage/parser/parser.go new file mode 100644 index 000000000..29babeb7e --- /dev/null +++ b/modules/codeimage/parser/parser.go @@ -0,0 +1,74 @@ +package parser + +import ( + "strconv" + + "github.com/jiro4989/textimg/v3/color" + "github.com/jiro4989/textimg/v3/token" +) + +type ParserFunc struct { + // pegが生成するTokensと名前が衝突するので別名にする + Tk token.Tokens +} + +func Parse(s string) (token.Tokens, error) { + p := &Parser{Buffer: s} + if err := p.Init(); err != nil { + return nil, err + } + if err := p.Parse(); err != nil { + return nil, err + } + + p.Execute() + return p.Tk, nil +} + +func (p *ParserFunc) pushResetColor() { + p.Tk = append(p.Tk, token.NewResetColor()) +} + +func (p *ParserFunc) pushResetForegroundColor() { + p.Tk = append(p.Tk, token.NewResetForegroundColor()) +} + +func (p *ParserFunc) pushResetBackgroundColor() { + p.Tk = append(p.Tk, token.NewResetBackgroundColor()) +} + +func (p *ParserFunc) pushReverseColor() { + p.Tk = append(p.Tk, token.NewReverseColor()) +} + +func (p *ParserFunc) pushText(text string) { + p.Tk = append(p.Tk, token.NewText(text)) +} + +func (p *ParserFunc) pushStandardColorWithCategory(text string) { + p.Tk = append(p.Tk, token.NewStandardColorWithCategory(text)) +} + +func (p *ParserFunc) pushExtendedColor(text string) { + p.Tk = append(p.Tk, token.NewExtendedColor(text)) +} + +func (p *ParserFunc) setExtendedColor256(text string) { + n, _ := 
strconv.ParseUint(text, 10, 8)
+	p.Tk[len(p.Tk)-1].Color = color.Map256[int(n)]
+}
+
+func (p *ParserFunc) setExtendedColorR(text string) {
+	n, _ := strconv.ParseUint(text, 10, 8)
+	p.Tk[len(p.Tk)-1].Color.R = uint8(n)
+}
+
+func (p *ParserFunc) setExtendedColorG(text string) {
+	n, _ := strconv.ParseUint(text, 10, 8)
+	p.Tk[len(p.Tk)-1].Color.G = uint8(n)
+}
+
+func (p *ParserFunc) setExtendedColorB(text string) {
+	n, _ := strconv.ParseUint(text, 10, 8)
+	p.Tk[len(p.Tk)-1].Color.B = uint8(n)
+}
diff --git a/modules/highlight/highlight.go b/modules/highlight/highlight.go
index a5c38940a..4dc62d8b9 100644
--- a/modules/highlight/highlight.go
+++ b/modules/highlight/highlight.go
@@ -20,6 +20,7 @@ import (
 	"code.gitea.io/gitea/modules/util"
 
 	"github.com/alecthomas/chroma/v2"
+	"github.com/alecthomas/chroma/v2/formatters"
 	"github.com/alecthomas/chroma/v2/formatters/html"
 	"github.com/alecthomas/chroma/v2/lexers"
 	"github.com/alecthomas/chroma/v2/styles"
@@ -195,6 +196,65 @@ func File(fileName, language string, code []byte) ([]string, string, error) {
 	return lines, lexerName, nil
 }
 
+func AnsiFile(fileName, language string, code []byte) ([]string, string, error) {
+	NewContext()
+
+	if len(code) > sizeLimit {
+		code = code[:sizeLimit]
+	}
+
+	var lexer chroma.Lexer
+
+	// provided language overrides everything
+	if language != "" {
+		lexer = lexers.Get(language)
+	}
+
+	if lexer == nil {
+		if val, ok := highlightMapping[filepath.Ext(fileName)]; ok {
+			lexer = lexers.Get(val)
+		}
+	}
+
+	if lexer == nil {
+		guessLanguage := analyze.GetCodeLanguage(fileName, code)
+
+		lexer = lexers.Get(guessLanguage)
+		if lexer == nil {
+			lexer = lexers.Match(fileName)
+			if lexer == nil {
+				lexer = lexers.Fallback
+			}
+		}
+	}
+
+	lexerName := formatLexerName(lexer.Config().Name)
+
+	iterator, err := lexer.Tokenise(nil, string(code))
+	if err != nil {
+		return nil, "", fmt.Errorf("can't tokenize code: %w", err)
+	}
+
+	tokensLines := chroma.SplitTokensIntoLines(iterator.Tokens())
+	ansiBuf := &bytes.Buffer{}
+
+	formatter := formatters.TTY16m
+
+	lines := make([]string, 0, len(tokensLines))
+	for _, tokens := range tokensLines {
+		iterator = chroma.Literator(tokens...)
+		err = formatter.Format(ansiBuf, styles.CatppuccinMocha, iterator)
+		if err != nil {
+			return nil, "", fmt.Errorf("can't format code: %w", err)
+		}
+
+		lines = append(lines, strings.ReplaceAll(ansiBuf.String(), "\033[3m", ""))
+		ansiBuf.Reset()
+	}
+
+	return lines, lexerName, nil
+}
+
 // PlainText returns non-highlighted HTML for code
 func PlainText(code []byte) []string {
 	r := bufio.NewReader(bytes.NewReader(code))
diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go
index 2cc862ca5..8a2f84bcb 100644
--- a/routers/web/repo/view.go
+++ b/routers/web/repo/view.go
@@ -26,6 +26,8 @@ import (
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/base"
 	"code.gitea.io/gitea/modules/charset"
+	"code.gitea.io/gitea/modules/codeimage/image"
+	"code.gitea.io/gitea/modules/codeimage/parser"
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/context"
 	"code.gitea.io/gitea/modules/git"
@@ -509,6 +511,12 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st
 		statuses[i], fileContent[i] = charset.EscapeControlHTML(line, ctx.Locale)
 		status = status.Or(statuses[i])
 	}
+
+	ogImg := ctx.Repo.Repository.HTMLURL() + "/src/og/" + ctx.Repo.BranchName
+	if len(ctx.Repo.TreePath) > 0 {
+		ogImg += "/" + util.PathEscapeSegments(ctx.Repo.TreePath)
+	}
+	ctx.Data["OgImage"] = ogImg
 	ctx.Data["EscapeStatus"] = status
 	ctx.Data["FileContent"] = fileContent
 	ctx.Data["LineEscapeStatus"] = statuses
@@ -699,6 +707,55 @@ func checkCitationFile(ctx *context.Context, entry *git.TreeEntry) {
 	}
 }
 
+func OgImage(ctx *context.Context) {
+	entry, err := ctx.Repo.Commit.GetTreeEntryByPath(ctx.Repo.TreePath)
+	if err != nil {
+		log.Error("%v", err)
+		return
+	}
+	blob := entry.Blob()
+	buf, dataRc, _, err := getFileReader(ctx.Repo.Repository.ID, blob)
+	if err != nil {
+		ctx.ServerError("getFileReader", err)
+		return
+	}
+	defer dataRc.Close()
+
+	language := ""
+	indexFilename, worktree, deleteTemporaryFile, err := ctx.Repo.GitRepo.ReadTreeToTemporaryIndex(ctx.Repo.CommitID)
+	if err == nil {
+		defer deleteTemporaryFile()
+		filename2attribute2info, err := ctx.Repo.GitRepo.CheckAttribute(git.CheckAttributeOpts{
+			CachedOnly: true,
+			Attributes: []string{"linguist-language", "gitlab-language"},
+			Filenames:  []string{ctx.Repo.TreePath},
+			IndexFile:  indexFilename,
+			WorkTree:   worktree,
+		})
+		if err != nil {
+			log.Error("Unable to load attributes for %-v:%s. Error: %v", ctx.Repo.Repository, ctx.Repo.TreePath, err)
+		}
+
+		language = filename2attribute2info[ctx.Repo.TreePath]["linguist-language"]
+		if language == "" || language == "unspecified" {
+			language = filename2attribute2info[ctx.Repo.TreePath]["gitlab-language"]
+		}
+		if language == "unspecified" {
+			language = ""
+		}
+	}
+
+	ansiContent, _, _ := highlight.AnsiFile(blob.Name(), language, buf)
+	tks, _ := parser.Parse(strings.Join(ansiContent, ""))
+	img, err := image.Draw(tks)
+	if err != nil {
+		log.Error("Drawing image failed: %v", err)
+		return
+	}
+
+	ctx.Write(img)
+}
+
 // Home render repository home page
 func Home(ctx *context.Context) {
 	if setting.EnableFeed {
@@ -965,6 +1022,11 @@ func renderCode(ctx *context.Context) {
 	ctx.HTML(http.StatusOK, tplRepoHome)
 }
 
+func HighlightCodeToImage() error {
+
+	return nil
+}
+
 // RenderUserCards render a page show users according the input template
 func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOptions) ([]*user_model.User, error), tpl base.TplName) {
 	page := ctx.FormInt("page")
diff --git a/routers/web/web.go b/routers/web/web.go
index 6aee3dbed..ce1314735 100644
--- a/routers/web/web.go
+++ b/routers/web/web.go
@@ -1470,6 +1470,7 @@ func RegisterRoutes(m *web.Route) {
 			m.Get("/branch/*", context.RepoRefByType(context.RepoRefBranch), repo.Home)
 			m.Get("/tag/*", context.RepoRefByType(context.RepoRefTag), repo.Home)
 			m.Get("/commit/*", context.RepoRefByType(context.RepoRefCommit), repo.Home)
+			m.Get("/og/*", context.RepoRefByType(context.RepoRefBranch), repo.OgImage)
 			// "/*" route is deprecated, and kept for backward compatibility
 			m.Get("/*", context.RepoRefByType(context.RepoRefLegacy), repo.Home)
 		}, repo.SetEditorconfigIfExists)
diff --git a/templates/base/head.tmpl b/templates/base/head.tmpl
index 709be20cb..d4880a99c 100644
--- a/templates/base/head.tmpl
+++ b/templates/base/head.tmpl
@@ -37,6 +37,19 @@
 {{if .Owner.Description}}
 {{end}}
+{{else if .PageIsViewCode }}
+
+
+
+
+
+
+
+
+
+
+
 {{else if .Repository}}
 {{if .Issue}}
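
Review note: a minimal sketch of how the new pieces compose. highlight.AnsiFile emits one ANSI-colored string per source line, parser.Parse turns the escape sequences into color/text tokens, and image.Draw renders those tokens onto the 1200x630 PNG that the new /src/og/ route serves as the og:image. This is an illustrative test under assumed conditions (sample source, file name, and output location are made up for the example), not part of the patch:

package codeimage_test

import (
	"os"
	"strings"
	"testing"

	"code.gitea.io/gitea/modules/codeimage/image"
	"code.gitea.io/gitea/modules/codeimage/parser"
	"code.gitea.io/gitea/modules/highlight"
)

func TestRenderOgImage(t *testing.T) {
	src := []byte("package main\n\nfunc main() {\n\tprintln(\"hello\")\n}\n")

	// Highlight to 24-bit ANSI escape sequences, one string per line.
	ansiLines, _, err := highlight.AnsiFile("main.go", "", src)
	if err != nil {
		t.Fatal(err)
	}

	// Parse the escape sequences into color and text tokens.
	tokens, err := parser.Parse(strings.Join(ansiLines, ""))
	if err != nil {
		t.Fatal(err)
	}

	// Draw the tokens as a PNG sized for an OpenGraph preview card.
	png, err := image.Draw(tokens)
	if err != nil {
		t.Fatal(err)
	}
	if err := os.WriteFile(t.TempDir()+"/og.png", png, 0o600); err != nil {
		t.Fatal(err)
	}
}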