chore: implement part of html renderer

This commit is contained in:
Steven 2023-12-13 23:50:05 +08:00
parent 453707d18c
commit 43ef9eaced
35 changed files with 449 additions and 91 deletions

View File

@ -1,7 +1,9 @@
package ast package ast
import "fmt"
type BaseBlock struct { type BaseBlock struct {
Node BaseNode
} }
type LineBreak struct { type LineBreak struct {
@ -14,6 +16,10 @@ func (*LineBreak) Type() NodeType {
return NodeTypeLineBreak return NodeTypeLineBreak
} }
// String returns a debug representation of the node: the type name only,
// since a line break carries no content.
func (n *LineBreak) String() string {
	return n.Type().String()
}
type Paragraph struct { type Paragraph struct {
BaseBlock BaseBlock
@ -26,6 +32,14 @@ func (*Paragraph) Type() NodeType {
return NodeTypeParagraph return NodeTypeParagraph
} }
// String returns a debug representation of the paragraph: its type name
// followed by each child's representation, space-separated.
func (n *Paragraph) String() string {
	result := n.Type().String()
	for i := range n.Children {
		result += " " + n.Children[i].String()
	}
	return result
}
type CodeBlock struct { type CodeBlock struct {
BaseBlock BaseBlock
@ -39,6 +53,10 @@ func (*CodeBlock) Type() NodeType {
return NodeTypeCodeBlock return NodeTypeCodeBlock
} }
// String returns a debug representation of the code block: the type name
// followed by its language tag and raw content, space-separated.
func (n *CodeBlock) String() string {
	return n.Type().String() + " " + n.Language + " " + n.Content
}
type Heading struct { type Heading struct {
BaseBlock BaseBlock
@ -52,6 +70,14 @@ func (*Heading) Type() NodeType {
return NodeTypeHeading return NodeTypeHeading
} }
// String returns a debug representation of the heading: its type name,
// the heading level, then each child's representation, space-separated.
func (n *Heading) String() string {
	result := fmt.Sprintf("%s %d", n.Type().String(), n.Level)
	for i := range n.Children {
		result += " " + n.Children[i].String()
	}
	return result
}
type HorizontalRule struct { type HorizontalRule struct {
BaseBlock BaseBlock
@ -65,6 +91,10 @@ func (*HorizontalRule) Type() NodeType {
return NodeTypeHorizontalRule return NodeTypeHorizontalRule
} }
// String returns a debug representation of the node: the type name only.
// The rule's Symbol is intentionally omitted from the debug form.
func (n *HorizontalRule) String() string {
	return n.Type().String()
}
type Blockquote struct { type Blockquote struct {
BaseBlock BaseBlock
@ -76,3 +106,11 @@ var NodeTypeBlockquote = NewNodeType("Blockquote")
func (*Blockquote) Type() NodeType { func (*Blockquote) Type() NodeType {
return NodeTypeBlockquote return NodeTypeBlockquote
} }
// String returns a debug representation of the blockquote: its type name
// followed by each child's representation, space-separated.
func (n *Blockquote) String() string {
	result := n.Type().String()
	for i := range n.Children {
		result += " " + n.Children[i].String()
	}
	return result
}

View File

@ -1,7 +1,7 @@
package ast package ast
type BaseInline struct { type BaseInline struct {
Node BaseNode
} }
type Text struct { type Text struct {
@ -16,10 +16,14 @@ func (*Text) Type() NodeType {
return NodeTypeText return NodeTypeText
} }
// String returns a debug representation of the text node: the type name
// followed by its raw content.
func (n *Text) String() string {
	return n.Type().String() + " " + n.Content
}
type Bold struct { type Bold struct {
BaseInline BaseInline
// Symbol is "*" or "_" // Symbol is "*" or "_".
Symbol string Symbol string
Content string Content string
} }
@ -30,10 +34,14 @@ func (*Bold) Type() NodeType {
return NodeTypeBold return NodeTypeBold
} }
// String returns a debug representation of the bold node: the type name,
// the delimiter symbol ("*" or "_"), and the content, space-separated.
func (n *Bold) String() string {
	return n.Type().String() + " " + n.Symbol + " " + n.Content
}
type Italic struct { type Italic struct {
BaseInline BaseInline
// Symbol is "*" or "_" // Symbol is "*" or "_".
Symbol string Symbol string
Content string Content string
} }
@ -44,10 +52,14 @@ func (*Italic) Type() NodeType {
return NodeTypeItalic return NodeTypeItalic
} }
// String returns a debug representation of the italic node: the type name,
// the delimiter symbol ("*" or "_"), and the content, space-separated.
func (n *Italic) String() string {
	return n.Type().String() + " " + n.Symbol + " " + n.Content
}
type BoldItalic struct { type BoldItalic struct {
BaseInline BaseInline
// Symbol is "*" or "_" // Symbol is "*" or "_".
Symbol string Symbol string
Content string Content string
} }
@ -58,6 +70,10 @@ func (*BoldItalic) Type() NodeType {
return NodeTypeBoldItalic return NodeTypeBoldItalic
} }
// String returns a debug representation of the bold-italic node: the type
// name, the delimiter symbol ("*" or "_"), and the content, space-separated.
func (n *BoldItalic) String() string {
	return n.Type().String() + " " + n.Symbol + " " + n.Content
}
type Code struct { type Code struct {
BaseInline BaseInline
@ -70,6 +86,10 @@ func (*Code) Type() NodeType {
return NodeTypeCode return NodeTypeCode
} }
// String returns a debug representation of the inline-code node: the type
// name followed by its content.
func (n *Code) String() string {
	return n.Type().String() + " " + n.Content
}
type Image struct { type Image struct {
BaseInline BaseInline
@ -83,6 +103,10 @@ func (*Image) Type() NodeType {
return NodeTypeImage return NodeTypeImage
} }
// String returns a debug representation of the image node: the type name,
// the alt text, and the URL, space-separated.
func (n *Image) String() string {
	return n.Type().String() + " " + n.AltText + " " + n.URL
}
type Link struct { type Link struct {
BaseInline BaseInline
@ -96,6 +120,10 @@ func (*Link) Type() NodeType {
return NodeTypeLink return NodeTypeLink
} }
// String returns a debug representation of the link node: the type name,
// the link text, and the URL, space-separated.
func (n *Link) String() string {
	return n.Type().String() + " " + n.Text + " " + n.URL
}
type Tag struct { type Tag struct {
BaseInline BaseInline
@ -108,6 +136,10 @@ func (*Tag) Type() NodeType {
return NodeTypeTag return NodeTypeTag
} }
// String returns a debug representation of the tag node: the type name
// followed by the tag content (without the leading "#").
func (n *Tag) String() string {
	return n.Type().String() + " " + n.Content
}
type Strikethrough struct { type Strikethrough struct {
BaseInline BaseInline
@ -119,3 +151,7 @@ var NodeTypeStrikethrough = NewNodeType("Strikethrough")
func (*Strikethrough) Type() NodeType { func (*Strikethrough) Type() NodeType {
return NodeTypeStrikethrough return NodeTypeStrikethrough
} }
// String returns a debug representation of the strikethrough node: the
// type name followed by its content.
func (n *Strikethrough) String() string {
	return n.Type().String() + " " + n.Content
}

View File

@ -1,7 +1,36 @@
package ast package ast
type Node interface { type Node interface {
// Type returns a node type.
Type() NodeType Type() NodeType
// String returns a string representation of this node.
// This method is used for debugging.
String() string
// GetParent returns a parent node of this node.
GetParent() Node
// GetPrevSibling returns a previous sibling node of this node.
GetPrevSibling() Node
// GetNextSibling returns a next sibling node of this node.
GetNextSibling() Node
// GetChildren returns children nodes of this node.
GetChildren() []Node
// SetParent sets a parent node to this node.
SetParent(Node)
// SetPrevSibling sets a previous sibling node to this node.
SetPrevSibling(Node)
// SetNextSibling sets a next sibling node to this node.
SetNextSibling(Node)
// SetChildren sets children nodes to this node.
SetChildren([]Node)
} }
type NodeType int type NodeType int
@ -18,3 +47,45 @@ func NewNodeType(name string) NodeType {
nodeTypeIndex++ nodeTypeIndex++
return nodeTypeIndex return nodeTypeIndex
} }
// BaseNode holds the tree-navigation state (parent, siblings, children)
// shared by all AST nodes. Concrete node types embed it (through
// BaseBlock / BaseInline) to satisfy the Get*/Set* methods of Node.
type BaseNode struct {
	parent      Node
	prevSibling Node
	nextSibling Node
	children    []Node
}

// GetParent returns the parent node, or nil if this node has no parent.
func (n *BaseNode) GetParent() Node {
	return n.parent
}

// GetPrevSibling returns the previous sibling, or nil if this is the
// first node in its sequence.
func (n *BaseNode) GetPrevSibling() Node {
	return n.prevSibling
}

// GetNextSibling returns the next sibling, or nil if this is the last
// node in its sequence.
func (n *BaseNode) GetNextSibling() Node {
	return n.nextSibling
}

// GetChildren returns the child nodes; may be nil for leaf nodes.
func (n *BaseNode) GetChildren() []Node {
	return n.children
}

// SetParent records node as this node's parent.
func (n *BaseNode) SetParent(node Node) {
	n.parent = node
}

// SetPrevSibling records node as this node's previous sibling.
func (n *BaseNode) SetPrevSibling(node Node) {
	n.prevSibling = node
}

// SetNextSibling records node as this node's next sibling.
func (n *BaseNode) SetNextSibling(node Node) {
	n.nextSibling = node
}

// SetChildren replaces this node's children with nodes. The slice is
// stored as-is (not copied), so the caller must not mutate it afterwards.
func (n *BaseNode) SetChildren(nodes []Node) {
	n.children = nodes
}

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -21,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{} contentTokens := []*tokenizer.Token{}
for _, token := range tokens[2:] { for _, token := range tokens[2:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline { if token.Type == tokenizer.Newline {
break break
} }
contentTokens = append(contentTokens, token)
} }
if len(contentTokens) == 0 { if len(contentTokens) == 0 {
return 0, false return 0, false
@ -33,15 +35,18 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 2, true return len(contentTokens) + 2, true
} }
func (p *BlockquoteParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *BlockquoteParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
contentTokens := tokens[2:size] contentTokens := tokens[2:size]
children := ParseInline(contentTokens) blockquote := &ast.Blockquote{}
return &ast.Blockquote{ children, err := ParseInline(blockquote, contentTokens)
Children: children, if err != nil {
return nil, err
} }
blockquote.Children = children
return blockquote, nil
} }

View File

@ -31,6 +31,7 @@ func TestBlockquoteParser(t *testing.T) {
&ast.Text{ &ast.Text{
Content: "Hello", Content: "Hello",
}, },
&ast.LineBreak{},
}, },
}, },
}, },
@ -42,6 +43,7 @@ func TestBlockquoteParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.blockquote, NewBlockquoteParser().Parse(tokens)) node, _ := NewBlockquoteParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.blockquote}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -43,10 +45,10 @@ func (*BoldParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + 2, true return cursor + 2, true
} }
func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *BoldParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
prefixTokenType := tokens[0].Type prefixTokenType := tokens[0].Type
@ -54,5 +56,5 @@ func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Bold{ return &ast.Bold{
Symbol: prefixTokenType, Symbol: prefixTokenType,
Content: tokenizer.Stringify(contentTokens), Content: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -43,10 +45,10 @@ func (*BoldItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + 3, true return cursor + 3, true
} }
func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
prefixTokenType := tokens[0].Type prefixTokenType := tokens[0].Type
@ -54,5 +56,5 @@ func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.BoldItalic{ return &ast.BoldItalic{
Symbol: prefixTokenType, Symbol: prefixTokenType,
Content: tokenizer.Stringify(contentTokens), Content: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -44,6 +44,7 @@ func TestBoldItalicParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.boldItalic, NewBoldItalicParser().Parse(tokens)) node, _ := NewBoldItalicParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.boldItalic}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -44,6 +44,7 @@ func TestBoldParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.bold, NewBoldParser().Parse(tokens)) node, _ := NewBoldParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.bold}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -36,14 +38,14 @@ func (*CodeParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 2, true return len(contentTokens) + 2, true
} }
func (p *CodeParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *CodeParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
contentTokens := tokens[1 : size-1] contentTokens := tokens[1 : size-1]
return &ast.Code{ return &ast.Code{
Content: tokenizer.Stringify(contentTokens), Content: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -51,10 +53,10 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor, true return cursor, true
} }
func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
languageToken := tokens[3] languageToken := tokens[3]
@ -73,5 +75,5 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node {
if languageToken != nil { if languageToken != nil {
codeBlock.Language = languageToken.String() codeBlock.Language = languageToken.String()
} }
return codeBlock return codeBlock, nil
} }

View File

@ -58,6 +58,7 @@ func TestCodeBlockParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.codeBlock, NewCodeBlockParser().Parse(tokens)) node, _ := NewCodeBlockParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.codeBlock}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -32,6 +32,7 @@ func TestCodeParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.code, NewCodeParser().Parse(tokens)) node, _ := NewCodeParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.code}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -47,10 +49,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor, true return cursor, true
} }
func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *HeadingParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
level := 0 level := 0
@ -61,10 +63,15 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node {
break break
} }
} }
contentTokens := tokens[level+1 : size] contentTokens := tokens[level+1 : size]
children := ParseInline(contentTokens) heading := &ast.Heading{
return &ast.Heading{
Level: level, Level: level,
Children: children,
} }
children, err := ParseInline(heading, contentTokens)
if err != nil {
return nil, err
}
heading.Children = children
return heading, nil
} }

View File

@ -75,6 +75,7 @@ Hello World`,
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.heading, NewHeadingParser().Parse(tokens)) node, _ := NewHeadingParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.heading}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -27,13 +29,13 @@ func (*HorizontalRuleParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 3, true return 3, true
} }
func (p *HorizontalRuleParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *HorizontalRuleParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
return &ast.HorizontalRule{ return &ast.HorizontalRule{
Symbol: tokens[0].Type, Symbol: tokens[0].Type,
} }, nil
} }

View File

@ -44,6 +44,7 @@ func TestHorizontalRuleParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.horizontalRule, NewHorizontalRuleParser().Parse(tokens)) node, _ := NewHorizontalRuleParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.horizontalRule}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -52,10 +54,10 @@ func (*ImageParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + len(contentTokens) + 1, true return cursor + len(contentTokens) + 1, true
} }
func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *ImageParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
altTextTokens := []*tokenizer.Token{} altTextTokens := []*tokenizer.Token{}
@ -69,5 +71,5 @@ func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Image{ return &ast.Image{
AltText: tokenizer.Stringify(altTextTokens), AltText: tokenizer.Stringify(altTextTokens),
URL: tokenizer.Stringify(contentTokens), URL: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -39,6 +39,7 @@ func TestImageParser(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.image, NewImageParser().Parse(tokens)) node, _ := NewImageParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.image}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -42,10 +44,10 @@ func (*ItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 2, true return len(contentTokens) + 2, true
} }
func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *ItalicParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
prefixTokenType := tokens[0].Type prefixTokenType := tokens[0].Type
@ -53,5 +55,5 @@ func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Italic{ return &ast.Italic{
Symbol: prefixTokenType, Symbol: prefixTokenType,
Content: tokenizer.Stringify(contentTokens), Content: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -43,6 +43,7 @@ func TestItalicParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.italic, NewItalicParser().Parse(tokens)) node, _ := NewItalicParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.italic}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -21,11 +23,11 @@ func (*LineBreakParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 1, true return 1, true
} }
func (p *LineBreakParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *LineBreakParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
return &ast.LineBreak{} return &ast.LineBreak{}, nil
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -51,10 +53,10 @@ func (*LinkParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 4 + len(urlTokens) + len(textTokens), true return 4 + len(urlTokens) + len(textTokens), true
} }
func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *LinkParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
textTokens := []*tokenizer.Token{} textTokens := []*tokenizer.Token{}
@ -68,5 +70,5 @@ func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node {
return &ast.Link{ return &ast.Link{
Text: tokenizer.Stringify(textTokens), Text: tokenizer.Stringify(textTokens),
URL: tokenizer.Stringify(urlTokens), URL: tokenizer.Stringify(urlTokens),
} }, nil
} }

View File

@ -39,6 +39,7 @@ func TestLinkParser(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.link, NewLinkParser().Parse(tokens)) node, _ := NewLinkParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.link}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -29,15 +31,18 @@ func (*ParagraphParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens), true return len(contentTokens), true
} }
func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
contentTokens := tokens[:size] contentTokens := tokens[:size]
children := ParseInline(contentTokens) paragraph := &ast.Paragraph{}
return &ast.Paragraph{ children, err := ParseInline(paragraph, contentTokens)
Children: children, if err != nil {
return nil, err
} }
paragraph.Children = children
return paragraph, nil
} }

View File

@ -32,6 +32,7 @@ func TestParagraphParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.paragraph, NewParagraphParser().Parse(tokens)) node, _ := NewParagraphParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.paragraph}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -14,7 +14,7 @@ type Context struct {
type BaseParser interface { type BaseParser interface {
Match(tokens []*tokenizer.Token) (int, bool) Match(tokens []*tokenizer.Token) (int, bool)
Parse(tokens []*tokenizer.Token) ast.Node Parse(tokens []*tokenizer.Token) (ast.Node, error)
} }
type InlineParser interface { type InlineParser interface {
@ -36,16 +36,23 @@ var defaultBlockParsers = []BlockParser{
func Parse(tokens []*tokenizer.Token) ([]ast.Node, error) { func Parse(tokens []*tokenizer.Token) ([]ast.Node, error) {
nodes := []ast.Node{} nodes := []ast.Node{}
var prevNode ast.Node
for len(tokens) > 0 { for len(tokens) > 0 {
for _, blockParser := range defaultBlockParsers { for _, blockParser := range defaultBlockParsers {
cursor, matched := blockParser.Match(tokens) size, matched := blockParser.Match(tokens)
if matched { if matched {
node := blockParser.Parse(tokens) node, err := blockParser.Parse(tokens)
if node == nil { if err != nil {
return nil, errors.New("parse error") return nil, errors.New("parse error")
} }
tokens = tokens[size:]
if prevNode != nil {
prevNode.SetNextSibling(node)
node.SetPrevSibling(prevNode)
}
prevNode = node
nodes = append(nodes, node) nodes = append(nodes, node)
tokens = tokens[cursor:]
break break
} }
} }
@ -62,27 +69,40 @@ var defaultInlineParsers = []InlineParser{
NewCodeParser(), NewCodeParser(),
NewTagParser(), NewTagParser(),
NewStrikethroughParser(), NewStrikethroughParser(),
NewLineBreakParser(),
NewTextParser(), NewTextParser(),
} }
func ParseInline(tokens []*tokenizer.Token) []ast.Node { func ParseInline(parent ast.Node, tokens []*tokenizer.Token) ([]ast.Node, error) {
nodes := []ast.Node{} nodes := []ast.Node{}
var lastNode ast.Node var prevNode ast.Node
for len(tokens) > 0 { for len(tokens) > 0 {
for _, inlineParser := range defaultInlineParsers { for _, inlineParser := range defaultInlineParsers {
cursor, matched := inlineParser.Match(tokens) size, matched := inlineParser.Match(tokens)
if matched { if matched {
node := inlineParser.Parse(tokens) node, err := inlineParser.Parse(tokens)
if node.Type() == ast.NodeTypeText && lastNode != nil && lastNode.Type() == ast.NodeTypeText { if err != nil {
lastNode.(*ast.Text).Content += node.(*ast.Text).Content return nil, errors.New("parse error")
} else {
nodes = append(nodes, node)
lastNode = node
} }
tokens = tokens[cursor:]
tokens = tokens[size:]
node.SetParent(parent)
if prevNode != nil {
if prevNode.Type() == ast.NodeTypeText && node.Type() == ast.NodeTypeText {
prevNode.(*ast.Text).Content += node.(*ast.Text).Content
break
}
prevNode.SetNextSibling(node)
node.SetPrevSibling(prevNode)
}
nodes = append(nodes, node)
prevNode = node
break break
} }
} }
} }
return nodes parent.SetChildren(nodes)
return nodes, nil
} }

View File

@ -43,7 +43,7 @@ func TestParser(t *testing.T) {
}, },
}, },
{ {
text: "Hello **world**!", text: "Hello **world**!\nHere is a new line.",
nodes: []ast.Node{ nodes: []ast.Node{
&ast.Paragraph{ &ast.Paragraph{
Children: []ast.Node{ Children: []ast.Node{
@ -59,6 +59,14 @@ func TestParser(t *testing.T) {
}, },
}, },
}, },
&ast.LineBreak{},
&ast.Paragraph{
Children: []ast.Node{
&ast.Text{
Content: "Here is a new line.",
},
},
},
}, },
}, },
{ {
@ -89,8 +97,17 @@ func TestParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
nodes, err := Parse(tokens) nodes, _ := Parse(tokens)
require.NoError(t, err) require.Equal(t, StringifyNodes(test.nodes), StringifyNodes(nodes))
require.Equal(t, test.nodes, nodes)
} }
} }
// StringifyNodes concatenates the String() debug forms of the given
// nodes, skipping nil entries. Tests use it to compare AST shapes
// without asserting on sibling/parent pointers.
func StringifyNodes(nodes []ast.Node) string {
	result := ""
	for _, n := range nodes {
		if n == nil {
			continue
		}
		result += n.String()
	}
	return result
}

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -36,14 +38,14 @@ func (*StrikethroughParser) Match(tokens []*tokenizer.Token) (int, bool) {
return cursor + 2, true return cursor + 2, true
} }
func (p *StrikethroughParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *StrikethroughParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
contentTokens := tokens[2 : size-2] contentTokens := tokens[2 : size-2]
return &ast.Strikethrough{ return &ast.Strikethrough{
Content: tokenizer.Stringify(contentTokens), Content: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -40,6 +40,7 @@ func TestStrikethroughParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.strikethrough, NewStrikethroughParser().Parse(tokens)) node, _ := NewStrikethroughParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.strikethrough}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -1,6 +1,8 @@
package parser package parser
import ( import (
"errors"
"github.com/usememos/memos/plugin/gomark/ast" "github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer" "github.com/usememos/memos/plugin/gomark/parser/tokenizer"
) )
@ -32,14 +34,14 @@ func (*TagParser) Match(tokens []*tokenizer.Token) (int, bool) {
return len(contentTokens) + 1, true return len(contentTokens) + 1, true
} }
func (p *TagParser) Parse(tokens []*tokenizer.Token) ast.Node { func (p *TagParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
size, ok := p.Match(tokens) size, ok := p.Match(tokens)
if size == 0 || !ok { if size == 0 || !ok {
return nil return nil, errors.New("not matched")
} }
contentTokens := tokens[1:size] contentTokens := tokens[1:size]
return &ast.Tag{ return &ast.Tag{
Content: tokenizer.Stringify(contentTokens), Content: tokenizer.Stringify(contentTokens),
} }, nil
} }

View File

@ -38,6 +38,7 @@ func TestTagParser(t *testing.T) {
for _, test := range tests { for _, test := range tests {
tokens := tokenizer.Tokenize(test.text) tokens := tokenizer.Tokenize(test.text)
require.Equal(t, test.tag, NewTagParser().Parse(tokens)) node, _ := NewTagParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.tag}), StringifyNodes([]ast.Node{node}))
} }
} }

View File

@ -20,11 +20,11 @@ func (*TextParser) Match(tokens []*tokenizer.Token) (int, bool) {
return 1, true return 1, true
} }
func (*TextParser) Parse(tokens []*tokenizer.Token) ast.Node { func (*TextParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
if len(tokens) == 0 { if len(tokens) == 0 {
return &ast.Text{} return &ast.Text{}, nil
} }
return &ast.Text{ return &ast.Text{
Content: tokens[0].String(), Content: tokens[0].String(),
} }, nil
} }

View File

@ -0,0 +1,87 @@
package html
import (
"bytes"
"fmt"
"github.com/usememos/memos/plugin/gomark/ast"
)
// HTMLRenderer is a simple renderer that converts AST to HTML.
// Rendered fragments accumulate in an internal buffer, so a renderer
// instance should be used for a single Render call.
// nolint
type HTMLRenderer struct {
	output  *bytes.Buffer  // accumulated HTML output
	context *renderContext // per-render state (currently unused)
}

// renderContext carries per-render state; empty for now, reserved for
// future rendering options.
type renderContext struct {
}

// NewHTMLRenderer creates a new HTMLRenderer with an empty output buffer.
func NewHTMLRenderer() *HTMLRenderer {
	return &HTMLRenderer{
		output:  new(bytes.Buffer),
		context: &renderContext{},
	}
}
// RenderNode renders a single AST node to HTML, appending the result to
// r.output. Container nodes (Paragraph, Heading, Blockquote) recurse
// into their children via RenderNodes.
//
// NOTE(review): Text and CodeBlock content is written without HTML
// escaping, so markup in the source text passes through verbatim —
// confirm whether escaping is intended before rendering untrusted input.
func (r *HTMLRenderer) RenderNode(node ast.Node) {
	prevSibling, nextSibling := node.GetPrevSibling(), node.GetNextSibling()
	switch n := node.(type) {
	case *ast.LineBreak:
		r.output.WriteString("<br>")
	case *ast.Paragraph:
		r.output.WriteString("<p>")
		r.RenderNodes(n.Children)
		r.output.WriteString("</p>")
	case *ast.CodeBlock:
		r.output.WriteString("<pre><code>")
		r.output.WriteString(n.Content)
		r.output.WriteString("</code></pre>")
	case *ast.Heading:
		r.output.WriteString(fmt.Sprintf("<h%d>", n.Level))
		r.RenderNodes(n.Children)
		r.output.WriteString(fmt.Sprintf("</h%d>", n.Level))
	case *ast.HorizontalRule:
		r.output.WriteString("<hr>")
	case *ast.Blockquote:
		// Consecutive Blockquote siblings are merged into a single
		// <blockquote> element: only the first of a run opens the tag...
		if prevSibling == nil || prevSibling.Type() != ast.NodeTypeBlockquote {
			r.output.WriteString("<blockquote>")
		}
		r.RenderNodes(n.Children)
		// ...the run is chained by recursing into the next Blockquote
		// sibling (RenderNodes skips those same siblings to avoid
		// rendering them twice)...
		if nextSibling != nil && nextSibling.Type() == ast.NodeTypeBlockquote {
			r.RenderNode(nextSibling)
		}
		// ...and only the first of the run closes the tag, after the
		// recursive chain above has emitted all merged content.
		if prevSibling == nil || prevSibling.Type() != ast.NodeTypeBlockquote {
			r.output.WriteString("</blockquote>")
		}
	case *ast.Text:
		r.output.WriteString(n.Content)
	default:
		// Handle other block types if needed.
	}
}
// RenderNodes renders each node in the slice to HTML, appending the
// results to r.output. A Blockquote whose previous sibling is also a
// Blockquote is skipped here, because RenderNode already chains runs of
// consecutive blockquotes into one <blockquote> element.
func (r *HTMLRenderer) RenderNodes(nodes []ast.Node) {
	for _, node := range nodes {
		prev := node.GetPrevSibling()
		if prev != nil && prev.Type() == node.Type() && node.Type() == ast.NodeTypeBlockquote {
			continue
		}
		r.RenderNode(node)
	}
}
// Render renders the AST to HTML and returns the accumulated string.
// astRoot is the slice of top-level block nodes produced by the parser.
func (r *HTMLRenderer) Render(astRoot []ast.Node) string {
	r.RenderNodes(astRoot)
	return r.output.String()
}

View File

@ -0,0 +1,36 @@
package html
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/parser"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
// TestHTMLRenderer checks end-to-end rendering: the markdown text is
// tokenized, parsed into an AST, and rendered; the resulting HTML must
// match the expected fragment exactly.
func TestHTMLRenderer(t *testing.T) {
	tests := []struct {
		text     string
		expected string
	}{
		{
			text:     "Hello world!",
			expected: `<p>Hello world!</p>`,
		},
		{
			text:     "> Hello\n> world!",
			expected: `<blockquote>Hello<br>world!</blockquote>`,
		},
	}
	for _, test := range tests {
		tokens := tokenizer.Tokenize(test.text)
		nodes, err := parser.Parse(tokens)
		require.NoError(t, err)
		// Use require for the comparison as well (the original mixed
		// testify with a manual if+t.Errorf), so a mismatch reports a
		// readable diff and stops the failing sub-case immediately.
		actual := NewHTMLRenderer().Render(nodes)
		require.Equal(t, test.expected, actual)
	}
}