feat: implement restore nodes

Steven 2023-12-28 22:35:39 +08:00
parent 2a6f054876
commit 46f7cffc7b
34 changed files with 264 additions and 154 deletions

View File

@@ -28,57 +28,13 @@ const (
EscapingCharacterNode
)
func (t NodeType) String() string {
switch t {
case LineBreakNode:
return "LineBreakNode"
case ParagraphNode:
return "ParagraphNode"
case CodeBlockNode:
return "CodeBlockNode"
case HeadingNode:
return "HeadingNode"
case HorizontalRuleNode:
return "HorizontalRuleNode"
case BlockquoteNode:
return "BlockquoteNode"
case OrderedListNode:
return "OrderedListNode"
case UnorderedListNode:
return "UnorderedListNode"
case TaskListNode:
return "TaskListNode"
case TextNode:
return "TextNode"
case BoldNode:
return "BoldNode"
case ItalicNode:
return "ItalicNode"
case BoldItalicNode:
return "BoldItalicNode"
case CodeNode:
return "CodeNode"
case ImageNode:
return "ImageNode"
case LinkNode:
return "LinkNode"
case AutoLinkNode:
return "AutoLinkNode"
case TagNode:
return "TagNode"
case StrikethroughNode:
return "StrikethroughNode"
case EscapingCharacterNode:
return "EscapingCharacterNode"
default:
return "UnknownNode"
}
}
type Node interface {
// Type returns a node type.
Type() NodeType
// Restore returns a string representation of this node.
Restore() string
// PrevSibling returns a previous sibling node of this node.
PrevSibling() Node
@@ -113,3 +69,12 @@ func (n *BaseNode) SetPrevSibling(node Node) {
func (n *BaseNode) SetNextSibling(node Node) {
n.nextSibling = node
}
func IsBlockNode(node Node) bool {
switch node.Type() {
case ParagraphNode, CodeBlockNode, HeadingNode, HorizontalRuleNode, BlockquoteNode, OrderedListNode, UnorderedListNode, TaskListNode:
return true
default:
return false
}
}
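
A minimal sketch of how the two additions to this file work together, assuming only what the diff shows: Restore serializes a node back to raw markdown, and IsBlockNode classifies it as block-level or not. The describe helper below is hypothetical, not part of the commit.

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
)

// describe is a hypothetical helper combining the NodeType name,
// the block/inline classification, and the restored markdown text.
func describe(node ast.Node) string {
	kind := "inline"
	if ast.IsBlockNode(node) {
		kind = "block"
	}
	return fmt.Sprintf("%s (%s): %q", node.Type(), kind, node.Restore())
}

func main() {
	fmt.Println(describe(&ast.Text{Content: "plain"}))
	// e.g. TextNode (inline): "plain"
	fmt.Println(describe(&ast.Blockquote{Children: []ast.Node{&ast.Text{Content: "quoted"}}}))
	// e.g. BlockquoteNode (block): "> quoted"
}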

View File

@@ -1,5 +1,7 @@
package ast
import "fmt"
type BaseBlock struct {
BaseNode
}
@@ -12,6 +14,10 @@ func (*LineBreak) Type() NodeType {
return LineBreakNode
}
func (*LineBreak) Restore() string {
return "\n"
}
type Paragraph struct {
BaseBlock
@@ -22,6 +28,14 @@ func (*Paragraph) Type() NodeType {
return ParagraphNode
}
func (n *Paragraph) Restore() string {
var result string
for _, child := range n.Children {
result += child.Restore()
}
return result
}
type CodeBlock struct {
BaseBlock
@@ -33,6 +47,10 @@ func (*CodeBlock) Type() NodeType {
return CodeBlockNode
}
func (n *CodeBlock) Restore() string {
return fmt.Sprintf("```%s\n%s\n```", n.Language, n.Content)
}
type Heading struct {
BaseBlock
@@ -44,6 +62,18 @@ func (*Heading) Type() NodeType {
return HeadingNode
}
func (n *Heading) Restore() string {
var result string
for _, child := range n.Children {
result += child.Restore()
}
symbol := ""
for i := 0; i < n.Level; i++ {
symbol += "#"
}
return fmt.Sprintf("%s %s", symbol, result)
}
type HorizontalRule struct {
BaseBlock
@@ -55,6 +85,10 @@ func (*HorizontalRule) Type() NodeType {
return HorizontalRuleNode
}
func (n *HorizontalRule) Restore() string {
return n.Symbol + n.Symbol + n.Symbol
}
type Blockquote struct {
BaseBlock
@@ -65,6 +99,14 @@ func (*Blockquote) Type() NodeType {
return BlockquoteNode
}
func (n *Blockquote) Restore() string {
var result string
for _, child := range n.Children {
result += child.Restore()
}
return fmt.Sprintf("> %s", result)
}
type OrderedList struct {
BaseBlock
@@ -76,6 +118,14 @@ func (*OrderedList) Type() NodeType {
return OrderedListNode
}
func (n *OrderedList) Restore() string {
var result string
for _, child := range n.Children {
result += child.Restore()
}
return fmt.Sprintf("%s. %s", n.Number, result)
}
type UnorderedList struct {
BaseBlock
@@ -88,6 +138,14 @@ func (*UnorderedList) Type() NodeType {
return UnorderedListNode
}
func (n *UnorderedList) Restore() string {
var result string
for _, child := range n.Children {
result += child.Restore()
}
return fmt.Sprintf("%s %s", n.Symbol, result)
}
type TaskList struct {
BaseBlock
@@ -100,3 +158,15 @@ type TaskList struct {
func (*TaskList) Type() NodeType {
return TaskListNode
}
func (n *TaskList) Restore() string {
var result string
for _, child := range n.Children {
result += child.Restore()
}
complete := " "
if n.Complete {
complete = "x"
}
return fmt.Sprintf("%s [%s] %s", n.Symbol, complete, result)
}
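
Taken together, these Restore implementations let a caller rebuild markdown from hand-built block nodes. A hedged usage sketch, assuming the field layout the diff shows (Level and Children on Heading; Symbol, Complete, and Children on TaskList):

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
)

func main() {
	heading := &ast.Heading{
		Level:    2,
		Children: []ast.Node{&ast.Text{Content: "Todo"}},
	}
	task := &ast.TaskList{
		Symbol:   "-",
		Complete: true,
		Children: []ast.Node{&ast.Text{Content: "ship the restore feature"}},
	}
	fmt.Println(heading.Restore()) // "## Todo"
	fmt.Println(task.Restore())    // "- [x] ship the restore feature"
}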

View File

@@ -1,5 +1,7 @@
package ast
import "fmt"
type BaseInline struct {
BaseNode
}
@@ -14,6 +16,10 @@ func (*Text) Type() NodeType {
return TextNode
}
func (n *Text) Restore() string {
return n.Content
}
type Bold struct {
BaseInline
@@ -26,6 +32,15 @@ func (*Bold) Type() NodeType {
return BoldNode
}
func (n *Bold) Restore() string {
symbol := n.Symbol + n.Symbol
children := ""
for _, child := range n.Children {
children += child.Restore()
}
return fmt.Sprintf("%s%s%s", symbol, children, symbol)
}
type Italic struct {
BaseInline
@@ -38,6 +53,10 @@ func (*Italic) Type() NodeType {
return ItalicNode
}
func (n *Italic) Restore() string {
return fmt.Sprintf("%s%s%s", n.Symbol, n.Content, n.Symbol)
}
type BoldItalic struct {
BaseInline
@@ -50,6 +69,11 @@ func (*BoldItalic) Type() NodeType {
return BoldItalicNode
}
func (n *BoldItalic) Restore() string {
symbol := n.Symbol + n.Symbol + n.Symbol
return fmt.Sprintf("%s%s%s", symbol, n.Content, symbol)
}
type Code struct {
BaseInline
@@ -60,6 +84,10 @@ func (*Code) Type() NodeType {
return CodeNode
}
func (n *Code) Restore() string {
return fmt.Sprintf("`%s`", n.Content)
}
type Image struct {
BaseInline
@@ -71,6 +99,10 @@ func (*Image) Type() NodeType {
return ImageNode
}
func (n *Image) Restore() string {
return fmt.Sprintf("![%s](%s)", n.AltText, n.URL)
}
type Link struct {
BaseInline
@@ -82,16 +114,28 @@ func (*Link) Type() NodeType {
return LinkNode
}
func (n *Link) Restore() string {
return fmt.Sprintf("[%s](%s)", n.Text, n.URL)
}
type AutoLink struct {
BaseInline
URL string
IsRawText bool
}
func (*AutoLink) Type() NodeType {
return AutoLinkNode
}
func (n *AutoLink) Restore() string {
if n.IsRawText {
return n.URL
}
return fmt.Sprintf("<%s>", n.URL)
}
type Tag struct {
BaseInline
@@ -102,6 +146,10 @@ func (*Tag) Type() NodeType {
return TagNode
}
func (n *Tag) Restore() string {
return fmt.Sprintf("<%s>", n.Content)
}
type Strikethrough struct {
BaseInline
@@ -112,6 +160,10 @@ func (*Strikethrough) Type() NodeType {
return StrikethroughNode
}
func (n *Strikethrough) Restore() string {
return fmt.Sprintf("~~%s~~", n.Content)
}
type EscapingCharacter struct {
BaseInline
@@ -121,3 +173,7 @@ type EscapingCharacter struct {
func (*EscapingCharacter) Type() NodeType {
return EscapingCharacterNode
}
func (n *EscapingCharacter) Restore() string {
return fmt.Sprintf("\\%s", n.Symbol)
}
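
The inline nodes follow the same pattern; the one branching case is AutoLink, where IsRawText decides whether the angle brackets come back. A short sketch, assuming the field names shown in this diff:

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/ast"
)

func main() {
	bold := &ast.Bold{
		Symbol:   "*",
		Children: []ast.Node{&ast.Text{Content: "hi"}},
	}
	fmt.Println(bold.Restore()) // "**hi**"

	raw := &ast.AutoLink{URL: "https://example.com", IsRawText: true}
	bracketed := &ast.AutoLink{URL: "https://example.com"}
	fmt.Println(raw.Restore())       // "https://example.com"
	fmt.Println(bracketed.Restore()) // "<https://example.com>"
}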

View File

@@ -57,10 +57,13 @@ func (p *AutoLinkParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
}
url := tokenizer.Stringify(tokens[:size])
isRawText := true
if tokens[0].Type == tokenizer.LessThan && tokens[len(tokens)-1].Type == tokenizer.GreaterThan {
isRawText = false
url = tokenizer.Stringify(tokens[1 : len(tokens)-1])
}
return &ast.AutoLink{
URL: url,
IsRawText: isRawText,
}, nil
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestAutoLinkParser(t *testing.T) {
@@ -28,6 +29,7 @@ func TestAutoLinkParser(t *testing.T) {
text: "https://example.com",
link: &ast.AutoLink{
URL: "https://example.com",
IsRawText: true,
},
},
}
@@ -35,6 +37,6 @@
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewAutoLinkParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.link}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.link}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -23,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens[2:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
return 0, false

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestBlockquoteParser(t *testing.T) {
@@ -51,6 +52,6 @@ func TestBlockquoteParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewBlockquoteParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.blockquote}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.blockquote}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestBoldItalicParser(t *testing.T) {
@@ -45,6 +46,6 @@ func TestBoldItalicParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewBoldItalicParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.boldItalic}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.boldItalic}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestBoldParser(t *testing.T) {
@@ -53,6 +54,6 @@ func TestBoldParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewBoldParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.bold}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.bold}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -40,7 +40,7 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
matched = true
break
} else if tokens[cursor+4].Type == tokenizer.Newline {
cursor += 5
cursor += 4
matched = true
break
}
@@ -65,9 +65,6 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
languageToken = nil
contentStart = 4
}
if tokens[size-1].Type == tokenizer.Newline {
contentEnd = size - 5
}
codeBlock := &ast.CodeBlock{
Content: tokenizer.Stringify(tokens[contentStart:contentEnd]),
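
A hypothetical round-trip check for this parser, built only from identifiers that appear in this commit's tests; with the cursor arithmetic fixed, the restored text should match the input exactly:

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/parser"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)

func main() {
	raw := "```go\nfmt.Println(1)\n```"
	tokens := tokenizer.Tokenize(raw)
	node, err := parser.NewCodeBlockParser().Parse(tokens)
	if err != nil {
		panic(err)
	}
	// CodeBlock.Restore rebuilds the fenced form from Language and Content,
	// so this should print true if the round trip is lossless.
	fmt.Println(node.Restore() == raw)
}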

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestCodeBlockParser(t *testing.T) {
@@ -59,6 +60,6 @@ func TestCodeBlockParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewCodeBlockParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.codeBlock}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.codeBlock}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestCodeParser(t *testing.T) {
@@ -33,6 +34,6 @@ func TestCodeParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewCodeParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.code}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.code}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestEscapingCharacterParser(t *testing.T) {
@@ -25,6 +26,6 @@ func TestEscapingCharacterParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewEscapingCharacterParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.node}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.node}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -34,10 +34,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens[level+1:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
return 0, false
@@ -62,9 +62,6 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
}
contentTokens := tokens[level+1 : size]
if contentTokens[len(contentTokens)-1].Type == tokenizer.Newline {
contentTokens = contentTokens[:len(contentTokens)-1]
}
children, err := ParseInline(contentTokens)
if err != nil {
return nil, err

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestHeadingParser(t *testing.T) {
@@ -80,6 +81,6 @@ Hello World`,
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewHeadingParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.heading}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.heading}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestHorizontalRuleParser(t *testing.T) {
@@ -51,6 +52,6 @@ func TestHorizontalRuleParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewHorizontalRuleParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.horizontalRule}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.horizontalRule}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestImageParser(t *testing.T) {
@@ -40,6 +41,6 @@ func TestImageParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewImageParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.image}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.image}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestItalicParser(t *testing.T) {
@@ -44,6 +45,6 @@ func TestItalicParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewItalicParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.italic}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.italic}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestLinkParser(t *testing.T) {
@@ -47,6 +48,6 @@ func TestLinkParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewLinkParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.link}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.link}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -23,10 +23,10 @@ func (*OrderedListParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens[3:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
@@ -43,9 +43,6 @@ func (p *OrderedListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
}
contentTokens := tokens[3:size]
if contentTokens[len(contentTokens)-1].Type == tokenizer.Newline {
contentTokens = contentTokens[:len(contentTokens)-1]
}
children, err := ParseInline(contentTokens)
if err != nil {
return nil, err

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestOrderedListParser(t *testing.T) {
@@ -53,6 +54,6 @@ func TestOrderedListParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewOrderedListParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.node}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.node}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -18,10 +18,10 @@ func NewParagraphParser() *ParagraphParser {
func (*ParagraphParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
return 0, false
@@ -38,11 +38,7 @@ func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
return nil, errors.New("not matched")
}
contentTokens := tokens[:size]
if contentTokens[len(contentTokens)-1].Type == tokenizer.Newline {
contentTokens = contentTokens[:len(contentTokens)-1]
}
children, err := ParseInline(contentTokens)
children, err := ParseInline(tokens[:size])
if err != nil {
return nil, err
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestParagraphParser(t *testing.T) {
@@ -57,6 +58,6 @@ func TestParagraphParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewParagraphParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.paragraph}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.paragraph}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -48,6 +48,7 @@ func ParseBlock(tokens []*tokenizer.Token) ([]ast.Node, error) {
func ParseBlockWithParsers(tokens []*tokenizer.Token, blockParsers []BlockParser) ([]ast.Node, error) {
nodes := []ast.Node{}
var prevNode ast.Node
var skipNextLineBreakFlag bool
for len(tokens) > 0 {
for _, blockParser := range blockParsers {
size, matched := blockParser.Match(tokens)
@@ -57,12 +58,21 @@ func ParseBlockWithParsers(tokens []*tokenizer.Token, blockParsers []BlockParser
return nil, errors.New("parse error")
}
if node.Type() == ast.LineBreakNode && skipNextLineBreakFlag {
if prevNode != nil && ast.IsBlockNode(prevNode) {
tokens = tokens[size:]
skipNextLineBreakFlag = false
break
}
}
tokens = tokens[size:]
if prevNode != nil {
prevNode.SetNextSibling(node)
node.SetPrevSibling(prevNode)
}
prevNode = node
skipNextLineBreakFlag = true
nodes = append(nodes, node)
break
}
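
The new skipNextLineBreakFlag drops at most one LineBreak node when it immediately follows a block-level node, since that block's text already ends at the newline; any further blank lines still come through. A stripped-down, hypothetical model of that behavior on plain strings:

package main

import "fmt"

// filterLineBreaks mimics the flag logic above: one "br" directly
// after a "block" is swallowed, later ones are kept.
func filterLineBreaks(nodes []string) []string {
	out := []string{}
	skip := false
	for _, n := range nodes {
		if n == "br" && skip {
			skip = false
			continue
		}
		out = append(out, n)
		skip = n == "block"
	}
	return out
}

func main() {
	fmt.Println(filterLineBreaks([]string{"block", "br", "br", "block"}))
	// [block br block]
}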

View File

@@ -1,13 +1,13 @@
package parser
import (
"strconv"
"testing"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestParser(t *testing.T) {
@@ -202,62 +202,6 @@ func TestParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
nodes, _ := Parse(tokens)
require.Equal(t, StringifyNodes(test.nodes), StringifyNodes(nodes))
require.Equal(t, restore.Restore(test.nodes), restore.Restore(nodes))
}
}
func StringifyNodes(nodes []ast.Node) string {
var result string
for _, node := range nodes {
if node != nil {
result += StringifyNode(node)
}
}
return result
}
func StringifyNode(node ast.Node) string {
switch n := node.(type) {
case *ast.LineBreak:
return "LineBreak()"
case *ast.CodeBlock:
return "CodeBlock(" + n.Language + ", " + n.Content + ")"
case *ast.Paragraph:
return "Paragraph(" + StringifyNodes(n.Children) + ")"
case *ast.Heading:
return "Heading(" + StringifyNodes(n.Children) + ")"
case *ast.HorizontalRule:
return "HorizontalRule(" + n.Symbol + ")"
case *ast.Blockquote:
return "Blockquote(" + StringifyNodes(n.Children) + ")"
case *ast.OrderedList:
return "OrderedList(" + n.Number + ", " + StringifyNodes(n.Children) + ")"
case *ast.UnorderedList:
return "UnorderedList(" + n.Symbol + ", " + StringifyNodes(n.Children) + ")"
case *ast.TaskList:
return "TaskList(" + n.Symbol + ", " + strconv.FormatBool(n.Complete) + ", " + StringifyNodes(n.Children) + ")"
case *ast.Text:
return "Text(" + n.Content + ")"
case *ast.Bold:
return "Bold(" + n.Symbol + StringifyNodes(n.Children) + n.Symbol + ")"
case *ast.Italic:
return "Italic(" + n.Symbol + n.Content + n.Symbol + ")"
case *ast.BoldItalic:
return "BoldItalic(" + n.Symbol + n.Content + n.Symbol + ")"
case *ast.Code:
return "Code(" + n.Content + ")"
case *ast.Image:
return "Image(" + n.URL + ", " + n.AltText + ")"
case *ast.Link:
return "Link(" + n.Text + ", " + n.URL + ")"
case *ast.AutoLink:
return "AutoLink(" + n.URL + ")"
case *ast.Tag:
return "Tag(" + n.Content + ")"
case *ast.Strikethrough:
return "Strikethrough(" + n.Content + ")"
case *ast.EscapingCharacter:
return "EscapingCharacter(" + n.Symbol + ")"
}
return ""
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestStrikethroughParser(t *testing.T) {
@@ -41,6 +42,6 @@ func TestStrikethroughParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewStrikethroughParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.strikethrough}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.strikethrough}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestTagParser(t *testing.T) {
@@ -39,6 +40,6 @@ func TestTagParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewTagParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.tag}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.tag}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -34,10 +34,10 @@ func (*TaskListParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens[6:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
return 0, false
@@ -54,9 +54,6 @@ func (p *TaskListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
symbolToken := tokens[0]
contentTokens := tokens[6:size]
if contentTokens[len(contentTokens)-1].Type == tokenizer.Newline {
contentTokens = contentTokens[:len(contentTokens)-1]
}
children, err := ParseInline(contentTokens)
if err != nil {
return nil, err

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestTaskListParser(t *testing.T) {
@@ -52,6 +53,6 @@ func TestTaskListParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewTaskListParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.node}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.node}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -24,10 +24,10 @@ func (*UnorderedListParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens := []*tokenizer.Token{}
for _, token := range tokens[2:] {
contentTokens = append(contentTokens, token)
if token.Type == tokenizer.Newline {
break
}
contentTokens = append(contentTokens, token)
}
if len(contentTokens) == 0 {
return 0, false
@@ -44,9 +44,6 @@ func (p *UnorderedListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error)
symbolToken := tokens[0]
contentTokens := tokens[2:size]
if contentTokens[len(contentTokens)-1].Type == tokenizer.Newline {
contentTokens = contentTokens[:len(contentTokens)-1]
}
children, err := ParseInline(contentTokens)
if err != nil {
return nil, err

View File

@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
func TestUnorderedListParser(t *testing.T) {
@@ -50,6 +51,6 @@ func TestUnorderedListParser(t *testing.T) {
for _, test := range tests {
tokens := tokenizer.Tokenize(test.text)
node, _ := NewUnorderedListParser().Parse(tokens)
require.Equal(t, StringifyNodes([]ast.Node{test.node}), StringifyNodes([]ast.Node{node}))
require.Equal(t, restore.Restore([]ast.Node{test.node}), restore.Restore([]ast.Node{node}))
}
}

View File

@@ -83,7 +83,7 @@ func (r *HTMLRenderer) Render(astRoot []ast.Node) string {
return r.output.String()
}
func (r *HTMLRenderer) renderLineBreak(_ *ast.LineBreak) {
func (r *HTMLRenderer) renderLineBreak(*ast.LineBreak) {
r.output.WriteString("<br>")
}

View File

@@ -0,0 +1,14 @@
package restore
import "github.com/usememos/memos/plugin/gomark/ast"
func Restore(nodes []ast.Node) string {
var result string
for _, node := range nodes {
if node == nil {
continue
}
result += node.Restore()
}
return result
}
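
This new restore package is the public entry point: it concatenates Restore over a node list, skipping nils. A hedged end-to-end sketch built only from functions this commit's tests already call:

package main

import (
	"fmt"

	"github.com/usememos/memos/plugin/gomark/parser"
	"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
	"github.com/usememos/memos/plugin/gomark/restore"
)

func main() {
	raw := "# Title\nSome **bold** text."
	nodes, err := parser.Parse(tokenizer.Tokenize(raw))
	if err != nil {
		panic(err)
	}
	// If the parse/restore round trip is lossless, this prints true.
	fmt.Println(restore.Restore(nodes) == raw)
}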

View File

@@ -0,0 +1,48 @@
package restore
import (
"testing"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/ast"
)
func TestRestore(t *testing.T) {
tests := []struct {
nodes []ast.Node
rawText string
}{
{
nodes: nil,
rawText: "",
},
{
nodes: []ast.Node{
&ast.Text{
Content: "Hello world!",
},
},
rawText: "Hello world!",
},
{
nodes: []ast.Node{
&ast.Paragraph{
Children: []ast.Node{
&ast.Text{
Content: "Here: ",
},
&ast.Code{
Content: "Hello world!",
},
},
},
},
rawText: "Here: `Hello world!`",
},
}
for _, test := range tests {
require.Equal(t, test.rawText, Restore(test.nodes))
}
}