memos/plugin/gomark/parser/tokenizer/tokenizer_test.go
2024-01-20 02:09:33 +08:00

143 lines
1.9 KiB
Go

package tokenizer
import (
"testing"
"github.com/stretchr/testify/require"
)
// TestTokenize verifies that Tokenize converts raw markdown text into the
// expected ordered sequence of tokens, where each token carries its type
// and the exact literal substring it was produced from.
func TestTokenize(t *testing.T) {
// Table-driven cases: input text paired with the full expected token stream.
tests := []struct {
text string
tokens []*Token
}{
{
// Inline symbols: a leading asterisk, plain words, and punctuation.
text: "*Hello world!",
tokens: []*Token{
{
Type: Asterisk,
Value: "*",
},
{
Type: Text,
Value: "Hello",
},
{
Type: Space,
Value: " ",
},
{
Type: Text,
Value: "world",
},
{
Type: ExclamationMark,
Value: "!",
},
},
},
{
// Multi-line input via a raw string literal: a heading marker followed
// by a newline. NOTE(review): the expected Space tokens around the
// Newline imply the raw string contains a trailing space after "hello"
// and a leading space before "world" — confirm the literal's exact
// whitespace against the repository copy, as it is easily mangled.
text: `# hello
world`,
tokens: []*Token{
{
Type: PoundSign,
Value: "#",
},
{
Type: Space,
Value: " ",
},
{
Type: Text,
Value: "hello",
},
{
Type: Space,
Value: " ",
},
{
Type: Newline,
Value: "\n",
},
{
Type: Space,
Value: " ",
},
{
Type: Text,
Value: "world",
},
},
},
}
// Compare the whole token slice at once; require.Equal diffs element-wise.
for _, test := range tests {
result := Tokenize(test.text)
require.Equal(t, test.tokens, result)
}
}
// TestSplit verifies that Split partitions a token stream into groups on a
// separator token type: the separator itself is dropped, and a group is
// emitted for each region between separators (including the empty group
// before a leading separator).
func TestSplit(t *testing.T) {
	tests := []struct {
		tokens []*Token
		sep    TokenType
		result [][]*Token
	}{
		{
			// "*Hello world!" split on Asterisk: an empty group before the
			// leading "*", then the remaining tokens as a single group.
			tokens: []*Token{
				{
					Type:  Asterisk,
					Value: "*",
				},
				{
					Type:  Text,
					Value: "Hello",
				},
				{
					Type:  Space,
					Value: " ",
				},
				{
					Type:  Text,
					Value: "world",
				},
				{
					Type:  ExclamationMark,
					Value: "!",
				},
			},
			sep: Asterisk,
			result: [][]*Token{
				{},
				{
					{
						Type:  Text,
						Value: "Hello",
					},
					{
						Type:  Space,
						Value: " ",
					},
					{
						Type:  Text,
						Value: "world",
					},
					{
						Type:  ExclamationMark,
						Value: "!",
					},
				},
			},
		},
	}
	for _, test := range tests {
		result := Split(test.tokens, test.sep)
		// Guard the group count first: without this, a result with fewer
		// groups than expected would silently pass the loop below, and one
		// with more groups would panic on test.result[index] instead of
		// failing cleanly.
		require.Len(t, result, len(test.result))
		for index, tokens := range result {
			// Compare groups via their stringified form, mirroring how the
			// parser ultimately consumes them.
			require.Equal(t, Stringify(test.result[index]), Stringify(tokens))
		}
	}
}