mirror of https://github.com/usememos/memos.git

chore: fix linter

This commit removes unused receiver names across the parsers' Match methods, rewriting func (p *XParser) Match(...) as func (*XParser) Match(...). The receiver is never referenced in these method bodies, so dropping the name silences the linter without changing behavior.
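A minimal, self-contained sketch of the pattern being fixed, using hypothetical stand-in types rather than the real parser code (the commit does not name the linter; an unused-receiver rule such as revive's is a plausible guess):

package main

import "fmt"

// Token is an illustrative stand-in for tokenizer.Token.
type Token struct {
	Value string
}

// SketchParser is a hypothetical parser type mirroring the shape of
// BoldParser, CodeParser, and the other parsers in this commit.
type SketchParser struct{}

// Before the fix this would read: func (p *SketchParser) Match(...).
// Since the body never uses p, the named receiver only draws a lint
// warning; dropping the name (an anonymous receiver) keeps the method
// set identical, so interface satisfaction and call sites are untouched.
func (*SketchParser) Match(tokens []*Token) (string, int) {
	if len(tokens) < 3 {
		return "", 0
	}
	return tokens[1].Value, 3
}

func main() {
	tokens := []*Token{{Value: "*"}, {Value: "bold"}, {Value: "*"}}
	node, consumed := (&SketchParser{}).Match(tokens)
	fmt.Println(node, consumed) // prints: bold 3
}

The anonymous receiver is plain Go syntax: a method may omit the receiver name when the body does not reference it, which is exactly the situation in all 23 hunks below.
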
@@ -11,7 +11,7 @@ func NewBlockquoteParser() *BlockquoteParser {
 	return &BlockquoteParser{}
 }
 
-func (p *BlockquoteParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewBoldParser() InlineParser {
 	return &BoldParser{}
 }
 
-func (p *BoldParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*BoldParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 5 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewCodeParser() *CodeParser {
 	return &CodeParser{}
 }
 
-func (p *CodeParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*CodeParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewEmbeddedContentParser() *EmbeddedContentParser {
 	return &EmbeddedContentParser{}
 }
 
-func (p *EmbeddedContentParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*EmbeddedContentParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 5 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewEscapingCharacterParser() *EscapingCharacterParser {
 	return &EscapingCharacterParser{}
 }
 
-func (p *EscapingCharacterParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*EscapingCharacterParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	if len(tokens) < 2 {
 		return nil, 0
 	}

@@ -11,7 +11,7 @@ func NewHeadingParser() *HeadingParser {
 	return &HeadingParser{}
 }
 
-func (p *HeadingParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*HeadingParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	spaceIndex := tokenizer.FindUnescaped(matchedTokens, tokenizer.Space)
 	if spaceIndex < 0 {

@@ -11,7 +11,7 @@ func NewHighlightParser() InlineParser {
 	return &HighlightParser{}
 }
 
-func (p *HighlightParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*HighlightParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedToken := tokenizer.GetFirstLine(tokens)
 	if len(matchedToken) < 5 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewHorizontalRuleParser() *HorizontalRuleParser {
 	return &HorizontalRuleParser{}
 }
 
-func (p *HorizontalRuleParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*HorizontalRuleParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewImageParser() *ImageParser {
 	return &ImageParser{}
 }
 
-func (p *ImageParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*ImageParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 5 {
 		return nil, 0

@@ -13,7 +13,7 @@ func NewItalicParser() *ItalicParser {
 	return &ItalicParser{}
 }
 
-func (p *ItalicParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*ItalicParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewLineBreakParser() *LineBreakParser {
 	return &LineBreakParser{}
 }
 
-func (p *LineBreakParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*LineBreakParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	if len(tokens) == 0 {
 		return nil, 0
 	}

@@ -11,7 +11,7 @@ func NewLinkParser() *LinkParser {
 	return &LinkParser{}
 }
 
-func (p *LinkParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*LinkParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 5 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewMathParser() *MathParser {
 	return &MathParser{}
 }
 
-func (p *MathParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*MathParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewMathBlockParser() *MathBlockParser {
 	return &MathBlockParser{}
 }
 
-func (p *MathBlockParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*MathBlockParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	rows := tokenizer.Split(tokens, tokenizer.Newline)
 	if len(rows) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewOrderedListParser() *OrderedListParser {
 	return &OrderedListParser{}
 }
 
-func (p *OrderedListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*OrderedListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	indent := 0
 	for _, token := range matchedTokens {

@@ -13,7 +13,7 @@ func NewParagraphParser() *ParagraphParser {
 	return &ParagraphParser{}
 }
 
-func (p *ParagraphParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*ParagraphParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) == 0 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewStrikethroughParser() *StrikethroughParser {
 	return &StrikethroughParser{}
 }
 
-func (p *StrikethroughParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*StrikethroughParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 5 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewSubscriptParser() *SubscriptParser {
 	return &SubscriptParser{}
 }
 
-func (p *SubscriptParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*SubscriptParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewSuperscriptParser() *SuperscriptParser {
 	return &SuperscriptParser{}
 }
 
-func (p *SuperscriptParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*SuperscriptParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewTableParser() *TableParser {
 	return &TableParser{}
 }
 
-func (p *TableParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*TableParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	rawRows := tokenizer.Split(tokens, tokenizer.Newline)
 	if len(rawRows) < 3 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewTagParser() *TagParser {
 	return &TagParser{}
 }
 
-func (p *TagParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*TagParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	if len(matchedTokens) < 2 {
 		return nil, 0

@@ -11,7 +11,7 @@ func NewTaskListParser() *TaskListParser {
 	return &TaskListParser{}
 }
 
-func (p *TaskListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*TaskListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	indent := 0
 	for _, token := range matchedTokens {

@@ -11,7 +11,7 @@ func NewUnorderedListParser() *UnorderedListParser {
 	return &UnorderedListParser{}
 }
 
-func (p *UnorderedListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
+func (*UnorderedListParser) Match(tokens []*tokenizer.Token) (ast.Node, int) {
 	matchedTokens := tokenizer.GetFirstLine(tokens)
 	indent := 0
 	for _, token := range matchedTokens {

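Every hunk above applies the same one-line rename, so this is a pure lint cleanup with no functional change. To verify locally, re-running the repository's linter (for example golangci-lint run ./..., assuming that is the configured tool) should report the unused-receiver warnings as resolved.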