Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
C
canifa_note
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Vũ Hoàng Anh
canifa_note
Commits
46f7cffc
Commit
46f7cffc
authored
Dec 28, 2023
by
Steven
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
feat: implement restore nodes
parent
2a6f0548
Changes
34
Hide whitespace changes
Inline
Side-by-side
Showing
34 changed files
with
264 additions
and
154 deletions
+264
-154
ast.go
plugin/gomark/ast/ast.go
+12
-47
block.go
plugin/gomark/ast/block.go
+70
-0
inline.go
plugin/gomark/ast/inline.go
+57
-1
auto_link.go
plugin/gomark/parser/auto_link.go
+4
-1
auto_link_test.go
plugin/gomark/parser/auto_link_test.go
+4
-2
blockquote.go
plugin/gomark/parser/blockquote.go
+1
-1
blockquote_test.go
plugin/gomark/parser/blockquote_test.go
+2
-1
bold_italic_test.go
plugin/gomark/parser/bold_italic_test.go
+2
-1
bold_test.go
plugin/gomark/parser/bold_test.go
+2
-1
code_block.go
plugin/gomark/parser/code_block.go
+1
-4
code_block_test.go
plugin/gomark/parser/code_block_test.go
+2
-1
code_test.go
plugin/gomark/parser/code_test.go
+2
-1
escaping_character_test.go
plugin/gomark/parser/escaping_character_test.go
+2
-1
heading.go
plugin/gomark/parser/heading.go
+1
-4
heading_test.go
plugin/gomark/parser/heading_test.go
+2
-1
horizontal_rule_test.go
plugin/gomark/parser/horizontal_rule_test.go
+2
-1
image_test.go
plugin/gomark/parser/image_test.go
+2
-1
italic_test.go
plugin/gomark/parser/italic_test.go
+2
-1
link_test.go
plugin/gomark/parser/link_test.go
+2
-1
ordered_list.go
plugin/gomark/parser/ordered_list.go
+1
-4
ordered_list_test.go
plugin/gomark/parser/ordered_list_test.go
+2
-1
paragraph.go
plugin/gomark/parser/paragraph.go
+2
-6
paragraph_test.go
plugin/gomark/parser/paragraph_test.go
+2
-1
parser.go
plugin/gomark/parser/parser.go
+10
-0
parser_test.go
plugin/gomark/parser/parser_test.go
+2
-58
strikethrough_test.go
plugin/gomark/parser/strikethrough_test.go
+2
-1
tag_test.go
plugin/gomark/parser/tag_test.go
+2
-1
task_list.go
plugin/gomark/parser/task_list.go
+1
-4
task_list_test.go
plugin/gomark/parser/task_list_test.go
+2
-1
unordered_list.go
plugin/gomark/parser/unordered_list.go
+1
-4
unordered_list_test.go
plugin/gomark/parser/unordered_list_test.go
+2
-1
html.go
plugin/gomark/renderer/html/html.go
+1
-1
restore.go
plugin/gomark/restore/restore.go
+14
-0
restore_test.go
plugin/gomark/restore/restore_test.go
+48
-0
No files found.
plugin/gomark/ast/ast.go
View file @
46f7cffc
...
@@ -28,57 +28,13 @@ const (
...
@@ -28,57 +28,13 @@ const (
EscapingCharacterNode
EscapingCharacterNode
)
)
func
(
t
NodeType
)
String
()
string
{
switch
t
{
case
LineBreakNode
:
return
"LineBreakNode"
case
ParagraphNode
:
return
"ParagraphNode"
case
CodeBlockNode
:
return
"CodeBlockNode"
case
HeadingNode
:
return
"HeadingNode"
case
HorizontalRuleNode
:
return
"HorizontalRuleNode"
case
BlockquoteNode
:
return
"BlockquoteNode"
case
OrderedListNode
:
return
"OrderedListNode"
case
UnorderedListNode
:
return
"UnorderedListNode"
case
TaskListNode
:
return
"TaskListNode"
case
TextNode
:
return
"TextNode"
case
BoldNode
:
return
"BoldNode"
case
ItalicNode
:
return
"ItalicNode"
case
BoldItalicNode
:
return
"BoldItalicNode"
case
CodeNode
:
return
"CodeNode"
case
ImageNode
:
return
"ImageNode"
case
LinkNode
:
return
"LinkNode"
case
AutoLinkNode
:
return
"AutoLinkNode"
case
TagNode
:
return
"TagNode"
case
StrikethroughNode
:
return
"StrikethroughNode"
case
EscapingCharacterNode
:
return
"EscapingCharacterNode"
default
:
return
"UnknownNode"
}
}
type
Node
interface
{
type
Node
interface
{
// Type returns a node type.
// Type returns a node type.
Type
()
NodeType
Type
()
NodeType
// Restore returns a string representation of this node.
Restore
()
string
// PrevSibling returns a previous sibling node of this node.
// PrevSibling returns a previous sibling node of this node.
PrevSibling
()
Node
PrevSibling
()
Node
...
@@ -113,3 +69,12 @@ func (n *BaseNode) SetPrevSibling(node Node) {
...
@@ -113,3 +69,12 @@ func (n *BaseNode) SetPrevSibling(node Node) {
func
(
n
*
BaseNode
)
SetNextSibling
(
node
Node
)
{
func
(
n
*
BaseNode
)
SetNextSibling
(
node
Node
)
{
n
.
nextSibling
=
node
n
.
nextSibling
=
node
}
}
func
IsBlockNode
(
node
Node
)
bool
{
switch
node
.
Type
()
{
case
ParagraphNode
,
CodeBlockNode
,
HeadingNode
,
HorizontalRuleNode
,
BlockquoteNode
,
OrderedListNode
,
UnorderedListNode
,
TaskListNode
:
return
true
default
:
return
false
}
}
plugin/gomark/ast/block.go
View file @
46f7cffc
package
ast
package
ast
import
"fmt"
type
BaseBlock
struct
{
type
BaseBlock
struct
{
BaseNode
BaseNode
}
}
...
@@ -12,6 +14,10 @@ func (*LineBreak) Type() NodeType {
...
@@ -12,6 +14,10 @@ func (*LineBreak) Type() NodeType {
return
LineBreakNode
return
LineBreakNode
}
}
func
(
*
LineBreak
)
Restore
()
string
{
return
"
\n
"
}
type
Paragraph
struct
{
type
Paragraph
struct
{
BaseBlock
BaseBlock
...
@@ -22,6 +28,14 @@ func (*Paragraph) Type() NodeType {
...
@@ -22,6 +28,14 @@ func (*Paragraph) Type() NodeType {
return
ParagraphNode
return
ParagraphNode
}
}
func
(
n
*
Paragraph
)
Restore
()
string
{
var
result
string
for
_
,
child
:=
range
n
.
Children
{
result
+=
child
.
Restore
()
}
return
result
}
type
CodeBlock
struct
{
type
CodeBlock
struct
{
BaseBlock
BaseBlock
...
@@ -33,6 +47,10 @@ func (*CodeBlock) Type() NodeType {
...
@@ -33,6 +47,10 @@ func (*CodeBlock) Type() NodeType {
return
CodeBlockNode
return
CodeBlockNode
}
}
func
(
n
*
CodeBlock
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"```%s
\n
%s
\n
```"
,
n
.
Language
,
n
.
Content
)
}
type
Heading
struct
{
type
Heading
struct
{
BaseBlock
BaseBlock
...
@@ -44,6 +62,18 @@ func (*Heading) Type() NodeType {
...
@@ -44,6 +62,18 @@ func (*Heading) Type() NodeType {
return
HeadingNode
return
HeadingNode
}
}
func
(
n
*
Heading
)
Restore
()
string
{
var
result
string
for
_
,
child
:=
range
n
.
Children
{
result
+=
child
.
Restore
()
}
symbol
:=
""
for
i
:=
0
;
i
<
n
.
Level
;
i
++
{
symbol
+=
"#"
}
return
fmt
.
Sprintf
(
"%s %s"
,
symbol
,
result
)
}
type
HorizontalRule
struct
{
type
HorizontalRule
struct
{
BaseBlock
BaseBlock
...
@@ -55,6 +85,10 @@ func (*HorizontalRule) Type() NodeType {
...
@@ -55,6 +85,10 @@ func (*HorizontalRule) Type() NodeType {
return
HorizontalRuleNode
return
HorizontalRuleNode
}
}
func
(
n
*
HorizontalRule
)
Restore
()
string
{
return
n
.
Symbol
+
n
.
Symbol
+
n
.
Symbol
}
type
Blockquote
struct
{
type
Blockquote
struct
{
BaseBlock
BaseBlock
...
@@ -65,6 +99,14 @@ func (*Blockquote) Type() NodeType {
...
@@ -65,6 +99,14 @@ func (*Blockquote) Type() NodeType {
return
BlockquoteNode
return
BlockquoteNode
}
}
func
(
n
*
Blockquote
)
Restore
()
string
{
var
result
string
for
_
,
child
:=
range
n
.
Children
{
result
+=
child
.
Restore
()
}
return
fmt
.
Sprintf
(
"> %s"
,
result
)
}
type
OrderedList
struct
{
type
OrderedList
struct
{
BaseBlock
BaseBlock
...
@@ -76,6 +118,14 @@ func (*OrderedList) Type() NodeType {
...
@@ -76,6 +118,14 @@ func (*OrderedList) Type() NodeType {
return
OrderedListNode
return
OrderedListNode
}
}
func
(
n
*
OrderedList
)
Restore
()
string
{
var
result
string
for
_
,
child
:=
range
n
.
Children
{
result
+=
child
.
Restore
()
}
return
fmt
.
Sprintf
(
"%s. %s"
,
n
.
Number
,
result
)
}
type
UnorderedList
struct
{
type
UnorderedList
struct
{
BaseBlock
BaseBlock
...
@@ -88,6 +138,14 @@ func (*UnorderedList) Type() NodeType {
...
@@ -88,6 +138,14 @@ func (*UnorderedList) Type() NodeType {
return
UnorderedListNode
return
UnorderedListNode
}
}
func
(
n
*
UnorderedList
)
Restore
()
string
{
var
result
string
for
_
,
child
:=
range
n
.
Children
{
result
+=
child
.
Restore
()
}
return
fmt
.
Sprintf
(
"%s %s"
,
n
.
Symbol
,
result
)
}
type
TaskList
struct
{
type
TaskList
struct
{
BaseBlock
BaseBlock
...
@@ -100,3 +158,15 @@ type TaskList struct {
...
@@ -100,3 +158,15 @@ type TaskList struct {
func
(
*
TaskList
)
Type
()
NodeType
{
func
(
*
TaskList
)
Type
()
NodeType
{
return
TaskListNode
return
TaskListNode
}
}
func
(
n
*
TaskList
)
Restore
()
string
{
var
result
string
for
_
,
child
:=
range
n
.
Children
{
result
+=
child
.
Restore
()
}
complete
:=
" "
if
n
.
Complete
{
complete
=
"x"
}
return
fmt
.
Sprintf
(
"%s [%s] %s"
,
n
.
Symbol
,
complete
,
result
)
}
plugin/gomark/ast/inline.go
View file @
46f7cffc
package
ast
package
ast
import
"fmt"
type
BaseInline
struct
{
type
BaseInline
struct
{
BaseNode
BaseNode
}
}
...
@@ -14,6 +16,10 @@ func (*Text) Type() NodeType {
...
@@ -14,6 +16,10 @@ func (*Text) Type() NodeType {
return
TextNode
return
TextNode
}
}
func
(
n
*
Text
)
Restore
()
string
{
return
n
.
Content
}
type
Bold
struct
{
type
Bold
struct
{
BaseInline
BaseInline
...
@@ -26,6 +32,15 @@ func (*Bold) Type() NodeType {
...
@@ -26,6 +32,15 @@ func (*Bold) Type() NodeType {
return
BoldNode
return
BoldNode
}
}
func
(
n
*
Bold
)
Restore
()
string
{
symbol
:=
n
.
Symbol
+
n
.
Symbol
children
:=
""
for
_
,
child
:=
range
n
.
Children
{
children
+=
child
.
Restore
()
}
return
fmt
.
Sprintf
(
"%s%s%s"
,
symbol
,
children
,
symbol
)
}
type
Italic
struct
{
type
Italic
struct
{
BaseInline
BaseInline
...
@@ -38,6 +53,10 @@ func (*Italic) Type() NodeType {
...
@@ -38,6 +53,10 @@ func (*Italic) Type() NodeType {
return
ItalicNode
return
ItalicNode
}
}
func
(
n
*
Italic
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"%s%s%s"
,
n
.
Symbol
,
n
.
Content
,
n
.
Symbol
)
}
type
BoldItalic
struct
{
type
BoldItalic
struct
{
BaseInline
BaseInline
...
@@ -50,6 +69,11 @@ func (*BoldItalic) Type() NodeType {
...
@@ -50,6 +69,11 @@ func (*BoldItalic) Type() NodeType {
return
BoldItalicNode
return
BoldItalicNode
}
}
func
(
n
*
BoldItalic
)
Restore
()
string
{
symbol
:=
n
.
Symbol
+
n
.
Symbol
+
n
.
Symbol
return
fmt
.
Sprintf
(
"%s%s%s"
,
symbol
,
n
.
Content
,
symbol
)
}
type
Code
struct
{
type
Code
struct
{
BaseInline
BaseInline
...
@@ -60,6 +84,10 @@ func (*Code) Type() NodeType {
...
@@ -60,6 +84,10 @@ func (*Code) Type() NodeType {
return
CodeNode
return
CodeNode
}
}
func
(
n
*
Code
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"`%s`"
,
n
.
Content
)
}
type
Image
struct
{
type
Image
struct
{
BaseInline
BaseInline
...
@@ -71,6 +99,10 @@ func (*Image) Type() NodeType {
...
@@ -71,6 +99,10 @@ func (*Image) Type() NodeType {
return
ImageNode
return
ImageNode
}
}
func
(
n
*
Image
)
Restore
()
string
{
return
fmt
.
Sprintf
(
""
,
n
.
AltText
,
n
.
URL
)
}
type
Link
struct
{
type
Link
struct
{
BaseInline
BaseInline
...
@@ -82,16 +114,28 @@ func (*Link) Type() NodeType {
...
@@ -82,16 +114,28 @@ func (*Link) Type() NodeType {
return
LinkNode
return
LinkNode
}
}
func
(
n
*
Link
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"[%s](%s)"
,
n
.
Text
,
n
.
URL
)
}
type
AutoLink
struct
{
type
AutoLink
struct
{
BaseInline
BaseInline
URL
string
URL
string
IsRawText
bool
}
}
func
(
*
AutoLink
)
Type
()
NodeType
{
func
(
*
AutoLink
)
Type
()
NodeType
{
return
AutoLinkNode
return
AutoLinkNode
}
}
func
(
n
*
AutoLink
)
Restore
()
string
{
if
n
.
IsRawText
{
return
n
.
URL
}
return
fmt
.
Sprintf
(
"<%s>"
,
n
.
URL
)
}
type
Tag
struct
{
type
Tag
struct
{
BaseInline
BaseInline
...
@@ -102,6 +146,10 @@ func (*Tag) Type() NodeType {
...
@@ -102,6 +146,10 @@ func (*Tag) Type() NodeType {
return
TagNode
return
TagNode
}
}
func
(
n
*
Tag
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"<%s>"
,
n
.
Content
)
}
type
Strikethrough
struct
{
type
Strikethrough
struct
{
BaseInline
BaseInline
...
@@ -112,6 +160,10 @@ func (*Strikethrough) Type() NodeType {
...
@@ -112,6 +160,10 @@ func (*Strikethrough) Type() NodeType {
return
StrikethroughNode
return
StrikethroughNode
}
}
func
(
n
*
Strikethrough
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"~~%s~~"
,
n
.
Content
)
}
type
EscapingCharacter
struct
{
type
EscapingCharacter
struct
{
BaseInline
BaseInline
...
@@ -121,3 +173,7 @@ type EscapingCharacter struct {
...
@@ -121,3 +173,7 @@ type EscapingCharacter struct {
func
(
*
EscapingCharacter
)
Type
()
NodeType
{
func
(
*
EscapingCharacter
)
Type
()
NodeType
{
return
EscapingCharacterNode
return
EscapingCharacterNode
}
}
func
(
n
*
EscapingCharacter
)
Restore
()
string
{
return
fmt
.
Sprintf
(
"
\\
%s"
,
n
.
Symbol
)
}
plugin/gomark/parser/auto_link.go
View file @
46f7cffc
...
@@ -57,10 +57,13 @@ func (p *AutoLinkParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
...
@@ -57,10 +57,13 @@ func (p *AutoLinkParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
}
}
url
:=
tokenizer
.
Stringify
(
tokens
[
:
size
])
url
:=
tokenizer
.
Stringify
(
tokens
[
:
size
])
isRawText
:=
true
if
tokens
[
0
]
.
Type
==
tokenizer
.
LessThan
&&
tokens
[
len
(
tokens
)
-
1
]
.
Type
==
tokenizer
.
GreaterThan
{
if
tokens
[
0
]
.
Type
==
tokenizer
.
LessThan
&&
tokens
[
len
(
tokens
)
-
1
]
.
Type
==
tokenizer
.
GreaterThan
{
isRawText
=
false
url
=
tokenizer
.
Stringify
(
tokens
[
1
:
len
(
tokens
)
-
1
])
url
=
tokenizer
.
Stringify
(
tokens
[
1
:
len
(
tokens
)
-
1
])
}
}
return
&
ast
.
AutoLink
{
return
&
ast
.
AutoLink
{
URL
:
url
,
URL
:
url
,
IsRawText
:
isRawText
,
},
nil
},
nil
}
}
plugin/gomark/parser/auto_link_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestAutoLinkParser
(
t
*
testing
.
T
)
{
func
TestAutoLinkParser
(
t
*
testing
.
T
)
{
...
@@ -27,7 +28,8 @@ func TestAutoLinkParser(t *testing.T) {
...
@@ -27,7 +28,8 @@ func TestAutoLinkParser(t *testing.T) {
{
{
text
:
"https://example.com"
,
text
:
"https://example.com"
,
link
:
&
ast
.
AutoLink
{
link
:
&
ast
.
AutoLink
{
URL
:
"https://example.com"
,
URL
:
"https://example.com"
,
IsRawText
:
true
,
},
},
},
},
}
}
...
@@ -35,6 +37,6 @@ func TestAutoLinkParser(t *testing.T) {
...
@@ -35,6 +37,6 @@ func TestAutoLinkParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewAutoLinkParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewAutoLinkParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
link
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
link
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/blockquote.go
View file @
46f7cffc
...
@@ -23,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -23,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
[
2
:
]
{
for
_
,
token
:=
range
tokens
[
2
:
]
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
return
0
,
false
return
0
,
false
...
...
plugin/gomark/parser/blockquote_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestBlockquoteParser
(
t
*
testing
.
T
)
{
func
TestBlockquoteParser
(
t
*
testing
.
T
)
{
...
@@ -51,6 +52,6 @@ func TestBlockquoteParser(t *testing.T) {
...
@@ -51,6 +52,6 @@ func TestBlockquoteParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewBlockquoteParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewBlockquoteParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
blockquote
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
blockquote
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/bold_italic_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestBoldItalicParser
(
t
*
testing
.
T
)
{
func
TestBoldItalicParser
(
t
*
testing
.
T
)
{
...
@@ -45,6 +46,6 @@ func TestBoldItalicParser(t *testing.T) {
...
@@ -45,6 +46,6 @@ func TestBoldItalicParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewBoldItalicParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewBoldItalicParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
boldItalic
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
boldItalic
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/bold_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestBoldParser
(
t
*
testing
.
T
)
{
func
TestBoldParser
(
t
*
testing
.
T
)
{
...
@@ -53,6 +54,6 @@ func TestBoldParser(t *testing.T) {
...
@@ -53,6 +54,6 @@ func TestBoldParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewBoldParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewBoldParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
bold
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
bold
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/code_block.go
View file @
46f7cffc
...
@@ -40,7 +40,7 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -40,7 +40,7 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
matched
=
true
matched
=
true
break
break
}
else
if
tokens
[
cursor
+
4
]
.
Type
==
tokenizer
.
Newline
{
}
else
if
tokens
[
cursor
+
4
]
.
Type
==
tokenizer
.
Newline
{
cursor
+=
5
cursor
+=
4
matched
=
true
matched
=
true
break
break
}
}
...
@@ -65,9 +65,6 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
...
@@ -65,9 +65,6 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
languageToken
=
nil
languageToken
=
nil
contentStart
=
4
contentStart
=
4
}
}
if
tokens
[
size
-
1
]
.
Type
==
tokenizer
.
Newline
{
contentEnd
=
size
-
5
}
codeBlock
:=
&
ast
.
CodeBlock
{
codeBlock
:=
&
ast
.
CodeBlock
{
Content
:
tokenizer
.
Stringify
(
tokens
[
contentStart
:
contentEnd
]),
Content
:
tokenizer
.
Stringify
(
tokens
[
contentStart
:
contentEnd
]),
...
...
plugin/gomark/parser/code_block_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestCodeBlockParser
(
t
*
testing
.
T
)
{
func
TestCodeBlockParser
(
t
*
testing
.
T
)
{
...
@@ -59,6 +60,6 @@ func TestCodeBlockParser(t *testing.T) {
...
@@ -59,6 +60,6 @@ func TestCodeBlockParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewCodeBlockParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewCodeBlockParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
codeBlock
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
codeBlock
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/code_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestCodeParser
(
t
*
testing
.
T
)
{
func
TestCodeParser
(
t
*
testing
.
T
)
{
...
@@ -33,6 +34,6 @@ func TestCodeParser(t *testing.T) {
...
@@ -33,6 +34,6 @@ func TestCodeParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewCodeParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewCodeParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
code
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
code
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/escaping_character_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestEscapingCharacterParser
(
t
*
testing
.
T
)
{
func
TestEscapingCharacterParser
(
t
*
testing
.
T
)
{
...
@@ -25,6 +26,6 @@ func TestEscapingCharacterParser(t *testing.T) {
...
@@ -25,6 +26,6 @@ func TestEscapingCharacterParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewEscapingCharacterParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewEscapingCharacterParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
node
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
node
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/heading.go
View file @
46f7cffc
...
@@ -34,10 +34,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -34,10 +34,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
[
level
+
1
:
]
{
for
_
,
token
:=
range
tokens
[
level
+
1
:
]
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
return
0
,
false
return
0
,
false
...
@@ -62,9 +62,6 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
...
@@ -62,9 +62,6 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
}
}
contentTokens
:=
tokens
[
level
+
1
:
size
]
contentTokens
:=
tokens
[
level
+
1
:
size
]
if
contentTokens
[
len
(
contentTokens
)
-
1
]
.
Type
==
tokenizer
.
Newline
{
contentTokens
=
contentTokens
[
:
len
(
contentTokens
)
-
1
]
}
children
,
err
:=
ParseInline
(
contentTokens
)
children
,
err
:=
ParseInline
(
contentTokens
)
if
err
!=
nil
{
if
err
!=
nil
{
return
nil
,
err
return
nil
,
err
...
...
plugin/gomark/parser/heading_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestHeadingParser
(
t
*
testing
.
T
)
{
func
TestHeadingParser
(
t
*
testing
.
T
)
{
...
@@ -80,6 +81,6 @@ Hello World`,
...
@@ -80,6 +81,6 @@ Hello World`,
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewHeadingParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewHeadingParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
heading
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
heading
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/horizontal_rule_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestHorizontalRuleParser
(
t
*
testing
.
T
)
{
func
TestHorizontalRuleParser
(
t
*
testing
.
T
)
{
...
@@ -51,6 +52,6 @@ func TestHorizontalRuleParser(t *testing.T) {
...
@@ -51,6 +52,6 @@ func TestHorizontalRuleParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewHorizontalRuleParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewHorizontalRuleParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
horizontalRule
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
horizontalRule
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/image_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestImageParser
(
t
*
testing
.
T
)
{
func
TestImageParser
(
t
*
testing
.
T
)
{
...
@@ -40,6 +41,6 @@ func TestImageParser(t *testing.T) {
...
@@ -40,6 +41,6 @@ func TestImageParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewImageParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewImageParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
image
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
image
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/italic_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestItalicParser
(
t
*
testing
.
T
)
{
func
TestItalicParser
(
t
*
testing
.
T
)
{
...
@@ -44,6 +45,6 @@ func TestItalicParser(t *testing.T) {
...
@@ -44,6 +45,6 @@ func TestItalicParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewItalicParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewItalicParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
italic
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
italic
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/link_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestLinkParser
(
t
*
testing
.
T
)
{
func
TestLinkParser
(
t
*
testing
.
T
)
{
...
@@ -47,6 +48,6 @@ func TestLinkParser(t *testing.T) {
...
@@ -47,6 +48,6 @@ func TestLinkParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewLinkParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewLinkParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
link
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
link
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/ordered_list.go
View file @
46f7cffc
...
@@ -23,10 +23,10 @@ func (*OrderedListParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -23,10 +23,10 @@ func (*OrderedListParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
[
3
:
]
{
for
_
,
token
:=
range
tokens
[
3
:
]
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
...
@@ -43,9 +43,6 @@ func (p *OrderedListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
...
@@ -43,9 +43,6 @@ func (p *OrderedListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
}
}
contentTokens
:=
tokens
[
3
:
size
]
contentTokens
:=
tokens
[
3
:
size
]
if
contentTokens
[
len
(
contentTokens
)
-
1
]
.
Type
==
tokenizer
.
Newline
{
contentTokens
=
contentTokens
[
:
len
(
contentTokens
)
-
1
]
}
children
,
err
:=
ParseInline
(
contentTokens
)
children
,
err
:=
ParseInline
(
contentTokens
)
if
err
!=
nil
{
if
err
!=
nil
{
return
nil
,
err
return
nil
,
err
...
...
plugin/gomark/parser/ordered_list_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestOrderedListParser
(
t
*
testing
.
T
)
{
func
TestOrderedListParser
(
t
*
testing
.
T
)
{
...
@@ -53,6 +54,6 @@ func TestOrderedListParser(t *testing.T) {
...
@@ -53,6 +54,6 @@ func TestOrderedListParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewOrderedListParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewOrderedListParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
node
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
node
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/paragraph.go
View file @
46f7cffc
...
@@ -18,10 +18,10 @@ func NewParagraphParser() *ParagraphParser {
...
@@ -18,10 +18,10 @@ func NewParagraphParser() *ParagraphParser {
func
(
*
ParagraphParser
)
Match
(
tokens
[]
*
tokenizer
.
Token
)
(
int
,
bool
)
{
func
(
*
ParagraphParser
)
Match
(
tokens
[]
*
tokenizer
.
Token
)
(
int
,
bool
)
{
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
{
for
_
,
token
:=
range
tokens
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
return
0
,
false
return
0
,
false
...
@@ -38,11 +38,7 @@ func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
...
@@ -38,11 +38,7 @@ func (p *ParagraphParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
return
nil
,
errors
.
New
(
"not matched"
)
return
nil
,
errors
.
New
(
"not matched"
)
}
}
contentTokens
:=
tokens
[
:
size
]
children
,
err
:=
ParseInline
(
tokens
[
:
size
])
if
contentTokens
[
len
(
contentTokens
)
-
1
]
.
Type
==
tokenizer
.
Newline
{
contentTokens
=
contentTokens
[
:
len
(
contentTokens
)
-
1
]
}
children
,
err
:=
ParseInline
(
contentTokens
)
if
err
!=
nil
{
if
err
!=
nil
{
return
nil
,
err
return
nil
,
err
}
}
...
...
plugin/gomark/parser/paragraph_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestParagraphParser
(
t
*
testing
.
T
)
{
func
TestParagraphParser
(
t
*
testing
.
T
)
{
...
@@ -57,6 +58,6 @@ func TestParagraphParser(t *testing.T) {
...
@@ -57,6 +58,6 @@ func TestParagraphParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewParagraphParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewParagraphParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
paragraph
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
paragraph
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/parser.go
View file @
46f7cffc
...
@@ -48,6 +48,7 @@ func ParseBlock(tokens []*tokenizer.Token) ([]ast.Node, error) {
...
@@ -48,6 +48,7 @@ func ParseBlock(tokens []*tokenizer.Token) ([]ast.Node, error) {
func
ParseBlockWithParsers
(
tokens
[]
*
tokenizer
.
Token
,
blockParsers
[]
BlockParser
)
([]
ast
.
Node
,
error
)
{
func
ParseBlockWithParsers
(
tokens
[]
*
tokenizer
.
Token
,
blockParsers
[]
BlockParser
)
([]
ast
.
Node
,
error
)
{
nodes
:=
[]
ast
.
Node
{}
nodes
:=
[]
ast
.
Node
{}
var
prevNode
ast
.
Node
var
prevNode
ast
.
Node
var
skipNextLineBreakFlag
bool
for
len
(
tokens
)
>
0
{
for
len
(
tokens
)
>
0
{
for
_
,
blockParser
:=
range
blockParsers
{
for
_
,
blockParser
:=
range
blockParsers
{
size
,
matched
:=
blockParser
.
Match
(
tokens
)
size
,
matched
:=
blockParser
.
Match
(
tokens
)
...
@@ -57,12 +58,21 @@ func ParseBlockWithParsers(tokens []*tokenizer.Token, blockParsers []BlockParser
...
@@ -57,12 +58,21 @@ func ParseBlockWithParsers(tokens []*tokenizer.Token, blockParsers []BlockParser
return
nil
,
errors
.
New
(
"parse error"
)
return
nil
,
errors
.
New
(
"parse error"
)
}
}
if
node
.
Type
()
==
ast
.
LineBreakNode
&&
skipNextLineBreakFlag
{
if
prevNode
!=
nil
&&
ast
.
IsBlockNode
(
prevNode
)
{
tokens
=
tokens
[
size
:
]
skipNextLineBreakFlag
=
false
break
}
}
tokens
=
tokens
[
size
:
]
tokens
=
tokens
[
size
:
]
if
prevNode
!=
nil
{
if
prevNode
!=
nil
{
prevNode
.
SetNextSibling
(
node
)
prevNode
.
SetNextSibling
(
node
)
node
.
SetPrevSibling
(
prevNode
)
node
.
SetPrevSibling
(
prevNode
)
}
}
prevNode
=
node
prevNode
=
node
skipNextLineBreakFlag
=
true
nodes
=
append
(
nodes
,
node
)
nodes
=
append
(
nodes
,
node
)
break
break
}
}
...
...
plugin/gomark/parser/parser_test.go
View file @
46f7cffc
package
parser
package
parser
import
(
import
(
"strconv"
"testing"
"testing"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestParser
(
t
*
testing
.
T
)
{
func
TestParser
(
t
*
testing
.
T
)
{
...
@@ -202,62 +202,6 @@ func TestParser(t *testing.T) {
...
@@ -202,62 +202,6 @@ func TestParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
nodes
,
_
:=
Parse
(
tokens
)
nodes
,
_
:=
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
(
test
.
nodes
),
StringifyNodes
(
nodes
))
require
.
Equal
(
t
,
restore
.
Restore
(
test
.
nodes
),
restore
.
Restore
(
nodes
))
}
}
}
}
func
StringifyNodes
(
nodes
[]
ast
.
Node
)
string
{
var
result
string
for
_
,
node
:=
range
nodes
{
if
node
!=
nil
{
result
+=
StringifyNode
(
node
)
}
}
return
result
}
// StringifyNode converts a single AST node into a compact debug
// representation such as "Text(hello)" or "Bold(*Text(hi)*)", recursing
// into child nodes via StringifyNodes where a node has children.
// Unknown node types yield the empty string.
func StringifyNode(node ast.Node) string {
	switch n := node.(type) {
	case *ast.LineBreak:
		return "LineBreak()"
	case *ast.CodeBlock:
		return "CodeBlock(" + n.Language + ", " + n.Content + ")"
	case *ast.Paragraph:
		return "Paragraph(" + StringifyNodes(n.Children) + ")"
	case *ast.Heading:
		return "Heading(" + StringifyNodes(n.Children) + ")"
	case *ast.HorizontalRule:
		return "HorizontalRule(" + n.Symbol + ")"
	case *ast.Blockquote:
		return "Blockquote(" + StringifyNodes(n.Children) + ")"
	case *ast.OrderedList:
		return "OrderedList(" + n.Number + ", " + StringifyNodes(n.Children) + ")"
	case *ast.UnorderedList:
		return "UnorderedList(" + n.Symbol + ", " + StringifyNodes(n.Children) + ")"
	case *ast.TaskList:
		return "TaskList(" + n.Symbol + ", " + strconv.FormatBool(n.Complete) + ", " + StringifyNodes(n.Children) + ")"
	case *ast.Text:
		return "Text(" + n.Content + ")"
	case *ast.Bold:
		// Bold wraps its children with the original symbol on both sides.
		return "Bold(" + n.Symbol + StringifyNodes(n.Children) + n.Symbol + ")"
	case *ast.Italic:
		return "Italic(" + n.Symbol + n.Content + n.Symbol + ")"
	case *ast.BoldItalic:
		return "BoldItalic(" + n.Symbol + n.Content + n.Symbol + ")"
	case *ast.Code:
		return "Code(" + n.Content + ")"
	case *ast.Image:
		return "Image(" + n.URL + ", " + n.AltText + ")"
	case *ast.Link:
		return "Link(" + n.Text + ", " + n.URL + ")"
	case *ast.AutoLink:
		return "AutoLink(" + n.URL + ")"
	case *ast.Tag:
		return "Tag(" + n.Content + ")"
	case *ast.Strikethrough:
		return "Strikethrough(" + n.Content + ")"
	case *ast.EscapingCharacter:
		return "EscapingCharacter(" + n.Symbol + ")"
	}
	return ""
}
plugin/gomark/parser/strikethrough_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestStrikethroughParser
(
t
*
testing
.
T
)
{
func
TestStrikethroughParser
(
t
*
testing
.
T
)
{
...
@@ -41,6 +42,6 @@ func TestStrikethroughParser(t *testing.T) {
...
@@ -41,6 +42,6 @@ func TestStrikethroughParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewStrikethroughParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewStrikethroughParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
strikethrough
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
strikethrough
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/tag_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestTagParser
(
t
*
testing
.
T
)
{
func
TestTagParser
(
t
*
testing
.
T
)
{
...
@@ -39,6 +40,6 @@ func TestTagParser(t *testing.T) {
...
@@ -39,6 +40,6 @@ func TestTagParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewTagParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewTagParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
tag
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
tag
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/task_list.go
View file @
46f7cffc
...
@@ -34,10 +34,10 @@ func (*TaskListParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -34,10 +34,10 @@ func (*TaskListParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
[
6
:
]
{
for
_
,
token
:=
range
tokens
[
6
:
]
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
return
0
,
false
return
0
,
false
...
@@ -54,9 +54,6 @@ func (p *TaskListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
...
@@ -54,9 +54,6 @@ func (p *TaskListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error) {
symbolToken
:=
tokens
[
0
]
symbolToken
:=
tokens
[
0
]
contentTokens
:=
tokens
[
6
:
size
]
contentTokens
:=
tokens
[
6
:
size
]
if
contentTokens
[
len
(
contentTokens
)
-
1
]
.
Type
==
tokenizer
.
Newline
{
contentTokens
=
contentTokens
[
:
len
(
contentTokens
)
-
1
]
}
children
,
err
:=
ParseInline
(
contentTokens
)
children
,
err
:=
ParseInline
(
contentTokens
)
if
err
!=
nil
{
if
err
!=
nil
{
return
nil
,
err
return
nil
,
err
...
...
plugin/gomark/parser/task_list_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestTaskListParser
(
t
*
testing
.
T
)
{
func
TestTaskListParser
(
t
*
testing
.
T
)
{
...
@@ -52,6 +53,6 @@ func TestTaskListParser(t *testing.T) {
...
@@ -52,6 +53,6 @@ func TestTaskListParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewTaskListParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewTaskListParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
node
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
node
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/unordered_list.go
View file @
46f7cffc
...
@@ -24,10 +24,10 @@ func (*UnorderedListParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -24,10 +24,10 @@ func (*UnorderedListParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
[
2
:
]
{
for
_
,
token
:=
range
tokens
[
2
:
]
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
return
0
,
false
return
0
,
false
...
@@ -44,9 +44,6 @@ func (p *UnorderedListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error)
...
@@ -44,9 +44,6 @@ func (p *UnorderedListParser) Parse(tokens []*tokenizer.Token) (ast.Node, error)
symbolToken
:=
tokens
[
0
]
symbolToken
:=
tokens
[
0
]
contentTokens
:=
tokens
[
2
:
size
]
contentTokens
:=
tokens
[
2
:
size
]
if
contentTokens
[
len
(
contentTokens
)
-
1
]
.
Type
==
tokenizer
.
Newline
{
contentTokens
=
contentTokens
[
:
len
(
contentTokens
)
-
1
]
}
children
,
err
:=
ParseInline
(
contentTokens
)
children
,
err
:=
ParseInline
(
contentTokens
)
if
err
!=
nil
{
if
err
!=
nil
{
return
nil
,
err
return
nil
,
err
...
...
plugin/gomark/parser/unordered_list_test.go
View file @
46f7cffc
...
@@ -7,6 +7,7 @@ import (
...
@@ -7,6 +7,7 @@ import (
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/restore"
)
)
func
TestUnorderedListParser
(
t
*
testing
.
T
)
{
func
TestUnorderedListParser
(
t
*
testing
.
T
)
{
...
@@ -50,6 +51,6 @@ func TestUnorderedListParser(t *testing.T) {
...
@@ -50,6 +51,6 @@ func TestUnorderedListParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
node
,
_
:=
NewUnorderedListParser
()
.
Parse
(
tokens
)
node
,
_
:=
NewUnorderedListParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
node
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
require
.
Equal
(
t
,
restore
.
Restore
([]
ast
.
Node
{
test
.
node
}),
restore
.
Restore
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/renderer/html/html.go
View file @
46f7cffc
...
@@ -83,7 +83,7 @@ func (r *HTMLRenderer) Render(astRoot []ast.Node) string {
...
@@ -83,7 +83,7 @@ func (r *HTMLRenderer) Render(astRoot []ast.Node) string {
return
r
.
output
.
String
()
return
r
.
output
.
String
()
}
}
func
(
r
*
HTMLRenderer
)
renderLineBreak
(
_
*
ast
.
LineBreak
)
{
func
(
r
*
HTMLRenderer
)
renderLineBreak
(
*
ast
.
LineBreak
)
{
r
.
output
.
WriteString
(
"<br>"
)
r
.
output
.
WriteString
(
"<br>"
)
}
}
...
...
plugin/gomark/restore/restore.go
0 → 100644
View file @
46f7cffc
package
restore
import (
	"strings"

	"github.com/usememos/memos/plugin/gomark/ast"
)
func
Restore
(
nodes
[]
ast
.
Node
)
string
{
var
result
string
for
_
,
node
:=
range
nodes
{
if
node
==
nil
{
continue
}
result
+=
node
.
Restore
()
}
return
result
}
plugin/gomark/restore/restore_test.go
0 → 100644
View file @
46f7cffc
package
restore
import
(
"testing"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/ast"
)
func
TestRestore
(
t
*
testing
.
T
)
{
tests
:=
[]
struct
{
nodes
[]
ast
.
Node
rawText
string
}{
{
nodes
:
nil
,
rawText
:
""
,
},
{
nodes
:
[]
ast
.
Node
{
&
ast
.
Text
{
Content
:
"Hello world!"
,
},
},
rawText
:
"Hello world!"
,
},
{
nodes
:
[]
ast
.
Node
{
&
ast
.
Paragraph
{
Children
:
[]
ast
.
Node
{
&
ast
.
Text
{
Content
:
"Here: "
,
},
&
ast
.
Code
{
Content
:
"Hello world!"
,
},
},
},
},
rawText
:
"Here: `Hello world!`"
,
},
}
for
_
,
test
:=
range
tests
{
require
.
Equal
(
t
,
Restore
(
test
.
nodes
),
test
.
rawText
)
}
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment