Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
C
canifa_note
Project
Project
Details
Activity
Releases
Cycle Analytics
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Charts
Issues
0
Issues
0
List
Board
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Charts
Wiki
Wiki
Snippets
Snippets
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Charts
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Vũ Hoàng Anh
canifa_note
Commits
43ef9eac
Commit
43ef9eac
authored
Dec 13, 2023
by
Steven
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
chore: implement part of html renderer
parent
453707d1
Changes
35
Hide whitespace changes
Inline
Side-by-side
Showing
35 changed files
with
449 additions
and
91 deletions
+449
-91
block.go
plugin/gomark/ast/block.go
+39
-1
inline.go
plugin/gomark/ast/inline.go
+40
-4
node.go
plugin/gomark/ast/node.go
+71
-0
blockquote.go
plugin/gomark/parser/blockquote.go
+11
-6
blockquote_test.go
plugin/gomark/parser/blockquote_test.go
+3
-1
bold.go
plugin/gomark/parser/bold.go
+5
-3
bold_italic.go
plugin/gomark/parser/bold_italic.go
+5
-3
bold_italic_test.go
plugin/gomark/parser/bold_italic_test.go
+2
-1
bold_test.go
plugin/gomark/parser/bold_test.go
+2
-1
code.go
plugin/gomark/parser/code.go
+5
-3
code_block.go
plugin/gomark/parser/code_block.go
+5
-3
code_block_test.go
plugin/gomark/parser/code_block_test.go
+2
-1
code_test.go
plugin/gomark/parser/code_test.go
+2
-1
heading.go
plugin/gomark/parser/heading.go
+13
-6
heading_test.go
plugin/gomark/parser/heading_test.go
+2
-1
horizontal_rule.go
plugin/gomark/parser/horizontal_rule.go
+5
-3
horizontal_rule_test.go
plugin/gomark/parser/horizontal_rule_test.go
+2
-1
image.go
plugin/gomark/parser/image.go
+5
-3
image_test.go
plugin/gomark/parser/image_test.go
+2
-1
italic.go
plugin/gomark/parser/italic.go
+5
-3
italic_test.go
plugin/gomark/parser/italic_test.go
+2
-1
line_break.go
plugin/gomark/parser/line_break.go
+5
-3
link.go
plugin/gomark/parser/link.go
+5
-3
link_test.go
plugin/gomark/parser/link_test.go
+2
-1
paragraph.go
plugin/gomark/parser/paragraph.go
+10
-5
paragraph_test.go
plugin/gomark/parser/paragraph_test.go
+2
-1
parser.go
plugin/gomark/parser/parser.go
+36
-16
parser_test.go
plugin/gomark/parser/parser_test.go
+21
-4
strikethrough.go
plugin/gomark/parser/strikethrough.go
+5
-3
strikethrough_test.go
plugin/gomark/parser/strikethrough_test.go
+2
-1
tag.go
plugin/gomark/parser/tag.go
+5
-3
tag_test.go
plugin/gomark/parser/tag_test.go
+2
-1
text.go
plugin/gomark/parser/text.go
+3
-3
html.go
plugin/gomark/renderer/html/html.go
+87
-0
html_test.go
plugin/gomark/renderer/html/html_test.go
+36
-0
No files found.
plugin/gomark/ast/block.go
View file @
43ef9eac
package
ast
package
ast
import
"fmt"
type
BaseBlock
struct
{
type
BaseBlock
struct
{
Node
Base
Node
}
}
type
LineBreak
struct
{
type
LineBreak
struct
{
...
@@ -14,6 +16,10 @@ func (*LineBreak) Type() NodeType {
...
@@ -14,6 +16,10 @@ func (*LineBreak) Type() NodeType {
return
NodeTypeLineBreak
return
NodeTypeLineBreak
}
}
func
(
n
*
LineBreak
)
String
()
string
{
return
n
.
Type
()
.
String
()
}
type
Paragraph
struct
{
type
Paragraph
struct
{
BaseBlock
BaseBlock
...
@@ -26,6 +32,14 @@ func (*Paragraph) Type() NodeType {
...
@@ -26,6 +32,14 @@ func (*Paragraph) Type() NodeType {
return
NodeTypeParagraph
return
NodeTypeParagraph
}
}
func
(
n
*
Paragraph
)
String
()
string
{
str
:=
n
.
Type
()
.
String
()
for
_
,
child
:=
range
n
.
Children
{
str
+=
" "
+
child
.
String
()
}
return
str
}
type
CodeBlock
struct
{
type
CodeBlock
struct
{
BaseBlock
BaseBlock
...
@@ -39,6 +53,10 @@ func (*CodeBlock) Type() NodeType {
...
@@ -39,6 +53,10 @@ func (*CodeBlock) Type() NodeType {
return
NodeTypeCodeBlock
return
NodeTypeCodeBlock
}
}
func
(
n
*
CodeBlock
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Language
+
" "
+
n
.
Content
}
type
Heading
struct
{
type
Heading
struct
{
BaseBlock
BaseBlock
...
@@ -52,6 +70,14 @@ func (*Heading) Type() NodeType {
...
@@ -52,6 +70,14 @@ func (*Heading) Type() NodeType {
return
NodeTypeHeading
return
NodeTypeHeading
}
}
func
(
n
*
Heading
)
String
()
string
{
str
:=
n
.
Type
()
.
String
()
+
" "
+
fmt
.
Sprintf
(
"%d"
,
n
.
Level
)
for
_
,
child
:=
range
n
.
Children
{
str
+=
" "
+
child
.
String
()
}
return
str
}
type
HorizontalRule
struct
{
type
HorizontalRule
struct
{
BaseBlock
BaseBlock
...
@@ -65,6 +91,10 @@ func (*HorizontalRule) Type() NodeType {
...
@@ -65,6 +91,10 @@ func (*HorizontalRule) Type() NodeType {
return
NodeTypeHorizontalRule
return
NodeTypeHorizontalRule
}
}
func
(
n
*
HorizontalRule
)
String
()
string
{
return
n
.
Type
()
.
String
()
}
type
Blockquote
struct
{
type
Blockquote
struct
{
BaseBlock
BaseBlock
...
@@ -76,3 +106,11 @@ var NodeTypeBlockquote = NewNodeType("Blockquote")
...
@@ -76,3 +106,11 @@ var NodeTypeBlockquote = NewNodeType("Blockquote")
func
(
*
Blockquote
)
Type
()
NodeType
{
func
(
*
Blockquote
)
Type
()
NodeType
{
return
NodeTypeBlockquote
return
NodeTypeBlockquote
}
}
func
(
n
*
Blockquote
)
String
()
string
{
str
:=
n
.
Type
()
.
String
()
for
_
,
child
:=
range
n
.
Children
{
str
+=
" "
+
child
.
String
()
}
return
str
}
plugin/gomark/ast/inline.go
View file @
43ef9eac
package
ast
package
ast
type
BaseInline
struct
{
type
BaseInline
struct
{
Node
Base
Node
}
}
type
Text
struct
{
type
Text
struct
{
...
@@ -16,10 +16,14 @@ func (*Text) Type() NodeType {
...
@@ -16,10 +16,14 @@ func (*Text) Type() NodeType {
return
NodeTypeText
return
NodeTypeText
}
}
func
(
n
*
Text
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Content
}
type
Bold
struct
{
type
Bold
struct
{
BaseInline
BaseInline
// Symbol is "*" or "_"
// Symbol is "*" or "_"
.
Symbol
string
Symbol
string
Content
string
Content
string
}
}
...
@@ -30,10 +34,14 @@ func (*Bold) Type() NodeType {
...
@@ -30,10 +34,14 @@ func (*Bold) Type() NodeType {
return
NodeTypeBold
return
NodeTypeBold
}
}
func
(
n
*
Bold
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Symbol
+
" "
+
n
.
Content
}
type
Italic
struct
{
type
Italic
struct
{
BaseInline
BaseInline
// Symbol is "*" or "_"
// Symbol is "*" or "_"
.
Symbol
string
Symbol
string
Content
string
Content
string
}
}
...
@@ -44,10 +52,14 @@ func (*Italic) Type() NodeType {
...
@@ -44,10 +52,14 @@ func (*Italic) Type() NodeType {
return
NodeTypeItalic
return
NodeTypeItalic
}
}
func
(
n
*
Italic
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Symbol
+
" "
+
n
.
Content
}
type
BoldItalic
struct
{
type
BoldItalic
struct
{
BaseInline
BaseInline
// Symbol is "*" or "_"
// Symbol is "*" or "_"
.
Symbol
string
Symbol
string
Content
string
Content
string
}
}
...
@@ -58,6 +70,10 @@ func (*BoldItalic) Type() NodeType {
...
@@ -58,6 +70,10 @@ func (*BoldItalic) Type() NodeType {
return
NodeTypeBoldItalic
return
NodeTypeBoldItalic
}
}
func
(
n
*
BoldItalic
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Symbol
+
" "
+
n
.
Content
}
type
Code
struct
{
type
Code
struct
{
BaseInline
BaseInline
...
@@ -70,6 +86,10 @@ func (*Code) Type() NodeType {
...
@@ -70,6 +86,10 @@ func (*Code) Type() NodeType {
return
NodeTypeCode
return
NodeTypeCode
}
}
func
(
n
*
Code
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Content
}
type
Image
struct
{
type
Image
struct
{
BaseInline
BaseInline
...
@@ -83,6 +103,10 @@ func (*Image) Type() NodeType {
...
@@ -83,6 +103,10 @@ func (*Image) Type() NodeType {
return
NodeTypeImage
return
NodeTypeImage
}
}
func
(
n
*
Image
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
AltText
+
" "
+
n
.
URL
}
type
Link
struct
{
type
Link
struct
{
BaseInline
BaseInline
...
@@ -96,6 +120,10 @@ func (*Link) Type() NodeType {
...
@@ -96,6 +120,10 @@ func (*Link) Type() NodeType {
return
NodeTypeLink
return
NodeTypeLink
}
}
func
(
n
*
Link
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Text
+
" "
+
n
.
URL
}
type
Tag
struct
{
type
Tag
struct
{
BaseInline
BaseInline
...
@@ -108,6 +136,10 @@ func (*Tag) Type() NodeType {
...
@@ -108,6 +136,10 @@ func (*Tag) Type() NodeType {
return
NodeTypeTag
return
NodeTypeTag
}
}
func
(
n
*
Tag
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Content
}
type
Strikethrough
struct
{
type
Strikethrough
struct
{
BaseInline
BaseInline
...
@@ -119,3 +151,7 @@ var NodeTypeStrikethrough = NewNodeType("Strikethrough")
...
@@ -119,3 +151,7 @@ var NodeTypeStrikethrough = NewNodeType("Strikethrough")
func
(
*
Strikethrough
)
Type
()
NodeType
{
func
(
*
Strikethrough
)
Type
()
NodeType
{
return
NodeTypeStrikethrough
return
NodeTypeStrikethrough
}
}
func
(
n
*
Strikethrough
)
String
()
string
{
return
n
.
Type
()
.
String
()
+
" "
+
n
.
Content
}
plugin/gomark/ast/node.go
View file @
43ef9eac
package
ast
package
ast
type
Node
interface
{
type
Node
interface
{
// Type returns a node type.
Type
()
NodeType
Type
()
NodeType
// String returns a string representation of this node.
// This method is used for debugging.
String
()
string
// GetParent returns a parent node of this node.
GetParent
()
Node
// GetPrevSibling returns a previous sibling node of this node.
GetPrevSibling
()
Node
// GetNextSibling returns a next sibling node of this node.
GetNextSibling
()
Node
// GetChildren returns children nodes of this node.
GetChildren
()
[]
Node
// SetParent sets a parent node to this node.
SetParent
(
Node
)
// SetPrevSibling sets a previous sibling node to this node.
SetPrevSibling
(
Node
)
// SetNextSibling sets a next sibling node to this node.
SetNextSibling
(
Node
)
// SetChildren sets children nodes to this node.
SetChildren
([]
Node
)
}
}
type
NodeType
int
type
NodeType
int
...
@@ -18,3 +47,45 @@ func NewNodeType(name string) NodeType {
...
@@ -18,3 +47,45 @@ func NewNodeType(name string) NodeType {
nodeTypeIndex
++
nodeTypeIndex
++
return
nodeTypeIndex
return
nodeTypeIndex
}
}
type
BaseNode
struct
{
parent
Node
prevSibling
Node
nextSibling
Node
children
[]
Node
}
func
(
n
*
BaseNode
)
GetParent
()
Node
{
return
n
.
parent
}
func
(
n
*
BaseNode
)
GetPrevSibling
()
Node
{
return
n
.
prevSibling
}
func
(
n
*
BaseNode
)
GetNextSibling
()
Node
{
return
n
.
nextSibling
}
func
(
n
*
BaseNode
)
GetChildren
()
[]
Node
{
return
n
.
children
}
func
(
n
*
BaseNode
)
SetParent
(
node
Node
)
{
n
.
parent
=
node
}
func
(
n
*
BaseNode
)
SetPrevSibling
(
node
Node
)
{
n
.
prevSibling
=
node
}
func
(
n
*
BaseNode
)
SetNextSibling
(
node
Node
)
{
n
.
nextSibling
=
node
}
func
(
n
*
BaseNode
)
SetChildren
(
nodes
[]
Node
)
{
n
.
children
=
nodes
}
plugin/gomark/parser/blockquote.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -21,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -21,10 +23,10 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
contentTokens
:=
[]
*
tokenizer
.
Token
{}
contentTokens
:=
[]
*
tokenizer
.
Token
{}
for
_
,
token
:=
range
tokens
[
2
:
]
{
for
_
,
token
:=
range
tokens
[
2
:
]
{
contentTokens
=
append
(
contentTokens
,
token
)
if
token
.
Type
==
tokenizer
.
Newline
{
if
token
.
Type
==
tokenizer
.
Newline
{
break
break
}
}
contentTokens
=
append
(
contentTokens
,
token
)
}
}
if
len
(
contentTokens
)
==
0
{
if
len
(
contentTokens
)
==
0
{
return
0
,
false
return
0
,
false
...
@@ -33,15 +35,18 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -33,15 +35,18 @@ func (*BlockquoteParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
len
(
contentTokens
)
+
2
,
true
return
len
(
contentTokens
)
+
2
,
true
}
}
func
(
p
*
BlockquoteParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
BlockquoteParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
contentTokens
:=
tokens
[
2
:
size
]
contentTokens
:=
tokens
[
2
:
size
]
children
:=
ParseInline
(
contentTokens
)
blockquote
:=
&
ast
.
Blockquote
{}
return
&
ast
.
Blockquote
{
children
,
err
:=
ParseInline
(
blockquote
,
contentTokens
)
Children
:
children
,
if
err
!=
nil
{
return
nil
,
err
}
}
blockquote
.
Children
=
children
return
blockquote
,
nil
}
}
plugin/gomark/parser/blockquote_test.go
View file @
43ef9eac
...
@@ -31,6 +31,7 @@ func TestBlockquoteParser(t *testing.T) {
...
@@ -31,6 +31,7 @@ func TestBlockquoteParser(t *testing.T) {
&
ast
.
Text
{
&
ast
.
Text
{
Content
:
"Hello"
,
Content
:
"Hello"
,
},
},
&
ast
.
LineBreak
{},
},
},
},
},
},
},
...
@@ -42,6 +43,7 @@ func TestBlockquoteParser(t *testing.T) {
...
@@ -42,6 +43,7 @@ func TestBlockquoteParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
blockquote
,
NewBlockquoteParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewBlockquoteParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
blockquote
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/bold.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -43,10 +45,10 @@ func (*BoldParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -43,10 +45,10 @@ func (*BoldParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
cursor
+
2
,
true
return
cursor
+
2
,
true
}
}
func
(
p
*
BoldParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
BoldParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
prefixTokenType
:=
tokens
[
0
]
.
Type
prefixTokenType
:=
tokens
[
0
]
.
Type
...
@@ -54,5 +56,5 @@ func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -54,5 +56,5 @@ func (p *BoldParser) Parse(tokens []*tokenizer.Token) ast.Node {
return
&
ast
.
Bold
{
return
&
ast
.
Bold
{
Symbol
:
prefixTokenType
,
Symbol
:
prefixTokenType
,
Content
:
tokenizer
.
Stringify
(
contentTokens
),
Content
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/bold_italic.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -43,10 +45,10 @@ func (*BoldItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -43,10 +45,10 @@ func (*BoldItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
cursor
+
3
,
true
return
cursor
+
3
,
true
}
}
func
(
p
*
BoldItalicParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
BoldItalicParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
prefixTokenType
:=
tokens
[
0
]
.
Type
prefixTokenType
:=
tokens
[
0
]
.
Type
...
@@ -54,5 +56,5 @@ func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -54,5 +56,5 @@ func (p *BoldItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
return
&
ast
.
BoldItalic
{
return
&
ast
.
BoldItalic
{
Symbol
:
prefixTokenType
,
Symbol
:
prefixTokenType
,
Content
:
tokenizer
.
Stringify
(
contentTokens
),
Content
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/bold_italic_test.go
View file @
43ef9eac
...
@@ -44,6 +44,7 @@ func TestBoldItalicParser(t *testing.T) {
...
@@ -44,6 +44,7 @@ func TestBoldItalicParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
boldItalic
,
NewBoldItalicParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewBoldItalicParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
boldItalic
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/bold_test.go
View file @
43ef9eac
...
@@ -44,6 +44,7 @@ func TestBoldParser(t *testing.T) {
...
@@ -44,6 +44,7 @@ func TestBoldParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
bold
,
NewBoldParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewBoldParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
bold
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/code.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -36,14 +38,14 @@ func (*CodeParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -36,14 +38,14 @@ func (*CodeParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
len
(
contentTokens
)
+
2
,
true
return
len
(
contentTokens
)
+
2
,
true
}
}
func
(
p
*
CodeParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
CodeParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
contentTokens
:=
tokens
[
1
:
size
-
1
]
contentTokens
:=
tokens
[
1
:
size
-
1
]
return
&
ast
.
Code
{
return
&
ast
.
Code
{
Content
:
tokenizer
.
Stringify
(
contentTokens
),
Content
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/code_block.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -51,10 +53,10 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -51,10 +53,10 @@ func (*CodeBlockParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
cursor
,
true
return
cursor
,
true
}
}
func
(
p
*
CodeBlockParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
CodeBlockParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
languageToken
:=
tokens
[
3
]
languageToken
:=
tokens
[
3
]
...
@@ -73,5 +75,5 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -73,5 +75,5 @@ func (p *CodeBlockParser) Parse(tokens []*tokenizer.Token) ast.Node {
if
languageToken
!=
nil
{
if
languageToken
!=
nil
{
codeBlock
.
Language
=
languageToken
.
String
()
codeBlock
.
Language
=
languageToken
.
String
()
}
}
return
codeBlock
return
codeBlock
,
nil
}
}
plugin/gomark/parser/code_block_test.go
View file @
43ef9eac
...
@@ -58,6 +58,7 @@ func TestCodeBlockParser(t *testing.T) {
...
@@ -58,6 +58,7 @@ func TestCodeBlockParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
codeBlock
,
NewCodeBlockParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewCodeBlockParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
codeBlock
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/code_test.go
View file @
43ef9eac
...
@@ -32,6 +32,7 @@ func TestCodeParser(t *testing.T) {
...
@@ -32,6 +32,7 @@ func TestCodeParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
code
,
NewCodeParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewCodeParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
code
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/heading.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -47,10 +49,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -47,10 +49,10 @@ func (*HeadingParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
cursor
,
true
return
cursor
,
true
}
}
func
(
p
*
HeadingParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
HeadingParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
level
:=
0
level
:=
0
...
@@ -61,10 +63,15 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -61,10 +63,15 @@ func (p *HeadingParser) Parse(tokens []*tokenizer.Token) ast.Node {
break
break
}
}
}
}
contentTokens
:=
tokens
[
level
+
1
:
size
]
contentTokens
:=
tokens
[
level
+
1
:
size
]
children
:=
ParseInline
(
contentTokens
)
heading
:=
&
ast
.
Heading
{
return
&
ast
.
Heading
{
Level
:
level
,
Level
:
level
,
}
Children
:
children
,
children
,
err
:=
ParseInline
(
heading
,
contentTokens
)
if
err
!=
nil
{
return
nil
,
err
}
}
heading
.
Children
=
children
return
heading
,
nil
}
}
plugin/gomark/parser/heading_test.go
View file @
43ef9eac
...
@@ -75,6 +75,7 @@ Hello World`,
...
@@ -75,6 +75,7 @@ Hello World`,
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
heading
,
NewHeadingParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewHeadingParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
heading
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/horizontal_rule.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -27,13 +29,13 @@ func (*HorizontalRuleParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -27,13 +29,13 @@ func (*HorizontalRuleParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
3
,
true
return
3
,
true
}
}
func
(
p
*
HorizontalRuleParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
HorizontalRuleParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
return
&
ast
.
HorizontalRule
{
return
&
ast
.
HorizontalRule
{
Symbol
:
tokens
[
0
]
.
Type
,
Symbol
:
tokens
[
0
]
.
Type
,
}
}
,
nil
}
}
plugin/gomark/parser/horizontal_rule_test.go
View file @
43ef9eac
...
@@ -44,6 +44,7 @@ func TestHorizontalRuleParser(t *testing.T) {
...
@@ -44,6 +44,7 @@ func TestHorizontalRuleParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
horizontalRule
,
NewHorizontalRuleParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewHorizontalRuleParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
horizontalRule
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/image.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -52,10 +54,10 @@ func (*ImageParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -52,10 +54,10 @@ func (*ImageParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
cursor
+
len
(
contentTokens
)
+
1
,
true
return
cursor
+
len
(
contentTokens
)
+
1
,
true
}
}
func
(
p
*
ImageParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
ImageParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
altTextTokens
:=
[]
*
tokenizer
.
Token
{}
altTextTokens
:=
[]
*
tokenizer
.
Token
{}
...
@@ -69,5 +71,5 @@ func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -69,5 +71,5 @@ func (p *ImageParser) Parse(tokens []*tokenizer.Token) ast.Node {
return
&
ast
.
Image
{
return
&
ast
.
Image
{
AltText
:
tokenizer
.
Stringify
(
altTextTokens
),
AltText
:
tokenizer
.
Stringify
(
altTextTokens
),
URL
:
tokenizer
.
Stringify
(
contentTokens
),
URL
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/image_test.go
View file @
43ef9eac
...
@@ -39,6 +39,7 @@ func TestImageParser(t *testing.T) {
...
@@ -39,6 +39,7 @@ func TestImageParser(t *testing.T) {
}
}
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
image
,
NewImageParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewImageParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
image
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/italic.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -42,10 +44,10 @@ func (*ItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -42,10 +44,10 @@ func (*ItalicParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
len
(
contentTokens
)
+
2
,
true
return
len
(
contentTokens
)
+
2
,
true
}
}
func
(
p
*
ItalicParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
ItalicParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
prefixTokenType
:=
tokens
[
0
]
.
Type
prefixTokenType
:=
tokens
[
0
]
.
Type
...
@@ -53,5 +55,5 @@ func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -53,5 +55,5 @@ func (p *ItalicParser) Parse(tokens []*tokenizer.Token) ast.Node {
return
&
ast
.
Italic
{
return
&
ast
.
Italic
{
Symbol
:
prefixTokenType
,
Symbol
:
prefixTokenType
,
Content
:
tokenizer
.
Stringify
(
contentTokens
),
Content
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/italic_test.go
View file @
43ef9eac
...
@@ -43,6 +43,7 @@ func TestItalicParser(t *testing.T) {
...
@@ -43,6 +43,7 @@ func TestItalicParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
italic
,
NewItalicParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewItalicParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
italic
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/line_break.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -21,11 +23,11 @@ func (*LineBreakParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -21,11 +23,11 @@ func (*LineBreakParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
1
,
true
return
1
,
true
}
}
func
(
p
*
LineBreakParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
LineBreakParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
return
&
ast
.
LineBreak
{}
return
&
ast
.
LineBreak
{}
,
nil
}
}
plugin/gomark/parser/link.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -51,10 +53,10 @@ func (*LinkParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -51,10 +53,10 @@ func (*LinkParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
4
+
len
(
urlTokens
)
+
len
(
textTokens
),
true
return
4
+
len
(
urlTokens
)
+
len
(
textTokens
),
true
}
}
func
(
p
*
LinkParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
LinkParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
textTokens
:=
[]
*
tokenizer
.
Token
{}
textTokens
:=
[]
*
tokenizer
.
Token
{}
...
@@ -68,5 +70,5 @@ func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node {
...
@@ -68,5 +70,5 @@ func (p *LinkParser) Parse(tokens []*tokenizer.Token) ast.Node {
return
&
ast
.
Link
{
return
&
ast
.
Link
{
Text
:
tokenizer
.
Stringify
(
textTokens
),
Text
:
tokenizer
.
Stringify
(
textTokens
),
URL
:
tokenizer
.
Stringify
(
urlTokens
),
URL
:
tokenizer
.
Stringify
(
urlTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/link_test.go
View file @
43ef9eac
...
@@ -39,6 +39,7 @@ func TestLinkParser(t *testing.T) {
...
@@ -39,6 +39,7 @@ func TestLinkParser(t *testing.T) {
}
}
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
link
,
NewLinkParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewLinkParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
link
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/paragraph.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -29,15 +31,18 @@ func (*ParagraphParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -29,15 +31,18 @@ func (*ParagraphParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
len
(
contentTokens
),
true
return
len
(
contentTokens
),
true
}
}
func
(
p
*
ParagraphParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
ParagraphParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
contentTokens
:=
tokens
[
:
size
]
contentTokens
:=
tokens
[
:
size
]
children
:=
ParseInline
(
contentTokens
)
paragraph
:=
&
ast
.
Paragraph
{}
return
&
ast
.
Paragraph
{
children
,
err
:=
ParseInline
(
paragraph
,
contentTokens
)
Children
:
children
,
if
err
!=
nil
{
return
nil
,
err
}
}
paragraph
.
Children
=
children
return
paragraph
,
nil
}
}
plugin/gomark/parser/paragraph_test.go
View file @
43ef9eac
...
@@ -32,6 +32,7 @@ func TestParagraphParser(t *testing.T) {
...
@@ -32,6 +32,7 @@ func TestParagraphParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
paragraph
,
NewParagraphParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewParagraphParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
paragraph
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/parser.go
View file @
43ef9eac
...
@@ -14,7 +14,7 @@ type Context struct {
...
@@ -14,7 +14,7 @@ type Context struct {
type
BaseParser
interface
{
type
BaseParser
interface
{
Match
(
tokens
[]
*
tokenizer
.
Token
)
(
int
,
bool
)
Match
(
tokens
[]
*
tokenizer
.
Token
)
(
int
,
bool
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
}
}
type
InlineParser
interface
{
type
InlineParser
interface
{
...
@@ -36,16 +36,23 @@ var defaultBlockParsers = []BlockParser{
...
@@ -36,16 +36,23 @@ var defaultBlockParsers = []BlockParser{
func
Parse
(
tokens
[]
*
tokenizer
.
Token
)
([]
ast
.
Node
,
error
)
{
func
Parse
(
tokens
[]
*
tokenizer
.
Token
)
([]
ast
.
Node
,
error
)
{
nodes
:=
[]
ast
.
Node
{}
nodes
:=
[]
ast
.
Node
{}
var
prevNode
ast
.
Node
for
len
(
tokens
)
>
0
{
for
len
(
tokens
)
>
0
{
for
_
,
blockParser
:=
range
defaultBlockParsers
{
for
_
,
blockParser
:=
range
defaultBlockParsers
{
cursor
,
matched
:=
blockParser
.
Match
(
tokens
)
size
,
matched
:=
blockParser
.
Match
(
tokens
)
if
matched
{
if
matched
{
node
:=
blockParser
.
Parse
(
tokens
)
node
,
err
:=
blockParser
.
Parse
(
tokens
)
if
node
=
=
nil
{
if
err
!
=
nil
{
return
nil
,
errors
.
New
(
"parse error"
)
return
nil
,
errors
.
New
(
"parse error"
)
}
}
tokens
=
tokens
[
size
:
]
if
prevNode
!=
nil
{
prevNode
.
SetNextSibling
(
node
)
node
.
SetPrevSibling
(
prevNode
)
}
prevNode
=
node
nodes
=
append
(
nodes
,
node
)
nodes
=
append
(
nodes
,
node
)
tokens
=
tokens
[
cursor
:
]
break
break
}
}
}
}
...
@@ -62,27 +69,40 @@ var defaultInlineParsers = []InlineParser{
...
@@ -62,27 +69,40 @@ var defaultInlineParsers = []InlineParser{
NewCodeParser
(),
NewCodeParser
(),
NewTagParser
(),
NewTagParser
(),
NewStrikethroughParser
(),
NewStrikethroughParser
(),
NewLineBreakParser
(),
NewTextParser
(),
NewTextParser
(),
}
}
func
ParseInline
(
tokens
[]
*
tokenizer
.
Token
)
[]
ast
.
Node
{
func
ParseInline
(
parent
ast
.
Node
,
tokens
[]
*
tokenizer
.
Token
)
([]
ast
.
Node
,
error
)
{
nodes
:=
[]
ast
.
Node
{}
nodes
:=
[]
ast
.
Node
{}
var
last
Node
ast
.
Node
var
prev
Node
ast
.
Node
for
len
(
tokens
)
>
0
{
for
len
(
tokens
)
>
0
{
for
_
,
inlineParser
:=
range
defaultInlineParsers
{
for
_
,
inlineParser
:=
range
defaultInlineParsers
{
cursor
,
matched
:=
inlineParser
.
Match
(
tokens
)
size
,
matched
:=
inlineParser
.
Match
(
tokens
)
if
matched
{
if
matched
{
node
:=
inlineParser
.
Parse
(
tokens
)
node
,
err
:=
inlineParser
.
Parse
(
tokens
)
if
node
.
Type
()
==
ast
.
NodeTypeText
&&
lastNode
!=
nil
&&
lastNode
.
Type
()
==
ast
.
NodeTypeText
{
if
err
!=
nil
{
lastNode
.
(
*
ast
.
Text
)
.
Content
+=
node
.
(
*
ast
.
Text
)
.
Content
return
nil
,
errors
.
New
(
"parse error"
)
}
else
{
}
nodes
=
append
(
nodes
,
node
)
lastNode
=
node
tokens
=
tokens
[
size
:
]
node
.
SetParent
(
parent
)
if
prevNode
!=
nil
{
if
prevNode
.
Type
()
==
ast
.
NodeTypeText
&&
node
.
Type
()
==
ast
.
NodeTypeText
{
prevNode
.
(
*
ast
.
Text
)
.
Content
+=
node
.
(
*
ast
.
Text
)
.
Content
break
}
prevNode
.
SetNextSibling
(
node
)
node
.
SetPrevSibling
(
prevNode
)
}
}
tokens
=
tokens
[
cursor
:
]
nodes
=
append
(
nodes
,
node
)
prevNode
=
node
break
break
}
}
}
}
}
}
return
nodes
parent
.
SetChildren
(
nodes
)
return
nodes
,
nil
}
}
plugin/gomark/parser/parser_test.go
View file @
43ef9eac
...
@@ -43,7 +43,7 @@ func TestParser(t *testing.T) {
...
@@ -43,7 +43,7 @@ func TestParser(t *testing.T) {
},
},
},
},
{
{
text
:
"Hello **world**!"
,
text
:
"Hello **world**!
\n
Here is a new line.
"
,
nodes
:
[]
ast
.
Node
{
nodes
:
[]
ast
.
Node
{
&
ast
.
Paragraph
{
&
ast
.
Paragraph
{
Children
:
[]
ast
.
Node
{
Children
:
[]
ast
.
Node
{
...
@@ -59,6 +59,14 @@ func TestParser(t *testing.T) {
...
@@ -59,6 +59,14 @@ func TestParser(t *testing.T) {
},
},
},
},
},
},
&
ast
.
LineBreak
{},
&
ast
.
Paragraph
{
Children
:
[]
ast
.
Node
{
&
ast
.
Text
{
Content
:
"Here is a new line."
,
},
},
},
},
},
},
},
{
{
...
@@ -89,8 +97,17 @@ func TestParser(t *testing.T) {
...
@@ -89,8 +97,17 @@ func TestParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
nodes
,
err
:=
Parse
(
tokens
)
nodes
,
_
:=
Parse
(
tokens
)
require
.
NoError
(
t
,
err
)
require
.
Equal
(
t
,
StringifyNodes
(
test
.
nodes
),
StringifyNodes
(
nodes
))
require
.
Equal
(
t
,
test
.
nodes
,
nodes
)
}
}
func
StringifyNodes
(
nodes
[]
ast
.
Node
)
string
{
var
result
string
for
_
,
node
:=
range
nodes
{
if
node
!=
nil
{
result
+=
node
.
String
()
}
}
}
return
result
}
}
plugin/gomark/parser/strikethrough.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -36,14 +38,14 @@ func (*StrikethroughParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -36,14 +38,14 @@ func (*StrikethroughParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
cursor
+
2
,
true
return
cursor
+
2
,
true
}
}
func
(
p
*
StrikethroughParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
StrikethroughParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
contentTokens
:=
tokens
[
2
:
size
-
2
]
contentTokens
:=
tokens
[
2
:
size
-
2
]
return
&
ast
.
Strikethrough
{
return
&
ast
.
Strikethrough
{
Content
:
tokenizer
.
Stringify
(
contentTokens
),
Content
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/strikethrough_test.go
View file @
43ef9eac
...
@@ -40,6 +40,7 @@ func TestStrikethroughParser(t *testing.T) {
...
@@ -40,6 +40,7 @@ func TestStrikethroughParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
strikethrough
,
NewStrikethroughParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewStrikethroughParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
strikethrough
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/tag.go
View file @
43ef9eac
package
parser
package
parser
import
(
import
(
"errors"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/ast"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
)
...
@@ -32,14 +34,14 @@ func (*TagParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -32,14 +34,14 @@ func (*TagParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
len
(
contentTokens
)
+
1
,
true
return
len
(
contentTokens
)
+
1
,
true
}
}
func
(
p
*
TagParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
p
*
TagParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
size
,
ok
:=
p
.
Match
(
tokens
)
size
,
ok
:=
p
.
Match
(
tokens
)
if
size
==
0
||
!
ok
{
if
size
==
0
||
!
ok
{
return
nil
return
nil
,
errors
.
New
(
"not matched"
)
}
}
contentTokens
:=
tokens
[
1
:
size
]
contentTokens
:=
tokens
[
1
:
size
]
return
&
ast
.
Tag
{
return
&
ast
.
Tag
{
Content
:
tokenizer
.
Stringify
(
contentTokens
),
Content
:
tokenizer
.
Stringify
(
contentTokens
),
}
}
,
nil
}
}
plugin/gomark/parser/tag_test.go
View file @
43ef9eac
...
@@ -38,6 +38,7 @@ func TestTagParser(t *testing.T) {
...
@@ -38,6 +38,7 @@ func TestTagParser(t *testing.T) {
for
_
,
test
:=
range
tests
{
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
require
.
Equal
(
t
,
test
.
tag
,
NewTagParser
()
.
Parse
(
tokens
))
node
,
_
:=
NewTagParser
()
.
Parse
(
tokens
)
require
.
Equal
(
t
,
StringifyNodes
([]
ast
.
Node
{
test
.
tag
}),
StringifyNodes
([]
ast
.
Node
{
node
}))
}
}
}
}
plugin/gomark/parser/text.go
View file @
43ef9eac
...
@@ -20,11 +20,11 @@ func (*TextParser) Match(tokens []*tokenizer.Token) (int, bool) {
...
@@ -20,11 +20,11 @@ func (*TextParser) Match(tokens []*tokenizer.Token) (int, bool) {
return
1
,
true
return
1
,
true
}
}
func
(
*
TextParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
ast
.
Node
{
func
(
*
TextParser
)
Parse
(
tokens
[]
*
tokenizer
.
Token
)
(
ast
.
Node
,
error
)
{
if
len
(
tokens
)
==
0
{
if
len
(
tokens
)
==
0
{
return
&
ast
.
Text
{}
return
&
ast
.
Text
{}
,
nil
}
}
return
&
ast
.
Text
{
return
&
ast
.
Text
{
Content
:
tokens
[
0
]
.
String
(),
Content
:
tokens
[
0
]
.
String
(),
}
}
,
nil
}
}
plugin/gomark/renderer/html/html.go
0 → 100644
View file @
43ef9eac
package
html
import (
	"bytes"
	"fmt"
	"html"

	"github.com/usememos/memos/plugin/gomark/ast"
)
// HTMLRenderer is a simple renderer that converts AST to HTML.
// nolint
type HTMLRenderer struct {
	output  *bytes.Buffer  // accumulated HTML output
	context *renderContext // per-render state (currently empty)
}

// renderContext holds per-render state; it has no fields yet.
type renderContext struct {
}
// NewHTMLRenderer creates a new HTMLRenderer.
func
NewHTMLRenderer
()
*
HTMLRenderer
{
return
&
HTMLRenderer
{
output
:
new
(
bytes
.
Buffer
),
context
:
&
renderContext
{},
}
}
// RenderNode renders a single AST node to HTML.
func
(
r
*
HTMLRenderer
)
RenderNode
(
node
ast
.
Node
)
{
prevSibling
,
nextSibling
:=
node
.
GetPrevSibling
(),
node
.
GetNextSibling
()
switch
n
:=
node
.
(
type
)
{
case
*
ast
.
LineBreak
:
r
.
output
.
WriteString
(
"<br>"
)
case
*
ast
.
Paragraph
:
r
.
output
.
WriteString
(
"<p>"
)
r
.
RenderNodes
(
n
.
Children
)
r
.
output
.
WriteString
(
"</p>"
)
case
*
ast
.
CodeBlock
:
r
.
output
.
WriteString
(
"<pre><code>"
)
r
.
output
.
WriteString
(
n
.
Content
)
r
.
output
.
WriteString
(
"</code></pre>"
)
case
*
ast
.
Heading
:
r
.
output
.
WriteString
(
fmt
.
Sprintf
(
"<h%d>"
,
n
.
Level
))
r
.
RenderNodes
(
n
.
Children
)
r
.
output
.
WriteString
(
fmt
.
Sprintf
(
"</h%d>"
,
n
.
Level
))
case
*
ast
.
HorizontalRule
:
r
.
output
.
WriteString
(
"<hr>"
)
case
*
ast
.
Blockquote
:
if
prevSibling
==
nil
||
prevSibling
.
Type
()
!=
ast
.
NodeTypeBlockquote
{
r
.
output
.
WriteString
(
"<blockquote>"
)
}
r
.
RenderNodes
(
n
.
Children
)
if
nextSibling
!=
nil
&&
nextSibling
.
Type
()
==
ast
.
NodeTypeBlockquote
{
r
.
RenderNode
(
nextSibling
)
}
if
prevSibling
==
nil
||
prevSibling
.
Type
()
!=
ast
.
NodeTypeBlockquote
{
r
.
output
.
WriteString
(
"</blockquote>"
)
}
case
*
ast
.
Text
:
r
.
output
.
WriteString
(
n
.
Content
)
default
:
// Handle other block types if needed.
}
}
// RenderNodes renders a slice of AST nodes to HTML.
func
(
r
*
HTMLRenderer
)
RenderNodes
(
nodes
[]
ast
.
Node
)
{
for
_
,
node
:=
range
nodes
{
prevSibling
:=
node
.
GetPrevSibling
()
if
prevSibling
!=
nil
{
if
prevSibling
.
Type
()
==
node
.
Type
()
{
if
node
.
Type
()
==
ast
.
NodeTypeBlockquote
{
continue
}
}
}
r
.
RenderNode
(
node
)
}
}
// Render renders the AST to HTML.
func
(
r
*
HTMLRenderer
)
Render
(
astRoot
[]
ast
.
Node
)
string
{
r
.
RenderNodes
(
astRoot
)
return
r
.
output
.
String
()
}
plugin/gomark/renderer/html/html_test.go
0 → 100644
View file @
43ef9eac
package
html
import
(
"testing"
"github.com/stretchr/testify/require"
"github.com/usememos/memos/plugin/gomark/parser"
"github.com/usememos/memos/plugin/gomark/parser/tokenizer"
)
func
TestHTMLRenderer
(
t
*
testing
.
T
)
{
tests
:=
[]
struct
{
text
string
expected
string
}{
{
text
:
"Hello world!"
,
expected
:
`<p>Hello world!</p>`
,
},
{
text
:
"> Hello
\n
> world!"
,
expected
:
`<blockquote>Hello<br>world!</blockquote>`
,
},
}
for
_
,
test
:=
range
tests
{
tokens
:=
tokenizer
.
Tokenize
(
test
.
text
)
nodes
,
err
:=
parser
.
Parse
(
tokens
)
require
.
NoError
(
t
,
err
)
actual
:=
NewHTMLRenderer
()
.
Render
(
nodes
)
if
actual
!=
test
.
expected
{
t
.
Errorf
(
"expected: %s, actual: %s"
,
test
.
expected
,
actual
)
}
}
}
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment