77 lines
2.6 KiB
Nim
77 lines
2.6 KiB
Nim
import std/[
|
|
collections/sequtils,
|
|
sugar,
|
|
]
|
|
import fp/[
|
|
resultM,
|
|
]
|
|
import ./org_types
|
|
import ./org_builder
|
|
import ../parser/parser
|
|
# import ./org_text_link
|
|
|
|
# -- Parsers
|
|
|
|
# Emphasis-pair parsers: each matches a span of text delimited by the same
# marker character on both sides (org-mode inline markup). `anyBetweenPair`
# and `ch` come from ../parser/parser.
let boldParser* = anyBetweenPair(ch('*'))          # *bold*
let italicParser* = anyBetweenPair(ch('/'))        # /italic/
let underlineParser* = anyBetweenPair(ch('_'))     # _underline_
let verbatimParser* = anyBetweenPair(ch('='))      # =verbatim=
let codeParser* = anyBetweenPair(ch('~'))          # ~code~
let strikeThroughParser* = anyBetweenPair(ch('+')) # +strikethrough+
|
|
|
|
# -- Tokenizers
|
|
|
|
# Tokenizers: each wraps the parsed text in the matching org inline token
# kind (org_types). `tokenizeRawText` / `tokenizeInlineTokens` come from
# ./org_builder.
let rawTextTokenizer* = tokenizeRawText(orgRawText)

let boldTokenizer* = tokenizeInlineTokens(orgBoldText)
let italicTokenizer* = tokenizeInlineTokens(orgItalicText)
let underlineTokenizer* = tokenizeInlineTokens(orgUnderlineText)
let verbatimTokenizer* = tokenizeInlineTokens(orgVerbatimText)
let codeTokenizer* = tokenizeInlineTokens(orgCodeText)
let strikeThroughTokenizer* = tokenizeInlineTokens(orgStrikeThroughText)
|
|
|
|
# Pairs each style parser with the tokenizer that builds its inline token;
# consumed by `tryTokenize` in tryTokenizeInline below.
let styledTextTokenizers = @[
  (boldParser, boldTokenizer),
  (italicParser, italicTokenizer),
  (underlineParser, underlineTokenizer),
  (verbatimParser, verbatimTokenizer),
  (codeParser, codeTokenizer),
  (strikeThroughParser, strikeThroughTokenizer),
  # (linkParser, linkTokenizerSeq),
]
|
|
|
|
proc tryTokenizeRawText(tokens: seq[ParserToken]): seq[OrgInlineBuilderT] =
  ## Merge all parser `tokens` into a string to tokenize for the builder.
  ## Unless the string is empty, in this case return an empty list.
  # Accumulate with `add` instead of `foldl(a & b.toString(), "")`: the fold
  # re-allocates the whole string on every step (accidental O(n^2)).
  var str = ""
  for token in tokens:
    str.add token.toString()
  if str.len == 0: @[]
  else: @[rawTextTokenizer(str)]
|
|
|
|
proc tryTokenizeInline*(content: string): OrgInlineBuilderResult =
  ## Build an inline builder for `content` and run the styled-text
  ## tokenizers over it, falling back to raw-text tokenization for the
  ## unmatched remainder.
  let runTokenizers = (builder: OrgInlineBuilder) => tryTokenize(
    builder = builder,
    builderFns = styledTextTokenizers,
    defaultTokenizerFn = tryTokenizeRawText,
  )
  initOrgInlineBuilder(content).flatMap(runTokenizers)
|
|
|
|
# -- Tests
|
|
|
|
when isMainModule:
  block testParsers:
    proc testParser(str: string, parser: parserFnT): string =
      ## Run `parser` over `str` and render the matched tokens back to text.
      initParserResult(str).flatMap(parser).tokensToString()

    # doAssert instead of assert: `assert` is stripped under -d:danger, so
    # these self-checks would silently disappear in release-style builds.
    doAssert testParser("*bold*", boldParser) == "bold"
    doAssert testParser("/italic/", italicParser) == "italic"
    doAssert testParser("_underline_", underlineParser) == "underline"
    doAssert testParser("=verbatim=", verbatimParser) == "verbatim"
    doAssert testParser("~code~", codeParser) == "code"
    doAssert testParser("+strikeThrough+", strikeThroughParser) == "strikeThrough"

  block testTokenizers:
    # Smoke test: tokenize a line mixing every supported inline style plus
    # link syntax; `unsafeGet` raises if tokenization failed.
    let test = tryTokenizeInline(
      "Regular *bold* [[placeholder.com]] /italic/ _underline_ =verbatim= ~code~ +strikethrough+ [[https://placeholder.com][title]]"
    )
    echo test.unsafeGet()
|