Clean up structure
This commit is contained in:
@@ -1,17 +1,19 @@
|
||||
import std/sugar
|
||||
import std/collections/sequtils
|
||||
import ./org_types
|
||||
import ../parser/parser_types
|
||||
|
||||
# Builder aliases for org-element construction.
# NOTE(review): the merge residue declared `OrgBuilderT` twice (once as
# `seq[OrgElement]`, once as `OrgElement`); a single scalar alias is kept here
# because `makeOrgBuilderToken` constructs it with `kind`/`content` fields.
type OrgBuilderT* = OrgElement
type OrgBuilder* = Builder[OrgBuilderT]
type OrgBuilderResult* = BuilderResult[OrgBuilderT]

# String-oriented builder aliases used by `initStringBuilder`.
# NOTE(review): `StringBuilderResult` was referenced but never declared in the
# original — declared here; confirm the intended element type.
type StringBuilder* = Builder[OrgBuilderT]
type StringBuilderResult* = BuilderResult[OrgBuilderT]
|
||||
|
||||
proc concat*(typeInfo: OrgBuilderT): (seq[ParserToken], seq[OrgBuilderT]) -> seq[OrgBuilderT] =
  ## Return a concat function that folds a run of parser tokens onto the
  ## `typeInfo` seed and appends the folded value to the existing builder tree.
  ## NOTE(review): `seperator` is not declared anywhere in this file —
  ## presumably a module-level constant (or a typo for `separator`); confirm it
  ## exists in scope, otherwise this proc does not compile.
  ## NOTE(review): using an `OrgBuilderT` value as the fold seed only
  ## type-checks if `OrgBuilderT` supports `&` with strings — verify against
  ## org_types.
  return proc(xs: seq[ParserToken], ys: seq[OrgBuilderT]): seq[OrgBuilderT] =
    return ys & xs.foldl(a & b.tokenStringValue() & seperator, typeInfo)
|
||||
func makeOrgBuilderToken*(kind: orgElementKind): string -> OrgBuilderT =
  ## Return a tokenizer closure that wraps a string in an `OrgBuilderT`
  ## element of the given `kind`.
  return func(content: string): OrgBuilderT =
    return OrgBuilderT(
      # Use the requested `kind` — the original hard-coded `orgRawText`,
      # which made every specialised tokenizer (bold, italic, …) emit
      # raw-text elements regardless of the kind it was built with.
      kind: kind,
      content: content,
    )
|
||||
|
||||
proc initStringBuilder*(str: string): StringBuilderResult =
  ## Create a fresh `StringBuilder` over `str` with an empty element tree,
  ## wrapped in an `ok` result.
  let freshBuilder = StringBuilder((
    parser: initParser(str),
    tree: newSeq[OrgBuilderT](),
  ))
  result = StringBuilderResult.ok(freshBuilder)
|
||||
proc mergeOrgToken*(orgTokenFn: string -> OrgBuilderT): (seq[ParserToken], seq[OrgBuilderT]) -> seq[OrgBuilderT] =
  ## Return a concat function that flattens a run of parser tokens into one
  ## string, converts that string with `orgTokenFn`, and appends the resulting
  ## element to the builder tree.
  return proc(parserTokens: seq[ParserToken], builderTokens: seq[OrgBuilderT]): seq[OrgBuilderT] =
    var merged = ""
    for tok in parserTokens:
      merged.add tok.tokenStringValue()
    result = builderTokens & orgTokenFn(merged)
|
||||
|
||||
@@ -5,12 +5,12 @@ import std/strutils
|
||||
import results
|
||||
import fusion/matching
|
||||
import ./org_types
|
||||
import ./org_builder
|
||||
import ../utils/fp
|
||||
import ../parser/parser_internals
|
||||
import ../parser/parser_types
|
||||
import ../parser/builder_api
|
||||
|
||||
|
||||
# Inline-markup parsers: each one matches text delimited by a pair of the
# given character (org-mode emphasis syntax).
let boldParser* = parseBetweenPair(ch('*'))
let italicParser* = parseBetweenPair(ch('/'))
let underlineParser* = parseBetweenPair(ch('_'))
let verbatimParser* = parseBetweenPair(ch('='))
let codeParser* = parseBetweenPair(ch('~'))
let strikeThroughParser* = parseBetweenPair(ch('+'))
|
||||
|
||||
# NOTE(review): these aliases appear to duplicate declarations in
# ./org_builder (imported above); having both in scope risks
# ambiguous-symbol errors — confirm which module should own them.
type OrgBuilderT* = OrgElement
type OrgBuilder* = Builder[OrgBuilderT]
type OrgBuilderResult* = BuilderResult[OrgBuilderT]
|
||||
# One tokenizer per inline-markup kind; each wraps matched text in an
# org element of the corresponding kind (see `makeOrgBuilderToken`).
let rawTokenizer* = makeOrgBuilderToken(orgRawText)
let boldTokenizer* = makeOrgBuilderToken(orgBoldText)
let italicTokenizer* = makeOrgBuilderToken(orgItalicText)
let underlineTokenizer* = makeOrgBuilderToken(orgUnderlineText)
let verbatimTokenizer* = makeOrgBuilderToken(orgVerbatimText)
let codeTokenizer* = makeOrgBuilderToken(orgCodeText)
let strikeThroughTokenizer* = makeOrgBuilderToken(orgStrikeThroughText)
|
||||
|
||||
proc makeRawToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgRawText` element.
  result = OrgBuilderT(kind: orgRawText, content: content)
|
||||
|
||||
proc makeBoldToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgBoldText` element.
  result = OrgBuilderT(kind: orgBoldText, content: content)
|
||||
|
||||
proc makeItalicToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgItalicText` element.
  result = OrgBuilderT(kind: orgItalicText, content: content)
|
||||
|
||||
proc makeUnderlineToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgUnderlineText` element.
  result = OrgBuilderT(kind: orgUnderlineText, content: content)
|
||||
|
||||
proc makeVerbatimToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgVerbatimText` element.
  result = OrgBuilderT(kind: orgVerbatimText, content: content)
|
||||
|
||||
proc makeCodeToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgCodeText` element.
  result = OrgBuilderT(kind: orgCodeText, content: content)
|
||||
|
||||
proc makeStrikeThroughToken*(content: string): OrgBuilderT =
  ## Wrap `content` in an `orgStrikeThroughText` element.
  result = OrgBuilderT(kind: orgStrikeThroughText, content: content)
|
||||
|
||||
proc makeOrgToken*(orgTokenFn: string -> OrgBuilderT): (seq[ParserToken], seq[OrgBuilderT]) -> seq[OrgBuilderT] =
  ## Return a concat function: flatten a run of parser tokens into a single
  ## string, convert it via `orgTokenFn`, and append it to the builder tree.
  ## NOTE(review): identical in behaviour to `mergeOrgToken` in org_builder —
  ## consider keeping only one of the two.
  return proc(parserTokens: seq[ParserToken], builderTokens: seq[OrgBuilderT]): seq[OrgBuilderT] =
    let merged = parserTokens.foldl(a & b.tokenStringValue(), "")
    return builderTokens & merged.orgTokenFn()
|
||||
|
||||
proc tryParseBuild[T](
  builder: Builder[T],
  builderFns: seq[tuple[
    parserFn: Parser -> ParserResult,
    concatFn: (seq[ParserToken], seq[T]) -> seq[T],
  ]],
  defaultBuilderFn: (seq[ParserToken]) -> seq[T],
  stopAtParserFn = newline,
): BuilderResult[T] =
  ## Parse the remaining text in `builder` by checking the `builderFns` list
  ## for a successful `parserFn`.
  ## The `ok` `parserFn` result is merged into the `Builder[T].tree` using the
  ## paired `concatFn`.
  ## Otherwise keep consuming one character at a time until the
  ## `stopAtParserFn` condition matches (defaults to `newline`).
  ## Any non-matching tokens accumulated along the way are converted with
  ## `defaultBuilderFn`.
  let (parser, tree) = builder  # NOTE(review): `tree` is unused here

  # Mutating accumulators threaded through the scan loop.
  var parserAcc: ParserResult = ParserResult.ok(parser)
  var builderAcc: Builder[T] = builder

  while parserAcc.isOk() and parserAcc.flatMap(stopAtParserFn).isErr():
    # Clear the parser tokens so each candidate parser in the sequence sees
    # only the tokens it matched itself.
    let emptyParser = parserAcc.map(emptyTokens)

    # Find the first matching parser and convert its tokens.
    var found = false
    for builderFn in builderFns:
      let (parserFn, concatFn) = builderFn

      let parseResult = emptyParser.flatMap(parserFn)
      if parseResult.isOk():
        let okParser = parseResult.unsafeGet()

        # Convert all previously unmatched tokens via `defaultBuilderFn`
        # before appending the newly matched element.
        let defaultBuilderTokens = parserAcc
          .foldTokens(
            onError = _ => newSeq[T](),
            onSuccess = defaultBuilderFn,
          )

        found = true
        parserAcc = parseResult.map(emptyTokens)
        builderAcc = builder.initBuilder(
          okParser,
          concatFn(
            okParser.tokens,
            builderAcc[1] & defaultBuilderTokens,
          ),
        )
        break

    if not found:
      # No markup parser matched at this position: consume one character
      # and try again on the next iteration.
      parserAcc = parserAcc.flatMap(anyCh)

  # Flush any trailing unmatched tokens through `defaultBuilderFn`.
  let defaultBuilderTokens = parserAcc
    .foldTokens(
      onError = _ => newSeq[T](),
      onSuccess = defaultBuilderFn,
    )

  BuilderResult[T].ok(builder.initBuilder(
    builderAcc[0],
    builderAcc[1] & defaultBuilderTokens,
  ))
|
||||
|
||||
proc makeRawTokenOrEmpty(tokens: seq[ParserToken]): seq[OrgBuilderT] =
  ## Merge all parser `tokens` into a string and wrap it as a single raw
  ## org element. Unless the string is empty — in that case return an
  ## empty list.
  # The merge residue left two interleaved copies of this proc (one taking
  # `xs`, one taking `tokens`, with duplicated `else:` branches); this is the
  # single consolidated version.
  let str = tokens.foldl(a & b.tokenStringValue(), "")
  if str.len == 0: @[]
  else: @[makeRawToken(str)]
|
||||
|
||||
when isMainModule:
  # Demo: parse one line containing every supported inline markup kind and
  # print the resulting builder tree.
  # The merge residue bound the result twice (`foo` and `test`), listed each
  # (parser, concatFn) pair twice, and echoed both names; this is the single
  # consolidated version.
  let demoResult = OrgBuilderResult.ok(OrgBuilder((
    parser: initParser("Regular *bold* /italic/ _underline_ =verbatim= ~code~ +strikethrough+"),
    tree: newSeq[OrgBuilderT](),
  )))
  .flatMap((builder: OrgBuilder) => tryParseBuild(
    builder = builder,
    builderFns = @[
      (boldParser, makeOrgToken(makeBoldToken)),
      (italicParser, makeOrgToken(makeItalicToken)),
      (underlineParser, makeOrgToken(makeUnderlineToken)),
      (verbatimParser, makeOrgToken(makeVerbatimToken)),
      (codeParser, makeOrgToken(makeCodeToken)),
      (strikeThroughParser, makeOrgToken(makeStrikeThroughToken)),
    ],
    defaultBuilderFn = makeRawTokenOrEmpty,
  ))
  # .foldBuilder(
  #   err => "",
  #   xs => $xs
  # )

  echo demoResult
|
||||
|
||||
Reference in New Issue
Block a user