diff --git a/README.md b/README.md
index 5a8d17b..78c7ec1 100644
--- a/README.md
+++ b/README.md
@@ -119,6 +119,19 @@
 There are several kinds of combinators included in `better-parse`:
 
 * `foo asJust bar` can be used to map a parser to some constant value.
+* `bind`, `useBind`
+
+  The bind combinator works like the map combinator, but instead of running a parser and mapping over its
+  result, bind runs a parser, transforms the output into a new parser, then runs that parser on the remaining input.
+
+  ```kotlin
+  val abMany = Token("abMany", "(a|b)*")
+  val revABMany = abMany bind { Token("reversed", it.text.reversed()) }
+  // Parser, parses some string of "a"s and "b"s and then the reverse, returning just the reverse
+  ```
+
+  * `someParser useBind { ... }` is a `bind` equivalent that takes a function with receiver instead. Example: `id useBind { Token("id", text) }`.
+
 * `optional(...)`
 
   Given a `Parser`, tries to parse the sequence with it, but returns a `null` result if the parser failed, and thus never fails itself:
diff --git a/src/main/kotlin/com/github/h0tk3y/betterParse/combinators/BindCombinator.kt b/src/main/kotlin/com/github/h0tk3y/betterParse/combinators/BindCombinator.kt
new file mode 100644
index 0000000..6f66db6
--- /dev/null
+++ b/src/main/kotlin/com/github/h0tk3y/betterParse/combinators/BindCombinator.kt
@@ -0,0 +1,32 @@
+package com.github.h0tk3y.betterParse.combinators
+
+import com.github.h0tk3y.betterParse.lexer.TokenMatch
+import com.github.h0tk3y.betterParse.parser.ErrorResult
+import com.github.h0tk3y.betterParse.parser.ParseResult
+import com.github.h0tk3y.betterParse.parser.Parsed
+import com.github.h0tk3y.betterParse.parser.Parser
+
+/** Parses the sequence with [innerParser], and if that succeeds, maps its [Parsed] result with [transform].
+ * Then runs the parser produced by [transform] on the remaining input.
+ * Returns the [ErrorResult] of the [innerParser] otherwise.
+ * @sample BindTest */
+class BindCombinator<T, R>(
+    val innerParser: Parser<T>,
+    val transform: (T) -> Parser<R>
+) : Parser<R> {
+    override fun tryParse(tokens: Sequence<TokenMatch>): ParseResult<R> {
+        val innerResult = innerParser.tryParse(tokens)
+        return when (innerResult) {
+            is ErrorResult -> innerResult
+            is Parsed -> transform(innerResult.value).tryParse(innerResult.remainder)
+        }
+    }
+}
+
+/** Applies the [transform] function to the successful result of the receiver parser and then runs
+ * the parser it returns on the remaining input. See [BindCombinator]. */
+infix fun <A, B> Parser<A>.bind(transform: (A) -> Parser<B>): Parser<B> = BindCombinator(this, transform)
+
+/** A [bind] equivalent whose [transform] function takes the successful result as its receiver and
+ * returns the parser to run on the remaining input. See [BindCombinator]. */
+infix fun <A, B> Parser<A>.useBind(transform: A.() -> Parser<B>): Parser<B> = BindCombinator(this, transform)
\ No newline at end of file
diff --git a/src/main/kotlin/com/github/h0tk3y/betterParse/combinators/PureCombinator.kt b/src/main/kotlin/com/github/h0tk3y/betterParse/combinators/PureCombinator.kt
new file mode 100644
index 0000000..f22d9cb
--- /dev/null
+++ b/src/main/kotlin/com/github/h0tk3y/betterParse/combinators/PureCombinator.kt
@@ -0,0 +1,14 @@
+package com.github.h0tk3y.betterParse.combinators
+
+import com.github.h0tk3y.betterParse.lexer.TokenMatch
+import com.github.h0tk3y.betterParse.parser.ParseResult
+import com.github.h0tk3y.betterParse.parser.Parsed
+import com.github.h0tk3y.betterParse.parser.Parser
+
+/** Returns [Parsed] of [value] without consuming any input */
+class PureCombinator<T>(val value: T) : Parser<T> {
+    override fun tryParse(tokens: Sequence<TokenMatch>): ParseResult<T> = Parsed(value, tokens)
+}
+
+/** Returns [Parsed] of [value] without consuming any input */
+fun <T> pure(value: T) : Parser<T> = PureCombinator(value)
\ No newline at end of file
diff --git a/src/test/kotlin/BindTest.kt b/src/test/kotlin/BindTest.kt
new file mode 100644
index 0000000..a92b59e
--- /dev/null
+++ b/src/test/kotlin/BindTest.kt
@@ -0,0 +1,52 @@
+import com.github.h0tk3y.betterParse.combinators.*
+import com.github.h0tk3y.betterParse.lexer.DefaultTokenizer
+import com.github.h0tk3y.betterParse.lexer.Token
+import com.github.h0tk3y.betterParse.parser.toParsedOrThrow
+import org.junit.Assert.assertEquals
+import org.junit.Test
+
+class BindTest {
+    val a_or_b = Token("a_or_b", "[ab]")
+    val b_and_a = Token("b_and_a", "ba")
+    val c = Token("c", "c")
+    val lexer = DefaultTokenizer(listOf(b_and_a, a_or_b, c))
+
+    @Test fun testSuccessfulBind() {
+        val tokens = lexer.tokenize("aba")
+        val result = a_or_b.bind {
+            when(it.text) {
+                "a" -> b_and_a
+                "b" -> a_or_b
+                else -> c
+            }
+        }.tryParse(tokens)
+        assertEquals("ba", result.toParsedOrThrow().value.text)
+    }
+
+    @Test fun testSuccessfulBindUse() {
+        val tokens = lexer.tokenize("baccba")
+        val result = (b_and_a useBind {
+            when(text) {
+                "ba" -> c
+                "a", "b" -> b_and_a
+                else -> a_or_b
+            }
+        }).tryParse(tokens)
+        assertEquals("c", result.toParsedOrThrow().value.text)
+    }
+
+    @Test fun testBindPure() {
+        val tokens = lexer.tokenize("ba")
+        val result = b_and_a.tryParse(tokens)
+        val resultBindPure = b_and_a.bind { pure(it) }.tryParse(tokens)
+        assertEquals("ba", result.toParsedOrThrow().value.text)
+        assertEquals("ba", resultBindPure.toParsedOrThrow().value.text)
+    }
+
+    @Test fun testError() {
+        val tokens = lexer.tokenize("bbbb")
+        val resultNonBinded = b_and_a.tryParse(tokens)
+        val resultBinded = b_and_a.bind { pure(it.text) }.tryParse(tokens)
+        assertEquals(resultNonBinded.javaClass, resultBinded.javaClass)
+    }
+}
\ No newline at end of file
diff --git a/src/test/kotlin/PureTest.kt b/src/test/kotlin/PureTest.kt
new file mode 100644
index 0000000..a8f779a
--- /dev/null
+++ b/src/test/kotlin/PureTest.kt
@@ -0,0 +1,26 @@
+import com.github.h0tk3y.betterParse.combinators.pure
+import com.github.h0tk3y.betterParse.lexer.DefaultTokenizer
+import com.github.h0tk3y.betterParse.lexer.Token
+import com.github.h0tk3y.betterParse.parser.UnparsedRemainder
+import com.github.h0tk3y.betterParse.parser.toParsedOrThrow
+import com.github.h0tk3y.betterParse.parser.tryParseToEnd
+import org.junit.Assert.assertEquals
+import org.junit.Assert.assertTrue
+import org.junit.Test
+
+class PureTest {
+    val a = Token("a", "a")
+    val lexer = DefaultTokenizer(listOf(a))
+
+    @Test fun testSuccessfulPure() {
+        val tokens = lexer.tokenize("a")
+        val result = pure(42).tryParse(tokens)
+        assertEquals(42, result.toParsedOrThrow().value)
+    }
+
+    @Test fun testNotConsumesInputPure() {
+        val tokens = lexer.tokenize("a")
+        val result = pure(42).tryParseToEnd(tokens)
+        assertTrue(result is UnparsedRemainder)
+    }
+}
\ No newline at end of file