Merges thirdparty modules into this repo.

2021-04-21 00:00:00 +00:00
parent acb898ad9b
commit 7802ad1211
45 changed files with 2826 additions and 12 deletions

thirdparty/dast/test/all.sh vendored Normal file

@@ -0,0 +1,15 @@
#!/bin/sh
if [ ! -d ".git" ]; then
  echo "please run this at the root of the repository"
  exit 1
fi
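# test <command> <expression> <expected>: evaluate the expression with the
# given command and report any mismatch against the expected output.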
test() {
  result=`$1 "$2"`
  if [ "$result" != "$3" ]; then
    echo "\`$1 \"$2\"\` != \`$3\`"
  fi
}
test ./test/math.d "1+2-3*4/5" "1"

thirdparty/dast/test/math.d vendored Normal file

@@ -0,0 +1,91 @@
#!/usr/bin/env dub
/+ dub.json:
{
  "name": "math",
  "dependencies": {
    "dast": {"path": "../"}
  }
} +/
import std;
import dast.parse,
       dast.tokenize;
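// Token kinds for the arithmetic expression grammar; the matcher attribute on
// each member drives the tokenizer (any run of digits for Number, a literal
// character for the operators and parentheses). End marks end of input.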
enum TokenType {
  @TextAllMatcher!isDigit Number,
  @TextCompleteMatcher!"+" Add,
  @TextCompleteMatcher!"-" Sub,
  @TextCompleteMatcher!"*" Mul,
  @TextCompleteMatcher!"/" Div,
  @TextCompleteMatcher!"(" OpenParen,
  @TextCompleteMatcher!")" CloseParen,
  End,
}
alias Token = dast.tokenize.Token!(TokenType, string);
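// Semantic values carried by the grammar's nonterminals: Whole is the start
// symbol holding the final result, TermList accumulates +/- chains, and Term
// holds a single multiplicative subexpression.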
struct Whole {
  int result;
}
struct TermList {
  int value;
}
struct Term {
  int value;
}
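// Grammar productions: each static method under @ParseRule: reduces its
// parameter sequence to the value it returns, and a @(TokenType.X) Token
// parameter matches a single terminal. Note that Mul/Div take only a plain
// Number on the right-hand side.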
class RuleSet {
 public:
  @ParseRule:
  static Whole ParseWhole(TermList terms, @(TokenType.End) Token) {
    return Whole(terms.value);
  }
  static TermList ParseTermListFromAddedNextTerm(
      TermList lterms, @(TokenType.Add) Token, Term term) {
    return TermList(lterms.value + term.value);
  }
  static TermList ParseTermListFromSubtractedNextTerm(
      TermList lterms, @(TokenType.Sub) Token, Term term) {
    return TermList(lterms.value - term.value);
  }
  static TermList ParseTermListFirstItem(Term term) {
    return TermList(term.value);
  }
  static Term ParseTermFromFirstNumber(@(TokenType.Number) Token num) {
    return Term(num.text.to!int);
  }
  static Term ParseTermFromTermList(
      @(TokenType.OpenParen) Token, TermList terms, @(TokenType.CloseParen) Token) {
    return Term(terms.value);
  }
  static Term ParseMultipledTerm(
      Term lterm, @(TokenType.Mul) Token, @(TokenType.Number) Token num) {
    return Term(lterm.value * num.text.to!int);
  }
  static Term ParseDividedTerm(
      Term lterm, @(TokenType.Div) Token, @(TokenType.Number) Token num) {
    return Term(lterm.value / num.text.to!int);
  }
}
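// Tokenize the command-line expression, append an explicit End token, parse it
// into a Whole with the rules above, and print the result; parse errors are
// reported with the offending token's text, type, and position.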
void main(string[] args) {
  assert(args.length == 2);
  // PrintItemSet!(TokenType, RuleSet, Whole);
  try {
    args[1].
      Tokenize!TokenType.
      chain([Token("", TokenType.End)]).
      Parse!Whole(cast(RuleSet) null).
      result.writeln;
  } catch (ParseException!Token e) {
    "%s at token '%s' [%s] at (%d, %d)".
      writefln(e.msg, e.token.text, e.token.type, e.token.pos.stline, e.token.pos.stchar);
  }
}