author     Ludovic Courtès <ludo@gnu.org>    2010-03-31 00:41:28 +0200
committer  Ludovic Courtès <ludo@gnu.org>    2010-03-31 00:41:59 +0200
commit     1b10152215db2ed381bd63c8c234eb44eb7ed414 (patch)
tree       6e8fc7d5c965ba8bca3a62a34e382feb25ba69ae /test-suite/lalr
parent     3ffd1ba96e986581d97079308fc15ef1fc933cdb (diff)
Add Boucher's `lalr-scm' as the `(system base lalr)' module.
Taken from r51 of <http://lalr-scm.googlecode.com/svn/trunk>.

* module/Makefile.am (SYSTEM_BASE_SOURCES): Add `system/base/lalr.scm'.
  (NOCOMP_SOURCES): Add `system/base/lalr.upstream.scm'.
* module/system/base/lalr.scm, module/system/base/lalr.upstream.scm: New
  files.
* test-suite/Makefile.am (LALR_TESTS, LALR_EXTRA, TESTS,
  TESTS_ENVIRONMENT): New variables.
  (EXTRA_DIST): Add $(LALR_EXTRA).
* test-suite/lalr/common-test.scm, test-suite/lalr/glr-test.scm,
  test-suite/lalr/test-glr-associativity.scm,
  test-suite/lalr/test-glr-basics-01.scm,
  test-suite/lalr/test-glr-basics-02.scm,
  test-suite/lalr/test-glr-basics-03.scm,
  test-suite/lalr/test-glr-basics-04.scm,
  test-suite/lalr/test-glr-basics-05.scm,
  test-suite/lalr/test-glr-script-expression.scm,
  test-suite/lalr/test-glr-single-expressions.scm,
  test-suite/lalr/test-lr-associativity-01.scm,
  test-suite/lalr/test-lr-associativity-02.scm,
  test-suite/lalr/test-lr-associativity-03.scm,
  test-suite/lalr/test-lr-associativity-04.scm,
  test-suite/lalr/test-lr-basics-01.scm,
  test-suite/lalr/test-lr-basics-02.scm,
  test-suite/lalr/test-lr-basics-03.scm,
  test-suite/lalr/test-lr-basics-04.scm,
  test-suite/lalr/test-lr-basics-05.scm,
  test-suite/lalr/test-lr-error-recovery-01.scm,
  test-suite/lalr/test-lr-error-recovery-02.scm,
  test-suite/lalr/test-lr-no-clause.scm,
  test-suite/lalr/test-lr-script-expression.scm,
  test-suite/lalr/test-lr-single-expressions.scm: New files.
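For orientation, every test added below follows the same pattern: it builds a parser with `lalr-parser' from the new `(system base lalr)' module and feeds it tokens created with `make-lexical-token'. A minimal sketch of that usage, distilled from test-lr-basics-01.scm further down (the one-rule grammar and token values are purely illustrative):

    (use-modules (system base lalr))

    ;; A one-rule grammar: terminal A, nonterminal e, returning A's value.
    (define parser
      (lalr-parser (expect: 0)        ; no conflicts expected
                   (A)                ; terminal symbols
                   (e (A) : $1)))     ; e -> A

    ;; A lexer is a thunk yielding tokens, then an end-of-input token.
    (define (make-lexer tokens)
      (lambda ()
        (if (null? tokens)
            (make-lexical-token '*eoi* #f #f)
            (let ((t (car tokens)))
              (set! tokens (cdr tokens))
              t))))

    (parser (make-lexer (list (make-lexical-token 'A #f 1)))
            (lambda (message . args) (error message args)))
    ;; => 1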
Diffstat (limited to 'test-suite/lalr')
-rw-r--r--  test-suite/lalr/common-test.scm                      63
-rw-r--r--  test-suite/lalr/glr-test.scm                         88
-rw-r--r--  test-suite/lalr/run-guile-test.sh                    30
-rw-r--r--  test-suite/lalr/test-glr-associativity.scm          102
-rw-r--r--  test-suite/lalr/test-glr-basics-01.scm               35
-rw-r--r--  test-suite/lalr/test-glr-basics-02.scm               30
-rw-r--r--  test-suite/lalr/test-glr-basics-03.scm               37
-rw-r--r--  test-suite/lalr/test-glr-basics-04.scm               43
-rw-r--r--  test-suite/lalr/test-glr-basics-05.scm               40
-rw-r--r--  test-suite/lalr/test-glr-script-expression.scm      125
-rw-r--r--  test-suite/lalr/test-glr-single-expressions.scm      60
-rw-r--r--  test-suite/lalr/test-lr-associativity-01.scm         91
-rw-r--r--  test-suite/lalr/test-lr-associativity-02.scm         91
-rw-r--r--  test-suite/lalr/test-lr-associativity-03.scm         85
-rw-r--r--  test-suite/lalr/test-lr-associativity-04.scm         83
-rw-r--r--  test-suite/lalr/test-lr-basics-01.scm                38
-rw-r--r--  test-suite/lalr/test-lr-basics-02.scm                33
-rw-r--r--  test-suite/lalr/test-lr-basics-03.scm                36
-rw-r--r--  test-suite/lalr/test-lr-basics-04.scm                31
-rw-r--r--  test-suite/lalr/test-lr-basics-05.scm                36
-rw-r--r--  test-suite/lalr/test-lr-error-recovery-01.scm       145
-rw-r--r--  test-suite/lalr/test-lr-error-recovery-02.scm        51
-rw-r--r--  test-suite/lalr/test-lr-no-clause.scm                40
-rw-r--r--  test-suite/lalr/test-lr-script-expression.scm       119
-rw-r--r--  test-suite/lalr/test-lr-single-expressions.scm       59
25 files changed, 1591 insertions(+), 0 deletions(-)
diff --git a/test-suite/lalr/common-test.scm b/test-suite/lalr/common-test.scm
new file mode 100644
index 000000000..8563029ce
--- /dev/null
+++ b/test-suite/lalr/common-test.scm
@@ -0,0 +1,63 @@
+;;; common-test.scm --
+;;;
+
+;; Slightly modified for Guile by Ludovic Courtès <ludo@gnu.org>, 2010.
+
+(use-modules (system base lalr)
+ (ice-9 pretty-print))
+
+(define *error* '())
+
+(define-syntax when
+ (syntax-rules ()
+ ((_ ?expr ?body ...)
+ (if ?expr
+ (let () ?body ...)
+ #f))))
+
+(define-syntax check
+ (syntax-rules (=>)
+ ((_ ?expr => ?expected-result)
+ (check ?expr (=> equal?) ?expected-result))
+
+ ((_ ?expr (=> ?equal) ?expected-result)
+ (let ((result ?expr)
+ (expected ?expected-result))
+ (set! *error* '())
+ (when (not (?equal result expected))
+ (display "Failed test: \n")
+ (pretty-print (quote ?expr))(newline)
+ (display "\tresult was: ")
+ (pretty-print result)(newline)
+ (display "\texpected: ")
+ (pretty-print expected)(newline)
+ (exit 1))))))
+
+;;; --------------------------------------------------------------------
+
+(define (display-result v)
+ (if v
+ (begin
+ (display "==> ")
+ (display v)
+ (newline))))
+
+(define eoi-token
+ (make-lexical-token '*eoi* #f #f))
+
+(define (make-lexer tokens)
+ (lambda ()
+ (if (null? tokens)
+ eoi-token
+ (let ((t (car tokens)))
+ (set! tokens (cdr tokens))
+ t))))
+
+(define (error-handler message . args)
+ (set! *error* (cons `(error-handler ,message . ,(if (pair? args)
+ (lexical-token-category (car args))
+ '()))
+ *error*))
+ (cons message args))
+
+;;; end of file
diff --git a/test-suite/lalr/glr-test.scm b/test-suite/lalr/glr-test.scm
new file mode 100644
index 000000000..18b8e865f
--- /dev/null
+++ b/test-suite/lalr/glr-test.scm
@@ -0,0 +1,88 @@
+":";exec snow -- "$0" "$@"
+;;;
+;;;; Tests for the GLR parser generator
+;;;
+;;
+;; @created "Fri Aug 19 11:23:48 EDT 2005"
+;;
+
+(package* glr-test/v1.0.0
+ (require: lalr/v2.4.0))
+
+
+(define (syntax-error msg . args)
+ (display msg (current-error-port))
+ (for-each (cut format (current-error-port) " ~A" <>) args)
+ (newline (current-error-port))
+ (throw 'misc-error))
+
+
+(define (make-lexer words)
+ (let ((phrase words))
+ (lambda ()
+ (if (null? phrase)
+ '*eoi*
+ (let ((word (car phrase)))
+ (set! phrase (cdr phrase))
+ word)))))
+
+
+;;;
+;;;; Test 1
+;;;
+
+
+(define parser-1
+ ;; Grammar taken from Tomita's "An Efficient Augmented-Context-Free Parsing Algorithm"
+ (lalr-parser
+ (driver: glr)
+ (expect: 2)
+ (*n *v *d *p)
+ (<s> (<np> <vp>)
+ (<s> <pp>))
+ (<np> (*n)
+ (*d *n)
+ (<np> <pp>))
+ (<pp> (*p <np>))
+ (<vp> (*v <np>))))
+
+
+(define *phrase-1* '(*n *v *d *n *p *d *n *p *d *n *p *d *n))
+
+(define (test-1)
+ (parser-1 (make-lexer *phrase-1*) syntax-error))
+
+
+;;;
+;;;; Test 2
+;;;
+
+
+(define parser-2
+ ;; The dangling-else problem
+ (lalr-parser
+ (driver: glr)
+ (expect: 1)
+ ((nonassoc: if then else e s))
+ (<s> (s)
+ (if e then <s>)
+ (if e then <s> else <s>))))
+
+
+(define *phrase-2* '(if e then if e then s else s))
+
+(define (test-2)
+ (parser-2 (make-lexer *phrase-2*) syntax-error))
+
+
+
+
+(define (assert-length l n test-name)
+ (display "Test '")
+ (display test-name)
+ (display (if (not (= (length l) n)) "' failed!" "' passed!"))
+ (newline))
+
+(assert-length (test-1) 14 1)
+(assert-length (test-2) 2 2)
+
diff --git a/test-suite/lalr/run-guile-test.sh b/test-suite/lalr/run-guile-test.sh
new file mode 100644
index 000000000..ab29b83dd
--- /dev/null
+++ b/test-suite/lalr/run-guile-test.sh
@@ -0,0 +1,30 @@
+# run-guile-test.sh --
+#
+
+for item in \
+ test-glr-basics-01.scm \
+ test-glr-basics-02.scm \
+ test-glr-basics-03.scm \
+ test-glr-basics-04.scm \
+ test-glr-basics-05.scm \
+ test-glr-associativity.scm \
+ test-glr-script-expression.scm \
+ test-glr-single-expressions.scm \
+ \
+ test-lr-basics-01.scm \
+ test-lr-basics-02.scm \
+ test-lr-basics-03.scm \
+ test-lr-basics-04.scm \
+ test-lr-basics-05.scm \
+ test-lr-error-recovery-01.scm \
+ test-lr-error-recovery-02.scm \
+ test-lr-no-clause.scm \
+ test-lr-associativity-01.scm \
+ test-lr-script-expression.scm \
+ test-lr-single-expressions.scm
+do
+    printf "\n\n*** Running $item\n"
+    guile $item
+done
+
+### end of file
diff --git a/test-suite/lalr/test-glr-associativity.scm b/test-suite/lalr/test-glr-associativity.scm
new file mode 100644
index 000000000..6a5a5e25b
--- /dev/null
+++ b/test-suite/lalr/test-glr-associativity.scm
@@ -0,0 +1,102 @@
+;;; test-glr-associativity.scm
+;;
+;;With the GLR parser, neither terminal precedence nor non-terminal
+;;associativity is respected; instead, each conflict spawns two child
+;;parser processes, so the driver may return several parses.
+;;
+
+(load "common-test.scm")
+
+(define parser
+ (lalr-parser
+ (driver: glr)
+ (expect: 0)
+
+ (N LPAREN RPAREN
+ (left: + -)
+ (right: * /)
+ (nonassoc: uminus))
+
+ (output (expr) : $1)
+ (expr (expr + expr) : (list $1 '+ $3)
+ (expr - expr) : (list $1 '- $3)
+ (expr * expr) : (list $1 '* $3)
+ (expr / expr) : (list $1 '/ $3)
+ (- expr (prec: uminus)) : (list '- $2)
+ (N) : $1
+ (LPAREN expr RPAREN) : $2)))
+
+(define (doit . tokens)
+ (parser (make-lexer tokens) error-handler))
+
+;;; --------------------------------------------------------------------
+
+;;Remember that the result of the GLR driver is a list of parses, not a
+;;single parse.
+
+(check
+ (doit (make-lexical-token 'N #f 1))
+ => '(1))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token '+ #f '+)
+ (make-lexical-token 'N #f 2))
+ => '((1 + 2)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token '* #f '*)
+ (make-lexical-token 'N #f 2))
+ => '((1 * 2)))
+
+(check
+ (doit (make-lexical-token '- #f '-)
+ (make-lexical-token 'N #f 1))
+ => '((- 1)))
+
+(check
+ (doit (make-lexical-token '- #f '-)
+ (make-lexical-token '- #f '-)
+ (make-lexical-token 'N #f 1))
+ => '((- (- 1))))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token '+ #f '+)
+ (make-lexical-token '- #f '-)
+ (make-lexical-token 'N #f 2))
+ => '((1 + (- 2))))
+
+;;; --------------------------------------------------------------------
+
+(check
+ ;;left-associativity
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token '+ #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token '+ #f '+)
+ (make-lexical-token 'N #f 3))
+ => '(((1 + 2) + 3)))
+
+(check
+ ;;right-associativity
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token '* #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token '* #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(((1 * 2) * 3)
+ (1 * (2 * 3))))
+
+(check
+ ;;precedence
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token '+ #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token '* #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(((1 + 2) * 3)
+ (1 + (2 * 3))))
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-basics-01.scm b/test-suite/lalr/test-glr-basics-01.scm
new file mode 100644
index 000000000..8cac63c71
--- /dev/null
+++ b/test-suite/lalr/test-glr-basics-01.scm
@@ -0,0 +1,35 @@
+;;; test-glr-basics-01.scm --
+;;
+;;A grammar that accepts only a single terminal as input. It rejects
+;;end-of-input as the first token.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let* ((lexer (make-lexer tokens))
+ (parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (A)
+ (e (A) : $1))))
+ (parser lexer error-handler)))
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => '(1))
+
+(check
+ (doit)
+ => '())
+
+(check
+ ;;The first A is parsed and reduced correctly.  The second A triggers
+ ;;an error which empties the stack and consumes all the remaining
+ ;;input tokens.  Finally, an unexpected end-of-input error is returned
+ ;;because EOI is invalid as the first token after the start.
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '())
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-basics-02.scm b/test-suite/lalr/test-glr-basics-02.scm
new file mode 100644
index 000000000..a4e24ad9d
--- /dev/null
+++ b/test-suite/lalr/test-glr-basics-02.scm
@@ -0,0 +1,30 @@
+;;; test-glr-basics-02.scm --
+;;
+;;A grammar that accepts only a single terminal or the EOI.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (A)
+ (e (A) : $1
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit)
+ => '(0))
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => '(1))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '())
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-basics-03.scm b/test-suite/lalr/test-glr-basics-03.scm
new file mode 100644
index 000000000..ec80ed514
--- /dev/null
+++ b/test-suite/lalr/test-glr-basics-03.scm
@@ -0,0 +1,37 @@
+;;; test-glr-basics-03.scm --
+;;
+;;A grammar that accepts fixed sequences of a single terminal or the
+;;EOI.
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (A)
+ (e (A) : (list $1)
+ (A A) : (list $1 $2)
+ (A A A) : (list $1 $2 $3)
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => '((1)))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2))
+ => '((1 2)))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '((1 2 3)))
+
+(check
+ (doit)
+ => '(0))
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-basics-04.scm b/test-suite/lalr/test-glr-basics-04.scm
new file mode 100644
index 000000000..00d287110
--- /dev/null
+++ b/test-suite/lalr/test-glr-basics-04.scm
@@ -0,0 +1,43 @@
+;;; test-glr-basics-04.scm --
+;;
+;;A grammar accepting a sequence of equal tokens of arbitrary length.
+;;The return value is the value of the last parsed token.
+
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (A)
+ (e (e A) : $2
+ (A) : $1
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit)
+ => '(0))
+
+(check
+ ;;Two results because there is a shift/reduce conflict, so two
+ ;;processes are generated.
+ (doit (make-lexical-token 'A #f 1))
+ => '(1 1))
+
+(check
+ ;;Two results because there is a shift/reduce conflict, so two
+ ;;processes are generated. Notice that the rules:
+ ;;
+ ;; (e A) (A)
+ ;;
+ ;;generate only one conflict when the second "A" comes. The third
+ ;;"A" comes when the state is inside the rule "(e A)", so there is
+ ;;no conflict.
+ ;;
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '(3 3))
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-basics-05.scm b/test-suite/lalr/test-glr-basics-05.scm
new file mode 100644
index 000000000..ca48fd79f
--- /dev/null
+++ b/test-suite/lalr/test-glr-basics-05.scm
@@ -0,0 +1,40 @@
+;;; test-glr-basics-05.scm --
+;;
+;;A grammar accepting a sequence of equal tokens of arbitrary length.
+;;The return value is the list of values.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (A)
+ (e (e A) : (cons $2 $1)
+ (A) : (list $1)
+ () : (list 0)))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit)
+ => '((0)))
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => '((1 0)
+ (1)))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2))
+ => '((2 1 0)
+ (2 1)))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '((3 2 1 0)
+ (3 2 1)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-script-expression.scm b/test-suite/lalr/test-glr-script-expression.scm
new file mode 100644
index 000000000..5d6d4265c
--- /dev/null
+++ b/test-suite/lalr/test-glr-script-expression.scm
@@ -0,0 +1,125 @@
+;;; test-glr-script-expression.scm --
+;;
+;;Parse scripts, each line an expression.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (N O C T (left: A) (left: M) (nonassoc: U))
+
+ (script (lines) : (reverse $1))
+
+ (lines (lines line) : (cons $2 $1)
+ (line) : (list $1))
+
+ (line (T) : #\newline
+ (E T) : $1
+ (error T) : (list 'error-clause $2))
+
+ (E (N) : $1
+ (E A E) : ($2 $1 $3)
+ (E M E) : ($2 $1 $3)
+ (A E (prec: U)) : ($1 $2)
+ (O E C) : $2))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; Correct input
+
+(check
+ (doit (make-lexical-token 'T #f #\newline))
+ => '((#\newline)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'T #f #\newline))
+ => '((1)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'T #f #\newline))
+ => '((3)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline))
+ => '((9) (7)))
+
+(check
+ (doit (make-lexical-token 'N #f 10)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline))
+ => '((23)))
+
+(check
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'C #f #\))
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline))
+ => '((9)))
+
+(check
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'C #f #\))
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline)
+
+ (make-lexical-token 'N #f 4)
+ (make-lexical-token 'M #f /)
+ (make-lexical-token 'N #f 5)
+ (make-lexical-token 'T #f #\newline))
+ => '((9 4/5)))
+
+;;; --------------------------------------------------------------------
+
+(check
+ ;;Successful error recovery.
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline)
+
+ (make-lexical-token 'N #f 4)
+ (make-lexical-token 'M #f /)
+ (make-lexical-token 'N #f 5)
+ (make-lexical-token 'T #f #\newline))
+ => '())
+
+(check
+ ;;Unexpected end of input.
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2))
+ => '())
+
+(check
+ ;;Unexpected end of input.
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'T #f #\newline))
+ => '())
+
+;;; end of file
diff --git a/test-suite/lalr/test-glr-single-expressions.scm b/test-suite/lalr/test-glr-single-expressions.scm
new file mode 100644
index 000000000..9415262d4
--- /dev/null
+++ b/test-suite/lalr/test-glr-single-expressions.scm
@@ -0,0 +1,60 @@
+;;; test-glr-single-expressions.scm --
+;;
+;;Grammar accepting single expressions.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (driver: glr)
+ (N O C (left: A) (left: M) (nonassoc: U))
+
+ (E (N) : $1
+ (E A E) : ($2 $1 $3)
+ (E M E) : ($2 $1 $3)
+ (A E (prec: U)) : ($1 $2)
+ (O E C) : $2))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+
+(check ;correct input
+ (doit (make-lexical-token 'N #f 1))
+ => '(1))
+
+(check ;correct input
+ (doit (make-lexical-token 'A #f -)
+ (make-lexical-token 'N #f 1))
+ => '(-1))
+
+(check ;correct input
+ (doit (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 1))
+ => '(1))
+
+(check ;correct input
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2))
+ => '(3))
+
+(check ;correct input
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3))
+ => '(9 7))
+
+(check ;correct input
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'C #f #\))
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3))
+ => '(9))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-associativity-01.scm b/test-suite/lalr/test-lr-associativity-01.scm
new file mode 100644
index 000000000..8519dee1c
--- /dev/null
+++ b/test-suite/lalr/test-lr-associativity-01.scm
@@ -0,0 +1,91 @@
+;;; test-lr-associativity-01.scm --
+;;
+;;Show how to use left and right associativity. Notice that the
+;;terminal M is declared as right associative; this influences the
+;;binding of values to the $n symbols in the semantic clauses. The
+;;semantic clause in the rule:
+;;
+;; (E M E M E) : (list $1 $2 (list $3 $4 $5))
+;;
+;;looks like it is right-associated, and it is because we have declared
+;;M as "right:".
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser
+ (expect: 0)
+ (N (left: A)
+ (right: M)
+ (nonassoc: U))
+ (E (N) : $1
+ (E A E) : (list $1 $2 $3)
+ (E M E) : (list $1 $2 $3)
+ (E M E M E) : (list $1 $2 (list $3 $4 $5))
+ (A E (prec: U)) : (list '- $2)))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; Single operator.
+
+(check
+ (doit (make-lexical-token 'N #f 1))
+ => 1)
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2))
+ => '(1 + 2))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2))
+ => '(1 * 2))
+
+(check
+ (doit (make-lexical-token 'A #f '-)
+ (make-lexical-token 'N #f 1))
+ => '(- 1))
+
+;;; --------------------------------------------------------------------
+;;; Precedence.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(1 + (2 * 3)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 * 2) + 3))
+
+;;; --------------------------------------------------------------------
+;;; Associativity.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 + 2) + 3))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(1 * (2 * 3)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-associativity-02.scm b/test-suite/lalr/test-lr-associativity-02.scm
new file mode 100644
index 000000000..6fb62e767
--- /dev/null
+++ b/test-suite/lalr/test-lr-associativity-02.scm
@@ -0,0 +1,91 @@
+;;; test-lr-associativity-02.scm --
+;;
+;;Show how to use left and right associativity. Notice that the
+;;terminal M is declared as left associative; this influences the
+;;binding of values to the $n symbols in the semantic clauses. The
+;;semantic clause in the rule:
+;;
+;; (E M E M E) : (list $1 $2 (list $3 $4 $5))
+;;
+;;looks like it is right-associated, but the result is left-associated
+;;because we have declared M as "left:".
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser
+ (expect: 0)
+ (N (left: A)
+ (left: M)
+ (nonassoc: U))
+ (E (N) : $1
+ (E A E) : (list $1 $2 $3)
+ (E M E) : (list $1 $2 $3)
+ (E M E M E) : (list $1 $2 (list $3 $4 $5))
+ (A E (prec: U)) : (list '- $2)))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; Single operator.
+
+(check
+ (doit (make-lexical-token 'N #f 1))
+ => 1)
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2))
+ => '(1 + 2))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2))
+ => '(1 * 2))
+
+(check
+ (doit (make-lexical-token 'A #f '-)
+ (make-lexical-token 'N #f 1))
+ => '(- 1))
+
+;;; --------------------------------------------------------------------
+;;; Precedence.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(1 + (2 * 3)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 * 2) + 3))
+
+;;; --------------------------------------------------------------------
+;;; Associativity.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 + 2) + 3))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '((1 * 2) * 3))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-associativity-03.scm b/test-suite/lalr/test-lr-associativity-03.scm
new file mode 100644
index 000000000..4c35b822b
--- /dev/null
+++ b/test-suite/lalr/test-lr-associativity-03.scm
@@ -0,0 +1,85 @@
+;;; test-lr-associativity-03.scm --
+;;
+;;Show how to use non-associativity. Notice that the terminals A and M
+;;are declared as non-associative; this influences the binding of
+;;values to the $n symbols in the semantic clauses. The semantic
+;;clause in the rule:
+;;
+;; (E M E M E) : (list $1 $2 (list $3 $4 $5))
+;;
+;;is right-nested, but here the nesting comes from the semantic action
+;;itself: since M is declared "nonassoc:", a two-operator expression is
+;;matched by this single rule rather than by two nested (E M E) rules.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser
+ (expect: 0)
+ (N (nonassoc: A)
+ (nonassoc: M))
+ (E (N) : $1
+ (E A E) : (list $1 $2 $3)
+ (E A E A E) : (list (list $1 $2 $3) $4 $5)
+ (E M E) : (list $1 $2 $3)
+ (E M E M E) : (list $1 $2 (list $3 $4 $5))))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; Single operator.
+
+(check
+ (doit (make-lexical-token 'N #f 1))
+ => 1)
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2))
+ => '(1 + 2))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2))
+ => '(1 * 2))
+
+;;; --------------------------------------------------------------------
+;;; Precedence.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(1 + (2 * 3)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 * 2) + 3))
+
+;;; --------------------------------------------------------------------
+;;; Associativity.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 + 2) + 3))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(1 * (2 * 3)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-associativity-04.scm b/test-suite/lalr/test-lr-associativity-04.scm
new file mode 100644
index 000000000..0aea3f096
--- /dev/null
+++ b/test-suite/lalr/test-lr-associativity-04.scm
@@ -0,0 +1,83 @@
+;;; test-lr-associativity-04.scm --
+;;
+;;Show how to use associativity.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser
+ (expect: 0)
+ (N (left: A)
+ (left: M))
+ (E (N) : $1
+
+ (E A E) : (list $1 $2 $3)
+ (E A E A E) : (list (list $1 $2 $3) $4 $5)
+
+ (E M E) : (list $1 $2 $3)
+ (E M E M E) : (list $1 $2 (list $3 $4 $5))
+
+ (E A E M E) : (list $1 $2 $3 $4 $5)
+ (E M E A E) : (list $1 $2 $3 $4 $5)
+ ))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; Single operator.
+
+(check
+ (doit (make-lexical-token 'N #f 1))
+ => 1)
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2))
+ => '(1 + 2))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2))
+ => '(1 * 2))
+
+;;; --------------------------------------------------------------------
+;;; Precedence.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '(1 + (2 * 3)))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 * 2) + 3))
+
+;;; --------------------------------------------------------------------
+;;; Associativity.
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'A #f '+)
+ (make-lexical-token 'N #f 3))
+ => '((1 + 2) + 3))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f '*)
+ (make-lexical-token 'N #f 3))
+ => '((1 * 2) * 3))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-basics-01.scm b/test-suite/lalr/test-lr-basics-01.scm
new file mode 100644
index 000000000..0176fe689
--- /dev/null
+++ b/test-suite/lalr/test-lr-basics-01.scm
@@ -0,0 +1,38 @@
+;;; test-lr-basics-01.scm --
+;;
+;;A grammar that accepts only a single terminal as input. It rejects
+;;end-of-input as the first token.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let* ((lexer (make-lexer tokens))
+ (parser (lalr-parser (expect: 0)
+ (A)
+ (e (A) : $1))))
+ (parser lexer error-handler)))
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => 1)
+
+(check
+ (let ((r (doit)))
+ (cons r *error*))
+ => '(#f (error-handler "Syntax error: unexpected end of input")))
+
+(check
+ ;;The first A is parsed and reduced correctly.  The second A triggers
+ ;;an error which empties the stack and consumes all the remaining
+ ;;input tokens.  Finally, an unexpected end-of-input error is returned
+ ;;because EOI is invalid as the first token after the start.
+ (let ((r (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))))
+ (cons r *error*))
+ => '(#f
+ (error-handler "Syntax error: unexpected end of input")
+ (error-handler "Syntax error: unexpected token : " . A)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-basics-02.scm b/test-suite/lalr/test-lr-basics-02.scm
new file mode 100644
index 000000000..4a5abc1af
--- /dev/null
+++ b/test-suite/lalr/test-lr-basics-02.scm
@@ -0,0 +1,33 @@
+;;; test-lr-basics-02.scm --
+;;
+;;A grammar that accepts only a single terminal or the EOI.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (A)
+ (e (A) : $1
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit)
+ => 0)
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => 1)
+
+(check
+ ;;The first A is parsed and reduced correctly.  The second A triggers
+ ;;an error which empties the stack and consumes all the remaining
+ ;;input tokens.  Finally, the end-of-input token is parsed correctly.
+ (let ((r (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))))
+ (cons r *error*))
+ => '(0 (error-handler "Syntax error: unexpected token : " . A)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-basics-03.scm b/test-suite/lalr/test-lr-basics-03.scm
new file mode 100644
index 000000000..156de360d
--- /dev/null
+++ b/test-suite/lalr/test-lr-basics-03.scm
@@ -0,0 +1,36 @@
+;;; test-lr-basics-03.scm --
+;;
+;;A grammar that accepts fixed sequences of a single terminal or the
+;;EOI.
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (A)
+ (e (A) : (list $1)
+ (A A) : (list $1 $2)
+ (A A A) : (list $1 $2 $3)
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => '(1))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2))
+ => '(1 2))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '(1 2 3))
+
+(check
+ (doit)
+ => 0)
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-basics-04.scm b/test-suite/lalr/test-lr-basics-04.scm
new file mode 100644
index 000000000..34b8eda77
--- /dev/null
+++ b/test-suite/lalr/test-lr-basics-04.scm
@@ -0,0 +1,31 @@
+;;; test-lr-basics-04.scm --
+;;
+;;A grammar accepting a sequence of equal tokens of arbitrary length.
+;;The return value is the value of the last parsed token.
+
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (A)
+ (e (e A) : $2
+ (A) : $1
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit)
+ => 0)
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => 1)
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => 3)
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-basics-05.scm b/test-suite/lalr/test-lr-basics-05.scm
new file mode 100644
index 000000000..ffb91d4c1
--- /dev/null
+++ b/test-suite/lalr/test-lr-basics-05.scm
@@ -0,0 +1,36 @@
+;;; test-lr-basics-05.scm --
+;;
+;;A grammar accepting a sequence of equal tokens of arbitrary length.
+;;The return value is the list of values.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (A)
+ (e (e A) : (cons $2 $1)
+ (A) : (list $1)
+ () : 0))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ (doit)
+ => 0)
+
+(check
+ (doit (make-lexical-token 'A #f 1))
+ => '(1))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2))
+ => '(2 1))
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'A #f 2)
+ (make-lexical-token 'A #f 3))
+ => '(3 2 1))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-error-recovery-01.scm b/test-suite/lalr/test-lr-error-recovery-01.scm
new file mode 100644
index 000000000..7ad756b68
--- /dev/null
+++ b/test-suite/lalr/test-lr-error-recovery-01.scm
@@ -0,0 +1,145 @@
+;;; test-lr-error-recovery-01.scm --
+;;
+;;Test error recovery with a terminator terminal.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser
+ (expect: 0)
+ (NUMBER BAD NEWLINE)
+
+ (script (lines) : (reverse $1)
+ () : 0)
+ (lines (lines line) : (cons $2 $1)
+ (line) : (list $1))
+ (line (NEWLINE) : (list 'line $1)
+ (NUMBER NEWLINE) : (list 'line $1 $2)
+ (NUMBER NUMBER NEWLINE) : (list 'line $1 $2 $3)
+
+ ;;This semantic action will cause "(recover $1
+ ;;$2)" to be the result of the offending line.
+ (error NEWLINE) : (list 'recover $1 $2)))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; No errors, grammar tests.
+
+(check
+ (doit)
+ => 0)
+
+(check
+ (doit (make-lexical-token 'NEWLINE #f #\newline))
+ => '((line #\newline)))
+
+(check
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '((line 1 #\newline)))
+
+(check
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '((line 1 2 #\newline)))
+
+(check
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '((line 1 #\newline)
+ (line 2 #\newline)))
+
+(check
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 3)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '((line 1 #\newline)
+ (line 2 #\newline)
+ (line 3 #\newline)))
+
+(check
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 3)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 41)
+ (make-lexical-token 'NUMBER #f 42)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '((line 1 #\newline)
+ (line 2 #\newline)
+ (line 3 #\newline)
+ (line 41 42 #\newline)))
+
+;;; --------------------------------------------------------------------
+;;; Successful error recovery.
+
+(check
+ ;;The BAD triggers an error, recovery happens, the first NEWLINE is
+ ;;correctly parsed as the recovery token; the second line is correct.
+ (let ((r (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'BAD #f 'alpha)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline))))
+ (cons r *error*))
+ => '(((recover #f #f)
+ (line 2 #\newline))
+ (error-handler "Syntax error: unexpected token : " . BAD)))
+
+
+(check
+ ;;The first BAD triggers an error, recovery happens skipping the
+ ;;second and third BADs, the first NEWLINE is detected as the
+ ;;synchronisation token; the second line is correct.
+ (let ((r (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'BAD #f 'alpha)
+ (make-lexical-token 'BAD #f 'beta)
+ (make-lexical-token 'BAD #f 'delta)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline))))
+ (cons r *error*))
+ => '(((recover #f #f)
+ (line 2 #\newline))
+ (error-handler "Syntax error: unexpected token : " . BAD)))
+
+;;; --------------------------------------------------------------------
+;;; Failed error recovery.
+
+(check
+ ;;End-of-input is found after NUMBER.
+ (let ((r (doit (make-lexical-token 'NUMBER #f 1))))
+ (cons r *error*))
+ => '(#f (error-handler "Syntax error: unexpected end of input")))
+
+(check
+ ;;The BAD triggers the error, the stack is rewound to the start,
+ ;;then end-of-input happens while trying to skip tokens until the
+ ;;synchronisation one is found. End-of-input is an acceptable token
+ ;;after the start.
+ (let ((r (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'BAD #f 'alpha)
+ (make-lexical-token 'BAD #f 'beta)
+ (make-lexical-token 'BAD #f 'delta))))
+ (cons r *error*))
+ => '(0 (error-handler "Syntax error: unexpected token : " . BAD)))
+
+(check
+ ;;The BAD triggers the error, the stack is rewound to the start,
+ ;;then end-of-input happens while trying to skip tokens until the
+ ;;synchronisation one is found. End-of-input is an acceptable token
+ ;;after the start.
+ (let ((r (doit (make-lexical-token 'BAD #f 'alpha))))
+ (cons r *error*))
+ => '(0 (error-handler "Syntax error: unexpected token : " . BAD)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-error-recovery-02.scm b/test-suite/lalr/test-lr-error-recovery-02.scm
new file mode 100644
index 000000000..a82498b50
--- /dev/null
+++ b/test-suite/lalr/test-lr-error-recovery-02.scm
@@ -0,0 +1,51 @@
+;;; test-lr-error-recovery-02.scm --
+;;
+;;Test the error recovery policy when the synchronisation terminal has
+;;the same category as the lookahead token that raises the error.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (A B C)
+ (alphas (alpha) : $1
+ (alphas alpha) : $2)
+ (alpha (A B) : (list $1 $2)
+ (C) : $1
+ (error C) : 'error-form))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; No error, just grammar tests.
+
+(check
+ (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'B #f 2))
+ => '(1 2))
+
+(check
+ (doit (make-lexical-token 'C #f 3))
+ => '3)
+
+;;; --------------------------------------------------------------------
+;;; Successful error recovery.
+
+(check
+ ;;Error, recovery, end-of-input.
+ (let ((r (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'C #f 3))))
+ (cons r *error*))
+ => '(error-form (error-handler "Syntax error: unexpected token : " . C)))
+
+(check
+ ;;Error, recovery, correct parse of "A B".
+ (let ((r (doit (make-lexical-token 'A #f 1)
+ (make-lexical-token 'C #f 3)
+ (make-lexical-token 'A #f 1)
+ (make-lexical-token 'B #f 2))))
+ (cons r *error*))
+ => '((1 2)
+ (error-handler "Syntax error: unexpected token : " . C)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-no-clause.scm b/test-suite/lalr/test-lr-no-clause.scm
new file mode 100644
index 000000000..fb98da6d8
--- /dev/null
+++ b/test-suite/lalr/test-lr-no-clause.scm
@@ -0,0 +1,40 @@
+;;; test-lr-no-clause.scm --
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (NUMBER COMMA NEWLINE)
+
+ (lines (lines line) : (list $2)
+ (line) : (list $1))
+ (line (NEWLINE) : #\newline
+ (NUMBER NEWLINE) : $1
+ ;;this is a rule with no semantic action
+ (COMMA NUMBER NEWLINE)))))
+ (parser (make-lexer tokens) error-handler)))
+
+(check
+ ;;correct input
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '(1))
+
+(check
+ ;;correct input with a comma, matching the rule that has no client form
+ (doit (make-lexical-token 'COMMA #f #\,)
+ (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '(#(line-3 #\, 1 #\newline)))
+
+(check
+ ;;correct input with a comma, matching the rule that has no client form
+ (doit (make-lexical-token 'NUMBER #f 1)
+ (make-lexical-token 'NEWLINE #f #\newline)
+ (make-lexical-token 'COMMA #f #\,)
+ (make-lexical-token 'NUMBER #f 2)
+ (make-lexical-token 'NEWLINE #f #\newline))
+ => '(#(line-3 #\, 2 #\newline)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-script-expression.scm b/test-suite/lalr/test-lr-script-expression.scm
new file mode 100644
index 000000000..8cf1a9bf2
--- /dev/null
+++ b/test-suite/lalr/test-lr-script-expression.scm
@@ -0,0 +1,119 @@
+;;; test-lr-script-expression.scm --
+;;
+;;Parse scripts, each line an expression.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (N O C T (left: A) (left: M) (nonassoc: U))
+
+ (script (lines) : (reverse $1))
+
+ (lines (lines line) : (cons $2 $1)
+ (line) : (list $1))
+
+ (line (T) : #\newline
+ (E T) : $1
+ (error T) : (list 'error-clause $2))
+
+ (E (N) : $1
+ (E A E) : ($2 $1 $3)
+ (E M E) : ($2 $1 $3)
+ (A E (prec: U)) : ($1 $2)
+ (O E C) : $2))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+;;; Correct input
+
+(check
+ (doit (make-lexical-token 'T #f #\newline))
+ => '(#\newline))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'T #f #\newline))
+ => '(1))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'T #f #\newline))
+ => '(3))
+
+(check
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline))
+ => '(7))
+
+(check
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'C #f #\))
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline))
+ => '(9))
+
+(check
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'C #f #\))
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline)
+
+ (make-lexical-token 'N #f 4)
+ (make-lexical-token 'M #f /)
+ (make-lexical-token 'N #f 5)
+ (make-lexical-token 'T #f #\newline))
+ => '(9 4/5))
+
+;;; --------------------------------------------------------------------
+
+(check
+ ;;Successful error recovery.
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3)
+ (make-lexical-token 'T #f #\newline)
+
+ (make-lexical-token 'N #f 4)
+ (make-lexical-token 'M #f /)
+ (make-lexical-token 'N #f 5)
+ (make-lexical-token 'T #f #\newline))
+ => '((error-clause #f)
+ 4/5))
+
+(check
+ ;;Unexpected end of input.
+ (let ((r (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2))))
+ (cons r *error*))
+ => '(#f (error-handler "Syntax error: unexpected end of input")))
+
+(check
+ ;;Unexpected end of input.
+ (let ((r (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'T #f #\newline))))
+ (cons r *error*))
+ => '(((error-clause #f))
+ (error-handler "Syntax error: unexpected token : " . T)))
+
+;;; end of file
diff --git a/test-suite/lalr/test-lr-single-expressions.scm b/test-suite/lalr/test-lr-single-expressions.scm
new file mode 100644
index 000000000..5fcd9f3b0
--- /dev/null
+++ b/test-suite/lalr/test-lr-single-expressions.scm
@@ -0,0 +1,59 @@
+;;; test-lr-single-expressions.scm --
+;;
+;;Grammar accepting single expressions.
+;;
+
+(load "common-test.scm")
+
+(define (doit . tokens)
+ (let ((parser (lalr-parser (expect: 0)
+ (N O C (left: A) (left: M) (nonassoc: U))
+
+ (E (N) : $1
+ (E A E) : ($2 $1 $3)
+ (E M E) : ($2 $1 $3)
+ (A E (prec: U)) : ($1 $2)
+ (O E C) : $2))))
+ (parser (make-lexer tokens) error-handler)))
+
+;;; --------------------------------------------------------------------
+
+(check ;correct input
+ (doit (make-lexical-token 'N #f 1))
+ => 1)
+
+(check ;correct input
+ (doit (make-lexical-token 'A #f -)
+ (make-lexical-token 'N #f 1))
+ => -1)
+
+(check ;correct input
+ (doit (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 1))
+ => 1)
+
+(check ;correct input
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2))
+ => 3)
+
+(check ;correct input
+ (doit (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3))
+ => 7)
+
+(check ;correct input
+ (doit (make-lexical-token 'O #f #\()
+ (make-lexical-token 'N #f 1)
+ (make-lexical-token 'A #f +)
+ (make-lexical-token 'N #f 2)
+ (make-lexical-token 'C #f #\))
+ (make-lexical-token 'M #f *)
+ (make-lexical-token 'N #f 3))
+ => 9)
+
+;;; end of file