Compare commits
159 Commits
Author | SHA1 | Date |
---|---|---|
jasder | 6ea283bf56 | |
jasder | 58ef8f982c | |
jasder | 9dec2f1756 | |
jasder | 041c4872f4 | |
jasder | 6e3aceb580 | |
jasder | 1616316694 | |
jasder | 59fb219eca | |
jasder | 96b2c56d39 | |
jasder | 2b797f1833 | |
jasder | 574f5f280b | |
forgetest1 | 58119ce017 | |
Terence Parr | 6255420440 | |
Alexander Bigerl | d5e8ada699 | |
Alexander Bigerl | 49cf9d4fe2 | |
Terence Parr | e404d26f61 | |
Matthew Paletta | f901c8d39a | |
Terence Parr | 478e46409e | |
Mike Lischke | 4b4ca4058a | |
Terence Parr | 697074c3e9 | |
Terence Parr | b71f6894c7 | |
Terence Parr | eec33c5210 | |
Terence Parr | 765de17f2a | |
Terence Parr | c254f00921 | |
Alexander Bigerl | 6a46c1dcb0 | |
Alexander Bigerl | 3238961241 | |
b1f6c1c4 | e6023b7555 | |
b1f6c1c4 | 0172e8661f | |
Terence Parr | 4dfacf63e2 | |
Andrei DAMIAN | e7bdb71a28 | |
ericvergnaud | 3590d4d5e1 | |
Eric Vergnaud | d82e8921a6 | |
Eric Vergnaud | 20a4d9d138 | |
parrt | c2f104cd08 | |
parrt | c0e22b99d6 | |
parrt | 2b5bb2249e | |
parrt | 5e5b6d35b4 | |
parrt | cb4e59d420 | |
parrt | 8d621b73b6 | |
Terence Parr | d889ba8668 | |
Mike Lischke | 4431f1ff69 | |
Mike Lischke | ff629d543a | |
Mike Lischke | 9d1737f33b | |
Mike Lischke | 5731e64e22 | |
Mike Lischke | 84d4ce73de | |
Mike Lischke | f881e3ec3f | |
Mike Lischke | 93c06214a6 | |
Mike Lischke | faa64fdfa7 | |
Mike Lischke | 62a829fb65 | |
Terence Parr | 0f8bddcbc1 | |
Damian Andrei | 76975c15ae | |
ericvergnaud | dc460a0514 | |
ericvergnaud | 9af2480f48 | |
不是油条 | ab2b71ce42 | |
Terence Parr | 715acdb44a | |
不是油条 | 31dd28630e | |
Karl Heinz Marbaise | 1b791a8694 | |
Jiri Slaby | 5a808b470e | |
Jiri Slaby | ac10e7bb79 | |
ericvergnaud | 60fd27ff93 | |
Xingyu Xie | c3ad8fca0a | |
Xingyu Xie | 3074c7f10f | |
Terence Parr | fde0b28dfb | |
Mike Lischke | 83911369ad | |
Felix Nieuwenhuizen | a08edf89ce | |
Felix Nieuwenhuizen | 03ee69bc3f | |
Felix Nieuwenhuizen | 98a3f7d640 | |
Felix Nieuwenhuizen | 66bf523beb | |
ericvergnaud | 84722e9fcc | |
Eric Vergnaud | 1281fb770e | |
Oscar Bonilla | c0935e190b | |
ericvergnaud | 12be1d299d | |
ericvergnaud | 5719d3496c | |
Eric Vergnaud | bb7e8bae42 | |
Eric Vergnaud | ce3700f3a0 | |
Eric Vergnaud | e414c829cf | |
Eric Vergnaud | e56f8abf3b | |
Eric Vergnaud | a95897c3fa | |
Eric Vergnaud | 2f12fa2688 | |
Eric Vergnaud | 88b582a73c | |
Eric Vergnaud | f670a3944d | |
Eric Vergnaud | 9f03a7f987 | |
Eric Vergnaud | f13debd885 | |
Eric Vergnaud | 6647c6bb0d | |
Eric Vergnaud | c0cc0fd8a4 | |
ericvergnaud | 00fbf95183 | |
Eric Vergnaud | 94f9759d14 | |
Eric Vergnaud | 26291a11d8 | |
Eric Vergnaud | 586721462c | |
ericvergnaud | 9668ce7dbb | |
Eric Vergnaud | f0ca1140a7 | |
Eric Vergnaud | e99363ac1a | |
Eric Vergnaud | 25196001d7 | |
Eric Vergnaud | 64d49cc4d8 | |
Eric Vergnaud | a60c32d369 | |
Taras Sotnikov | f7b4100fcc | |
Ken Domino | 4b649103f3 | |
Alexander Bigerl | ed2631b2d0 | |
ericvergnaud | e50ecf4961 | |
ericvergnaud | 62a0b02bf4 | |
Kko | ac9d62a994 | |
Kko | 8bcc12de28 | |
ericvergnaud | 3c67a9d4ea | |
ericvergnaud | be7bc5d81b | |
ericvergnaud | 36bde45396 | |
Eric Vergnaud | 68ea904826 | |
Eric Vergnaud | 2fbc00a8f0 | |
Eric Vergnaud | 59f1f581de | |
ericvergnaud | 365a56f10f | |
ericvergnaud | f37dfef7e5 | |
Eric Vergnaud | 5521eb2936 | |
ericvergnaud | 8b05471e6c | |
ericvergnaud | bca2536f3f | |
Ivan Kochurkin | cf16ec791e | |
kaby76 | ce479dc228 | |
Eric Vergnaud | 84d8348dc1 | |
Eric Vergnaud | 05f422268a | |
ericvergnaud | b2b1793700 | |
Eric Vergnaud | 4830925ab0 | |
Eric Vergnaud | 6a7c3ad256 | |
Eric Vergnaud | b3542834a5 | |
Eric Vergnaud | 0fa457a36e | |
Eric Vergnaud | a5589d0dab | |
Eric Vergnaud | 92f4c0707b | |
Eric Vergnaud | 0caddd8634 | |
Eric Vergnaud | daf6336430 | |
Eric Vergnaud | d3e2fdae79 | |
Eric Vergnaud | 4d0bc718df | |
Eric Vergnaud | 142a32ed5a | |
ericvergnaud | 47fd7026b3 | |
ericvergnaud | 5b34cc1ec3 | |
ericvergnaud | 4c2ccdb2c0 | |
ericvergnaud | 8227df1c1e | |
Eric Vergnaud | 020333deff | |
ericvergnaud | b050ac43de | |
Eric Vergnaud | 954649d5aa | |
Eric Vergnaud | 58da376bef | |
Eric Vergnaud | 4322672435 | |
Eric Vergnaud | 559d526648 | |
Eric Vergnaud | e8dfc30f91 | |
Eric Vergnaud | 329f1301ea | |
Eric Vergnaud | a9f11612dd | |
Eric Vergnaud | 7a9a26c7ec | |
Eric Vergnaud | 53b65c015c | |
ericvergnaud | 107f40c63c | |
ericvergnaud | 1095804388 | |
Eric Vergnaud | 5dce78c87a | |
Alexander Bigerl | d2bc3a5cde | |
Alexander Bigerl | 680a42e023 | |
Terence Parr | d34d2e1c27 | |
Eric Vergnaud | f26c95ff13 | |
parrt | 464bcbc32d | |
parrt | 19ec50f38a | |
Mike Lischke | e24f97dc0d | |
Andrei DAMIAN | 249ecf328b | |
Andrei DAMIAN | f33a0f3d13 | |
Larry Li | 4bcbbe23a1 | |
Matthew Paletta | 3b3da29d45 | |
Matthew Paletta | 6e3d900c96 | |
Matthew Paletta | 0daa97a5f0 | |
@@ -0,0 +1,128 @@
environment:
  matrix:
    - job_name: java-tool-and-runtime
    - job_name: csharp-runtime
      job_depends_on: java-tool-and-runtime
    - job_name: dart-runtime
      job_depends_on: java-tool-and-runtime
    - job_name: go-runtime
      job_depends_on: java-tool-and-runtime
    - job_name: javascript-runtime
      job_depends_on: java-tool-and-runtime
    - job_name: php-runtime
      job_depends_on: java-tool-and-runtime
    - job_name: python2-runtime
      job_depends_on: java-tool-and-runtime
    - job_name: python3-runtime
      job_depends_on: java-tool-and-runtime

matrix:
  fast_finish: false

version: '4.9.1-SNAPSHOT+AppVeyor.{build}'
cache:
  - '%USERPROFILE%\.m2'
  - '%USERPROFILE%\.nuget\packages -> **\project.json'
image: Visual Studio 2019
# not using MSBuild
build: off

for:
  - matrix:
      only:
        - job_name: java-tool-and-runtime
    build_script:
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd tool-testsuite
      - mvn -q test
      - cd ..\runtime-testsuite
      - mvn -q -Dtest=java.* test

  - matrix:
      only:
        - job_name: csharp-runtime
    build_script:
      - mvn -q -DskipTests install --batch-mode
      - dotnet build runtime/CSharp/src/Antlr4.csproj -c Release
    after_build:
      - dotnet pack runtime/CSharp/src/Antlr4.csproj -c Release
    test_script:
      - cd runtime-testsuite
      - mvn -q -Dtest=csharp.* test
    artifacts:
      - path: 'runtime\**\*.nupkg'
        name: NuGet

  - matrix:
      only:
        - job_name: dart-runtime
    install:
      - cinst -y dart-sdk --version=2.8.4
    build_script:
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd runtime-testsuite
      - mvn -q -Dtest=dart.* test -Dantlr-dart-dart="C:\tools\dart-sdk\bin\dart.exe" -Dantlr-dart-pub="C:\tools\dart-sdk\bin\pub.bat" -Dantlr-dart-dart2native="C:\tools\dart-sdk\bin\dart2native.bat"

  - matrix:
      only:
        - job_name: go-runtime
    build_script:
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd runtime-testsuite
      - mvn -q -Dtest=go.* test

  - matrix:
      only:
        - job_name: javascript-runtime
    install:
      - cinst nodejs.install
      - node --version
      - npm --version
      - npm install -g yarn@v1.22.10
    build_script:
      - cd runtime\JavaScript\
      - npm install
      - npm link
      - cd ..\..
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd runtime\JavaScript\
      - yarn test
      - cd ..\..
      - cd runtime-testsuite
      - mvn -q -Dtest=javascript.* test -Dantlr-javascript-npm="C:\Program Files\nodejs\npm.cmd" -Dantlr-javascript-nodejs="C:\Program Files\nodejs\node.exe"

  - matrix:
      only:
        - job_name: php-runtime
    install:
      - git clone https://github.com/antlr/antlr-php-runtime.git
      - mv antlr-php-runtime runtime/PHP
      - cinst -y php --params "/InstallDir:C:\tools\php"
      - cinst -y composer
    build_script:
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd runtime-testsuite
      - mvn -q -Dtest=php.* test -Dantlr-php-php="C:\tools\php\php.exe"

  - matrix:
      only:
        - job_name: python2-runtime
    build_script:
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd runtime-testsuite
      - mvn -q -Dtest=python2.* test -Dantlr-python2-python="C:\Python27\python.exe"

  - matrix:
      only:
        - job_name: python3-runtime
    build_script:
      - mvn -q -DskipTests install --batch-mode
    test_script:
      - cd runtime-testsuite
      - mvn -q -Dtest=python3.* test -Dantlr-python3-python="C:\Python35\python.exe"
@@ -0,0 +1,66 @@
version: 2.1

jobs:
  test_tool_and_runtime_java:
    docker:
      - image: cimg/openjdk:8.0
    steps:
      - checkout
      - run:
          name: build tool
          command: mvn -B -V -DskipTests=true -Dmaven.javadoc.skip=true install
      - run:
          name: test runtime
          command: |
            cd runtime-testsuite
            mvn -q -Dparallel=methods -DthreadCount=4 -Dtest=java.* test
            cd ..
      - run:
          name: test tool
          command: |
            cd tool-testsuite
            mvn -q -Dparallel=methods -DthreadCount=4 test
            cd ..
  test_runtime:
    parameters:
      test-group:
        description: The section
        type: string
        default: ALL
      target:
        description: The target
        type: string
        default: java
    docker:
      - image: cimg/openjdk:8.0
    environment:
      TARGET: << parameters.target >>
      GROUP: << parameters.test-group >>
    steps:
      - checkout
      - run:
          name: Install << parameters.target >> pre-requisites
          command: |
            f=".circleci/scripts/install-linux-<< parameters.target >>.sh"; ! [ -x "$f" ] || "$f"
      - run:
          name: Build ANTLR4 tool
          command: mvn -B -V -DskipTests=true -Dmaven.javadoc.skip=true install
      - run:
          name: Test << parameters.target >> runtime
          command: |
            .circleci/scripts/run-tests-<< parameters.target >>.sh

workflows:
  build:
    jobs:
      - test_tool_and_runtime_java
      - test_runtime:
          matrix:
            parameters:
              target: [ dart, go, python2, python3, javascript, php ]
      - test_runtime:
          matrix:
            parameters:
              # target: [ cpp, dotnet, swift ]
              target: [ cpp, dotnet ]
              test-group: [ LEXER, PARSER, RECURSION ]
@@ -0,0 +1,35 @@
#!/bin/bash

set -euo pipefail

echo "installing cpp SDK..."

sudo apt-get update -y
sudo apt-get install -y clang
sudo apt-get install -y cmake
sudo apt-get install -y pkg-config
sudo apt-get install -y uuid-dev

echo "done installing cpp SDK"

clang++ --version
cmake --version

echo "building cpp runtime..."

pushd "runtime/Cpp/"
echo $PWD
rc=0
if [ $rc == 0 ]; then
  cmake . -DCMAKE_BUILD_TYPE=release
  rc=$?
fi
if [ $rc == 0 ]; then
  make -j 8
  rc=$?
fi
popd

echo "done building cpp runtime"
@@ -0,0 +1,14 @@
#!/bin/bash

set -euo pipefail

echo "installing dart SDK..."
sudo apt-get update
sudo apt-get install apt-transport-https
sudo sh -c 'wget -qO- https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -'
sudo sh -c 'wget -qO- https://storage.googleapis.com/download.dartlang.org/linux/debian/dart_stable.list > /etc/apt/sources.list.d/dart_stable.list'
sudo apt-get update
sudo apt-get install dart=2.8.4-1
export PATH="$PATH:/usr/lib/dart/bin"
echo "done installing dart SDK"
sudo apt-get install -f
@@ -0,0 +1,19 @@
#!/bin/bash

set -euo pipefail

echo "installing .Net SDK..."
wget https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
sudo apt-get update; \
  sudo apt-get install -y apt-transport-https && \
  sudo apt-get update && \
  sudo apt-get install -y dotnet-sdk-3.1
export PATH=$PATH:~/.dotnet
echo "done installing .Net SDK"

# we need to build the runtime before test run, since we used "--no-dependencies"
# when we call dotnet cli for restore and build, in order to speed up
echo "building runtime..."
dotnet build -c Release -f netstandard2.0 runtime/CSharp/src/Antlr4.csproj
echo "done building runtime"
@@ -0,0 +1,9 @@
#!/bin/bash

set -euo pipefail

echo "installing go SDK..."
sudo apt update
sudo apt install golang-go
go version
echo "done installing go SDK"
@@ -0,0 +1,21 @@
#!/bin/bash

set -euo pipefail

# use v14 and check
echo "installing nodejs..."
curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
sudo apt-get install -y nodejs
echo node version: $(node --version)
echo "done installing nodejs"

echo "installing yarn..."
sudo npm install -g yarn@v1.22.10
echo "done installing yarn"

echo "packaging javascript runtime..."
pushd runtime/JavaScript
sudo npm install
sudo npm link
popd
echo "done packaging javascript runtime"
@@ -0,0 +1,24 @@
#!/bin/bash

echo "before patching"
ls -all /lib/x86_64-linux-gnu/ | grep libcurl

# This would fix missing CURL_OPENSSL_3
# use a dedicated temp dir in the user space
mkdir ~/libcurl3
cd ~/libcurl3
# fetch latest libcurl3
wget http://archive.ubuntu.com/ubuntu/pool/main/c/curl/libcurl3_7.47.0-1ubuntu2_amd64.deb
# extract data.tar.xz
ar x libcurl3* data.tar.xz
# extract all from data.tar.xz
tar xf data.tar.xz
# copy libcurl.so.3 where required
sudo cp -L ~/libcurl3/usr/lib/x86_64-linux-gnu/libcurl.so.4.4.0 /lib/x86_64-linux-gnu/libcurl.so.4.4.0
sudo ln -sf libcurl.so.4.4.0 /lib/x86_64-linux-gnu/libcurl.so.4
cd ..
# drop dedicated temp dir
sudo rm -rf ~/libcurl3

echo "after patching"
ls -all /lib/x86_64-linux-gnu/ | grep libcurl
@@ -0,0 +1,18 @@
#!/bin/bash

set -euo pipefail

sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
sudo apt-get update -qq

sudo apt update

sudo apt install php-all-dev
php -v

sudo apt install composer

git clone https://github.com/antlr/antlr-php-runtime.git runtime/PHP
composer install -d runtime/PHP

mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V
@@ -0,0 +1,8 @@
#!/bin/bash

set -euo pipefail

echo "installing python 2..."
sudo apt-get update -y
sudo apt-get install python2
echo "done installing python 2"
@@ -0,0 +1,8 @@
#!/bin/bash

set -euo pipefail

echo "installing python 3..."
sudo apt-get update -y
sudo apt-get install python3
echo "done installing python 3"
@@ -0,0 +1,36 @@
#!/bin/bash

set -euo pipefail

echo "installing swift SDK..."

.circleci/scripts/install-linux-libcurl3.sh

# see https://tecadmin.net/install-swift-ubuntu-1604-xenial/
sudo apt-get update -y
sudo apt-get install clang libicu-dev
sudo apt-get install libpython2.7 libpython2.7-dev

export SWIFT_VERSION=swift-5.3.2
echo "installing gpg key..."
wget -q -O - https://swift.org/keys/all-keys.asc | sudo gpg --import -
echo "downloading SDK gpg key..."
SWIFT_SDK=https://swift.org/builds/$SWIFT_VERSION-release/ubuntu1604/$SWIFT_VERSION-RELEASE/$SWIFT_VERSION-RELEASE-ubuntu16.04.tar.gz
echo $SWIFT_SDK
wget -q $SWIFT_SDK
sudo tar xzf $SWIFT_VERSION-RELEASE-ubuntu16.04.tar.gz
mv $SWIFT_VERSION-RELEASE-ubuntu16.04 $PWD/swift

export SWIFT_HOME=$PWD/swift/$SWIFT_VERSION-RELEASE-ubuntu16.04/usr/bin/
export PATH=$PWD/swift/usr/bin:$PATH

# This would fix a known linker issue mentioned in https://bugs.swift.org/browse/SR-2299
sudo ln -sf ld.gold /usr/bin/ld
# This would fix missing libtinfo.so.5
sudo apt install libncurses5

echo "done installing swift SDK..."

# check swift
swift --version
swift build --version
@@ -0,0 +1,17 @@
#!/bin/bash

set -euo pipefail

pushd runtime-testsuite
echo "running maven tests..."
if [ $GROUP == "LEXER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=cpp.* test
elif [ $GROUP == "PARSER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=cpp.* test
elif [ $GROUP == "RECURSION" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=cpp.* test
else
  mvn -q -Dtest=cpp.* test
fi
popd
@@ -0,0 +1,11 @@
#!/bin/bash

set -euo pipefail

dart --version

pushd runtime-testsuite
echo "running maven tests..."
# mvn -q -Dparallel=classes -DthreadCount=4 -Dtest=dart.* test
mvn -q -Dtest=dart.* test
popd
@@ -0,0 +1,16 @@
#!/bin/bash

set -euo pipefail

pushd runtime-testsuite/
echo "running maven tests..."
if [ $GROUP == "LEXER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=csharp.* test
elif [ $GROUP == "PARSER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=csharp.* test
elif [ $GROUP == "RECURSION" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=csharp.* test
else
  mvn -q -Dtest=csharp.* test
fi
popd
@@ -0,0 +1,10 @@
#!/bin/bash

set -euo pipefail

go version

pushd runtime-testsuite
echo "running maven tests..."
mvn -q -Dparallel=methods -DthreadCount=4 -Dtest=go.* test
popd
@@ -0,0 +1,23 @@
#!/bin/bash

set -euo pipefail

declare -i RESULT=0

pushd runtime/JavaScript

echo "running jest tests..."
yarn test
RESULT+=$?

popd

pushd runtime-testsuite

echo "running maven tests..."
mvn -q -Dtest=javascript.* test
RESULT+=$?

popd

exit $RESULT
@@ -0,0 +1,11 @@
#!/bin/bash

set -euo pipefail

php -v

php_path=$(which php)
pushd runtime-testsuite
echo "running maven tests..."
mvn -q -DPHP_PATH="${php_path}" -Dparallel=methods -DthreadCount=4 -Dtest=php.* test
popd
@@ -0,0 +1,24 @@
#!/bin/bash

set -euo pipefail

python2 --version

pushd runtime/Python2/tests
echo "running native tests..."
python2 run.py
rc=$?
if [ $rc != 0 ]; then
  echo "failed running native tests"
fi
popd

if [ $rc == 0 ]; then
  pushd runtime-testsuite
  echo "running maven tests..."
  mvn -q -Dtest=python2.* test
  rc=$?
  popd
fi

# return $rc
@@ -0,0 +1,24 @@
#!/bin/bash

set -euo pipefail

python3 --version

pushd runtime/Python3/tests
echo "running native tests..."
python3 run.py
rc=$?
if [ $rc != 0 ]; then
  echo "failed running native tests"
fi
popd

if [ $rc == 0 ]; then
  pushd runtime-testsuite
  echo "running maven tests..."
  mvn -q -Dtest=python3.* test
  rc=$?
  popd
fi

# return $rc
@@ -0,0 +1,27 @@
#!/bin/bash

set -euo pipefail

pushd runtime/Swift
echo "running native tests..."
./boot.py --test
rc=$?
if [ $rc != 0 ]; then
  echo "failed running native tests"
fi
popd

if [ $rc == 0 ]; then
  pushd runtime-testsuite
  echo "running maven tests..."
  if [ $GROUP == "LEXER" ]; then
    mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=swift.* test
  elif [ $GROUP == "PARSER" ]; then
    mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=swift.* test
  elif [ $GROUP == "RECURSION" ]; then
    mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=swift.* test
  else
    mvn -q -Dtest=swift.* test
  fi
  popd
fi
File diff suppressed because it is too large
@@ -0,0 +1,30 @@
Sharing the trouble of getting GitHub Actions runners to work on a Mac coming straight out of the factory, running Big Sur.

XCode (you need XCode to build the Swift runtime):
- install XCode from the Mac App Store
- launch it; this will force installation of components
- go to Preferences -> Locations and select XCode as Command Line Tools

Brew (you need Brew to install Maven):
- get the script from https://brew.sh
- once installed, run the following:
    echo 'eval $(/opt/homebrew/bin/brew shellenv)' >> /Users/{user-account}/.zprofile
    eval $(/opt/homebrew/bin/brew shellenv)
  (you need to repeat these last steps for each user account)

Maven (supposedly installed by the GitHub workflow, but it's convenient to have a global install for troubleshooting):
- brew install maven

JDK (we need a specific JDK):
- download openjdk8 from Oracle (later versions break the build due to some packages having disappeared)
- install it -> this will mess up your JAVA_HOME completely, pointing to /Library/Internet...
- fix the JAVA_HOME mess as follows:
    sudo rm -fr /Library/Internet\ Plug-Ins/JavaAppletPlugin.plugin
    sudo rm -fr /Library/PreferencePanes/JavaControlPanel.prefpane

C++:
- brew install cmake

C#:
- .github/scripts/install-dotnet-on-osx.sh
  (you need to repeat this step for each user account)
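The following is a minimal consolidated sketch of the per-user shell steps above; it assumes a default Homebrew install under /opt/homebrew and the Oracle JDK 8 stub paths mentioned above, so adjust for your machine:

```bash
# Hypothetical consolidation of the per-user setup steps described above.
# Assumes Homebrew is already installed under /opt/homebrew (Apple Silicon default).
echo 'eval $(/opt/homebrew/bin/brew shellenv)' >> ~/.zprofile
eval $(/opt/homebrew/bin/brew shellenv)

# Maven for troubleshooting builds, cmake for the C++ runtime.
brew install maven cmake

# After installing the Oracle JDK 8 package, remove the stubs that hijack JAVA_HOME.
sudo rm -fr "/Library/Internet Plug-Ins/JavaAppletPlugin.plugin"
sudo rm -fr /Library/PreferencePanes/JavaControlPanel.prefpane
```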
@@ -0,0 +1,15 @@
#!/bin/bash

set -euo pipefail

cd runtime-testsuite/

if [ $GROUP == "LEXER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=cpp.* test
elif [ $GROUP == "PARSER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=cpp.* test
elif [ $GROUP == "RECURSION" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=cpp.* test
else
  mvn -q -Dtest=cpp.* test
fi
@@ -0,0 +1,23 @@
#!/bin/bash

set -euo pipefail

export PATH=$PATH:~/.dotnet

# we need to build the runtime before test run, since we used "--no-dependencies"
# when we call dotnet cli for restore and build, in order to speed up

dotnet build -c Release -f netstandard2.0 runtime/CSharp/Antlr4.csproj

# run tests
cd runtime-testsuite/

if [ $GROUP == "LEXER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=csharp.* test
elif [ $GROUP == "PARSER" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=csharp.* test
elif [ $GROUP == "RECURSION" ]; then
  mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=csharp.* test
else
  mvn -q -Dtest=csharp.* test
fi
@@ -0,0 +1,50 @@
#!/bin/bash

set -euo pipefail

# Linux-specific setup; it has to live here because environment
# variables don't carry across scripts.
if [ $RUNNER_OS == "Linux" ]; then
  export SWIFT_VERSION=swift-5.0.1
  export SWIFT_HOME=$(pwd)/swift/$SWIFT_VERSION-RELEASE-ubuntu16.04/usr/bin/
  export PATH=$SWIFT_HOME:$PATH

  # download swift
  mkdir swift
  curl https://swift.org/builds/$SWIFT_VERSION-release/ubuntu1604/$SWIFT_VERSION-RELEASE/$SWIFT_VERSION-RELEASE-ubuntu16.04.tar.gz -s | tar xz -C swift &> /dev/null
fi

if [ -z "${JAVA_HOME}" ]
then
  export JAVA_HOME="$(java -XshowSettings:properties -version 2>&1 |
    grep 'java\.home' | awk '{ print $3 }')"
fi
echo "export JAVA_HOME=$JAVA_HOME"

# check swift
swift --version
swift build --version

# run swift tests
pushd runtime/Swift
./boot.py --test
rc=$?
popd

if [ $rc == 0 ]; then
  # run java tests
  cd runtime-testsuite/
  if [ $GROUP == "LEXER" ]; then
    mvn -e -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest="swift.*" test
  elif [ $GROUP == "PARSER" ]; then
    mvn -e -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest="swift.*" test
  elif [ $GROUP == "RECURSION" ]; then
    mvn -e -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest="swift.*" test
  else
    mvn -e -q -Dtest=swift.* test
  fi
  rc=$?
  cat target/surefire-reports/*.dumpstream || true
fi
exit $rc
@@ -0,0 +1,34 @@
name: MacOSX

on:
  push:
    branches: [ master ]
  pull_request:
    branches: [ master ]

jobs:
  build:
    runs-on: [self-hosted, macOS, x64]
    strategy:
      fail-fast: false
      matrix:
        # TARGET: [swift, cpp, dotnet] disabling dotnet which is unstable on M1
        TARGET: [swift, cpp]
        GROUP: [LEXER, PARSER, RECURSION]
    steps:
      - uses: actions/checkout@v2
      - name: Set up JDK 1.8
        uses: actions/setup-java@v1
        with:
          java-version: 1.8
      - name: Set up Maven
        uses: stCarolas/setup-maven@v4
        with:
          maven-version: 3.5.4
      - name: Build tool with Maven
        run: mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V
      - name: Test with Maven
        run: arch -x86_64 .github/scripts/run-tests-${{ matrix.TARGET }}.sh
        env:
          TARGET: ${{ matrix.TARGET }}
          GROUP: ${{ matrix.GROUP }}
@@ -100,3 +100,4 @@ javac-services.0.log.lck

# Don't ignore python tests
!runtime/Python3/test/
Antlr4.sln
.travis.yml
@@ -12,105 +12,12 @@ cache:
    - $HOME/Library/Caches/Homebrew

stages:
  - smoke-test
  - main-test
#  - smoke-test
#  - main-test
  - extended-test

matrix:
  include:
    - os: linux
      dist: focal
      compiler: clang
      jdk: openjdk11
      env:
        - TARGET=cpp
        - CXX=g++-10
        - GROUP=LEXER
      stage: main-test
      addons:
        apt:
          sources:
            - sourceline: 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main'
          packages:
            - g++-10
            - uuid-dev
            - clang-10
    - os: linux
      dist: focal
      compiler: clang
      jdk: openjdk11
      env:
        - TARGET=cpp
        - CXX=g++-10
        - GROUP=PARSER
      stage: main-test
      addons:
        apt:
          sources:
            - sourceline: 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main'
          packages:
            - g++-10
            - uuid-dev
            - clang-10
    - os: linux
      dist: focal
      compiler: clang
      jdk: openjdk11
      env:
        - TARGET=cpp
        - CXX=g++-10
        - GROUP=RECURSION
      stage: main-test
      addons:
        apt:
          sources:
            - sourceline: 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main'
          packages:
            - g++-10
            - uuid-dev
            - clang-10
    - os: osx
      compiler: clang
      osx_image: xcode10.2
      env:
        - TARGET=cpp
        - GROUP=LEXER
      stage: extended-test
    - os: osx
      compiler: clang
      osx_image: xcode10.2
      env:
        - TARGET=cpp
        - GROUP=PARSER
      stage: extended-test
    - os: osx
      compiler: clang
      osx_image: xcode10.2
      env:
        - TARGET=cpp
        - GROUP=RECURSION
      stage: extended-test
    - os: osx
      compiler: clang
      osx_image: xcode10.2
      env:
        - TARGET=swift
        - GROUP=LEXER
      stage: main-test
    - os: osx
      compiler: clang
      osx_image: xcode10.2
      env:
        - TARGET=swift
        - GROUP=PARSER
      stage: main-test
    - os: osx
      compiler: clang
      osx_image: xcode10.2
      env:
        - TARGET=swift
        - GROUP=RECURSION
      stage: main-test
    - os: linux
      dist: xenial
      compiler: clang
@@ -118,95 +25,6 @@ matrix:
        - TARGET=swift
        - GROUP=ALL
      stage: extended-test
    - os: osx
      osx_image: xcode10.2
      env:
        - TARGET=dotnet
        - GROUP=LEXER
      stage: extended-test
    - os: osx
      osx_image: xcode10.2
      env:
        - TARGET=dotnet
        - GROUP=PARSER
      stage: extended-test
    - os: osx
      osx_image: xcode10.2
      env:
        - TARGET=dotnet
        - GROUP=RECURSION
      stage: extended-test
    - os: linux
      dist: trusty
      jdk: openjdk7
      env: TARGET=java
      stage: extended-test
    - os: linux
      jdk: openjdk8
      env: TARGET=java
      stage: smoke-test
    - os: linux
      jdk: openjdk8
      env:
        - TARGET=dotnet
        - GROUP=MAIN
      stage: main-test
    - os: linux
      jdk: openjdk8
      env: TARGET=dart
      stage: main-test
    - os: linux
      language: php
      php:
        - 7.2
      jdk: openjdk8
      env: TARGET=php
      stage: main-test
    - os: linux
      jdk: openjdk8
      env:
        - TARGET=dotnet
        - GROUP=LEXER
      stage: extended-test
    - os: linux
      jdk: openjdk8
      env:
        - TARGET=dotnet
        - GROUP=PARSER
      stage: extended-test
    - os: linux
      jdk: openjdk8
      env:
        - TARGET=dotnet
        - GROUP=RECURSION
      stage: extended-test
    - os: linux
      jdk: openjdk8
      env: TARGET=python2
      stage: main-test
    - os: linux
      jdk: openjdk8
      env: TARGET=python3
      addons:
        apt:
          sources:
            - deadsnakes # source required so it finds the package definition below
          packages:
            - python3.7
      stage: main-test
    - os: linux
      dist: trusty
      jdk: openjdk8
      env: TARGET=javascript
      stage: main-test
      before_install:
        - nvm install 14 # otherwise it runs by default on node 8
        - f="./.travis/before-install-linux-javascript.sh"; ! [ -x "$f" ] || "$f"
    - os: linux
      dist: trusty
      jdk: openjdk8
      env: TARGET=go
      stage: main-test

before_install:
  - f="./.travis/before-install-$TRAVIS_OS_NAME-$TARGET.sh"; ! [ -x "$f" ] || "$f"
@@ -7,7 +7,7 @@ export PATH=$PATH:/Users/travis/.dotnet
# we need to build the runtime before test run, since we used "--no-dependencies"
# when we call dotnet cli for restore and build, in order to speed up

dotnet build -c Release -f netstandard2.0 ../runtime/CSharp/Antlr4.csproj
dotnet build -c Release -f netstandard2.0 ../runtime/CSharp/src/Antlr4.csproj

# call test
README.md
@@ -1,6 +1,14 @@
# ANTLR v4

[![Build Travis-CI Status](https://travis-ci.org/antlr/antlr4.svg?branch=master)](https://travis-ci.org/antlr/antlr4) [![Build AppVeyor Status](https://ci.appveyor.com/api/projects/status/5acpbx1pg7bhgh8v/branch/master?svg=true)](https://ci.appveyor.com/project/parrt/antlr4) [![Java 7+](https://img.shields.io/badge/java-7+-4c7e9f.svg)](http://java.oracle.com) [![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/antlr/antlr4/master/LICENSE.txt)
[![Java 7+](https://img.shields.io/badge/java-7+-4c7e9f.svg)](http://java.oracle.com)
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/antlr/antlr4/master/LICENSE.txt)

**Build status**

[![Github CI Build Status (MacOSX)](https://img.shields.io/github/workflow/status/antlr/antlr4/MacOSX?label=MacOSX)](https://github.com/antlr/antlr4/actions)
[![AppVeyor CI Build Status (Windows)](https://img.shields.io/appveyor/build/parrt/antlr4?label=Windows)](https://ci.appveyor.com/project/parrt/antlr4)
[![Circle CI Build Status (Linux)](https://img.shields.io/circleci/build/gh/antlr/antlr4/master?label=Linux)](https://app.circleci.com/pipelines/github/antlr/antlr4)
[![Travis-CI Build Status (Swift-Linux)](https://img.shields.io/travis/antlr/antlr4.svg?label=Linux-Swift&branch=master)](https://travis-ci.com/github/antlr/antlr4)

**ANTLR** (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing, or translating structured text or binary files. It's widely used to build languages, tools, and frameworks. From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface (or visitor) that makes it easy to respond to the recognition of phrases of interest.

@@ -13,8 +21,8 @@
* [Terence Parr](http://www.cs.usfca.edu/~parrt/), parrt@cs.usfca.edu
ANTLR project lead and supreme dictator for life
[University of San Francisco](http://www.usfca.edu/)
* [Sam Harwell](http://tunnelvisionlabs.com/) (Tool co-author, Java and C# target)
* Eric Vergnaud (Javascript, Python2, Python3 targets and significant work on C# target)
* [Sam Harwell](http://tunnelvisionlabs.com/) (Tool co-author, Java and original C# target)
* [Eric Vergnaud](https://github.com/ericvergnaud) (Javascript, Python2, Python3 targets and maintenance of C# target)
* [Peter Boyer](https://github.com/pboyer) (Go target)
* [Mike Lischke](http://www.soft-gems.net/) (C++ completed target)
* Dan McLaughlin (C++ initial target)

@@ -32,7 +40,7 @@ ANTLR project lead and supreme dictator for life
* [Official site](http://www.antlr.org/)
* [Documentation](https://github.com/antlr/antlr4/blob/master/doc/index.md)
* [FAQ](https://github.com/antlr/antlr4/blob/master/doc/faq/index.md)
* [ANTLR code generation targets](https://github.com/antlr/antlr4/blob/master/doc/targets.md)<br>(Currently: Java, C#, Python2|3, JavaScript, Go, C++, Swift)
* [ANTLR code generation targets](https://github.com/antlr/antlr4/blob/master/doc/targets.md)<br>(Currently: Java, C#, Python2|3, JavaScript, Go, C++, Swift, Dart, PHP)
* [Java API](http://www.antlr.org/api/Java/index.html)
* [ANTLR v3](http://www.antlr3.org/)
* [v3 to v4 Migration, differences](https://github.com/antlr/antlr4/blob/master/doc/faq/general.md)
@@ -8,7 +8,7 @@
  <parent>
    <groupId>org.antlr</groupId>
    <artifactId>antlr4-master</artifactId>
    <version>4.9.1</version>
    <version>4.9.3-SNAPSHOT</version>
  </parent>
  <artifactId>antlr4-maven-plugin</artifactId>
  <packaging>maven-plugin</packaging>
@@ -55,7 +55,7 @@ import java.util.Set;
	name = "antlr4",
	defaultPhase = LifecyclePhase.GENERATE_SOURCES,
	requiresDependencyResolution = ResolutionScope.COMPILE,
	requiresProject = true)
	requiresProject = true, threadSafe = true)
public class Antlr4Mojo extends AbstractMojo {

	// First, let's deal with the options that the ANTLR tool itself
appveyor.yml
@@ -1,22 +0,0 @@
version: '4.9.1-SNAPSHOT+AppVeyor.{build}'
cache:
  - '%USERPROFILE%\.m2'
  - '%USERPROFILE%\.nuget\packages -> **\project.json'
image: Visual Studio 2019
build: off
install:
  - git clone https://github.com/antlr/antlr-php-runtime.git
  - mv antlr-php-runtime runtime/PHP
  - cinst -y php --params "/InstallDir:C:\tools\php"
  - cinst -y composer
  - cinst -y dart-sdk --version=2.8.4
build_script:
  - mvn -DskipTests install --batch-mode
  - dotnet build runtime/CSharp/Antlr4.csproj -c Release
after_build:
  - dotnet pack runtime/CSharp/Antlr4.csproj -c Release
test_script:
  - mvn install -Dantlr-php-php="C:\tools\php\php.exe" -Dantlr-dart-dart="C:\tools\dart-sdk\bin\dart.exe" -Dantlr-dart-pub="C:\tools\dart-sdk\bin\pub.bat" -Dantlr-dart-dart2native="C:\tools\dart-sdk\bin\dart2native.bat" -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" --batch-mode
artifacts:
  - path: 'runtime\**\*.nupkg'
    name: NuGet
@@ -269,6 +269,7 @@ YYYY/MM/DD, github id, Full name, email
2020/06/02, cohomology, Kilian Kilger, kkilger AT gmail.com
2020/06/04, IohannRabeson, Iohann Rabeson, iotaka6@gmail.com
2020/06/04, sigmasoldi3r, Pablo Blanco, pablobc.1995@gmail.com
2020/06/15, mattpaletta, Matthew Paletta, mattpaletta@gmail.com
2020/07/01, sha-N, Shan M Mathews, admin@bluestarqatar.com
2020/08/22, stevenjohnstone, Steven Johnstone, steven.james.johnstone@gmail.com
2020/09/06, ArthurSonzogni, Sonzogni Arthur, arthursonzogni@gmail.com

@@ -284,3 +285,12 @@ YYYY/MM/DD, github id, Full name, email
2020/11/26, mr-c, Michael R. Crusoe, 1330696+mr-c@users.noreply.github.com
2020/12/01, maxence-lefebvre, Maxence Lefebvre, maxence-lefebvre@users.noreply.github.com
2020/12/03, electrum, David Phillips, david@acz.org
2021/01/25, l215884529, Qiheng Liu, 13607681+l215884529@users.noreply.github.com
2021/02/02, tsotnikov, Taras Sotnikov, taras.sotnikov@gmail.com
2021/02/10, jirislaby, Jiri Slaby, jirislaby@gmail.com
2021/02/21, namasikanam, Xingyu Xie, namasikanam@gmail.com
2021/02/27, khmarbaise, Karl Heinz Marbaise, github@soebes.com
2021/03/02, hackeris
2021/03/03, xTachyon, Damian Andrei, xTachyon@users.noreply.github.com
2021/04/07, b1f6c1c4, Jinzheng Tu, b1f6c1c4@gmail.com
2021/04/24, bigerl, Alexander Bigerl, alexander [äät] bigerl [pkt] eu
|
|||
Now, make sure C# runtime is built and installed locally.
|
||||
|
||||
```bash
|
||||
cd ~/antlr/code/antlr4/runtime/CSharp/runtime/CSharp
|
||||
# kill previous ones manually as "xbuild /t:Clean" didn't seem to do it
|
||||
find . -name '*.dll' -exec rm {} \;
|
||||
# build
|
||||
xbuild /p:Configuration=Release Antlr4.Runtime/Antlr4.Runtime.mono.csproj
|
||||
cd ~/antlr/code/antlr4/runtime/CSharp/src
|
||||
rm -rf `find . -name '{obj,bin}'`
|
||||
dotnet build -c Release runtime/CSharp/src/Antlr4.csproj
|
||||
```
|
||||
|
||||
C++ test rig automatically builds C++ runtime during tests. Others don't need a prebuilt lib.
|
||||
|
|
|
@@ -25,6 +25,22 @@ Checking connectivity... done.
Checking out files: 100% (1427/1427), done.
```

# Check your environment

If you are starting from a clean, minimum Ubuntu OS, check your environment.

```bash
$ sudo apt-get update
$ # Get Java
$ java > /dev/null 2>&1
$ if [[ "$?" != "0" ]]; then sudo apt install -y openjdk-11-jre-headless; fi
$ # Get Mvn
$ mvn > /dev/null 2>&1
$ if [[ "$?" != "0" ]]; then sudo apt install -y maven; fi
```

# Compile

```bash
@@ -36,7 +36,7 @@ using Antlr4.Runtime.Tree;

public void MyParseMethod() {
    String input = "your text to parse here";
    ICharStream stream = CharStreams.fromstring(input);
    ICharStream stream = CharStreams.fromString(input);
    ITokenSource lexer = new MyGrammarLexer(stream);
    ITokenStream tokens = new CommonTokenStream(lexer);
    MyGrammarParser parser = new MyGrammarParser(tokens);
@@ -91,7 +91,7 @@ Now a fully functioning script might look like the following:

```javascript
import antlr4 from 'antlr4';
import MyGrammarLexer from './MyGrammarLexer.js');
import MyGrammarLexer from './MyGrammarLexer.js';
import MyGrammarParser from './MyGrammarParser.js';
import MyGrammarListener from './MyGrammarListener.js';
@@ -79,7 +79,7 @@ These more or less correspond to `isJavaIdentifierPart` and `isJavaIdentifierSta

## Literals

ANTLR does not distinguish between character and string literals as most languages do. All literal strings one or more characters in length are enclosed in single quotes such as `’;’`, `’if’`, `’>=’`, and `’\’` (refers to the one-character string containing the single quote character). Literals never contain regular expressions.
ANTLR does not distinguish between character and string literals as most languages do. All literal strings one or more characters in length are enclosed in single quotes such as `’;’`, `’if’`, `’>=’`, and `’\’’` (refers to the one-character string containing the single quote character). Literals never contain regular expressions.

Literals can contain Unicode escape sequences of the form `’\uXXXX’` (for Unicode code points up to `’U+FFFF’`) or `’\u{XXXXXX}’` (for all Unicode code points), where `’XXXX’` is the hexadecimal Unicode code point value.
@@ -24,11 +24,11 @@ $ git push upstream 4.9-rc1

## Copy PHP runtime over

Bump version to 4.9 in `runtime/PHP/src/RuntimeMetaData.php` in separate repository and commit plus push.
Bump version to 4.9 in `~/antlr/code/antlr-php-runtime/src/RuntimeMetaData.php` in separate repository and commit plus push.

```
cd ~/antlr/code/antlr-php-runtime
... vi src/RuntimeMetaData.php ...
cd ~/antlr/code/antlr-php-runtime/src
... vi RuntimeMetaData.php ...
git commit -a -m "Update PHP Runtime to latest version"
```

@@ -36,6 +36,7 @@ them back over in the ANTLR repo:

```
cd runtime
rm -rf PHP
mkdir PHP
cp ~/antlr/code/antlr-php-runtime/LICENSE PHP
cp ~/antlr/code/antlr-php-runtime/README.md PHP

@@ -54,10 +55,11 @@ Edit the repository looking for 4.5 or whatever and update it. Bump version in t
* runtime/Python2/src/antlr4/Recognizer.py
* runtime/Python3/setup.py
* runtime/Python3/src/antlr4/Recognizer.py
* runtime/CSharp/Antlr4.csproj
* runtime/CSharp/src/Antlr4.csproj
* runtime/PHP/src/RuntimeMetaData.php
* runtime/JavaScript/package.json
* runtime/JavaScript/src/antlr4/Recognizer.js
* runtime/JavaScript/package-lock.json
* runtime/Cpp/VERSION
* runtime/Cpp/runtime/src/RuntimeMetaData.cpp
* runtime/Cpp/cmake/ExternalAntlr4Cpp.cmake

@@ -66,6 +68,11 @@ Edit the repository looking for 4.5 or whatever and update it. Bump version in t
* runtime/Swift/Antlr4/org/antlr/v4/runtime/RuntimeMetaData.swift
* runtime/Dart/lib/src/runtime_meta_data.dart
* runtime/Dart/pubspec.yaml
* runtime/Swift/Tests/Antlr4Tests/RuntimeMetaDataTests.swift
* runtime/Swift/Sources/Antlr4/RuntimeMetaData.swift
* runtime/CSharp/src/Tree/Xpath/XPathLexer.cs
* runtime/CSharp/src/README.md
* runtime/CSharp/src/Properties/AssemblyInfo.cs
* tool/src/org/antlr/v4/codegen/target/GoTarget.java
* tool/src/org/antlr/v4/codegen/target/CppTarget.java
* tool/src/org/antlr/v4/codegen/target/CSharpTarget.java

@@ -73,6 +80,7 @@ Edit the repository looking for 4.5 or whatever and update it. Bump version in t
* tool/src/org/antlr/v4/codegen/target/Python2Target.java
* tool/src/org/antlr/v4/codegen/target/Python3Target.java
* tool/src/org/antlr/v4/codegen/target/SwiftTarget.java
* tool/src/org/antlr/v4/codegen/target/PHPTarget.java
* tool/src/org/antlr/v4/codegen/Target.java
* tool/resources/org/antlr/v4/tool/templates/codegen/Swift/Swift.stg

@@ -80,11 +88,12 @@ Here is a simple script to display any line from the critical files with, say, `

```bash
mvn clean
rm -rf runtime/CSharp/bin
rm -rf runtime/CSharp/obj
rm -rf runtime/CSharp/src/bin
rm -rf runtime/CSharp/src/obj
rm -rf runtime/Cpp/runtime/build
rm -rf runtime/gen
rm -rf runtime/JavaScript/dist
find tool runtime -type f -exec grep -l '4\.9' {} \;
find runtime runtime -type f -exec grep -l '4\.9' {} \;
```

Commit to repository.

@@ -293,6 +302,7 @@ cd runtime/JavaScript

```bash
cd runtime/JavaScript
npm update
npm install
npm run build
npm login
npm publish   # don't put antlr4 on there or it will try to push the old version for some reason

@@ -321,10 +331,10 @@ Of course you need Mono and `nuget` to be installed. On mac:

From @kvanTTT: Install `dotnet` on any platform (see https://dotnet.microsoft.com/download) and run the following command on any OS (Win, Linux, macOS):

* building: `dotnet build runtime/CSharp/Antlr4.csproj -c Release`
Output `.dll` will be in `runtime/CSharp/bin/Release/netstandard2.0` or in `runtime/CSharp/bin/Release/netstandard2.1`
* packing: `dotnet pack runtime/CSharp/Antlr4.csproj -c Release`
Output `.nupkg` will be in `runtime/CSharp/bin/Release/Antlr4.Runtime.Standard.4.9.0.nupkg`
* building: `dotnet build runtime/CSharp/src/Antlr4.csproj -c Release`
Output `.dll` will be in `runtime/CSharp/src/bin/Release/netstandard2.0` or in `runtime/CSharp/src/bin/Release/netstandard2.1`
* packing: `dotnet pack runtime/CSharp/src/Antlr4.csproj -c Release`
Output `.nupkg` will be in `runtime/CSharp/src/bin/Release/Antlr4.Runtime.Standard.4.9.1.nupkg`

Alternatively, you can install Visual Studio 2017 and make sure to check boxes with .NET Core SDK.

@@ -460,6 +470,8 @@ popd

### Dart

Install Dart SDK from https://dart.dev/get-dart

Push to pub.dev

```bash

@@ -473,7 +485,13 @@ Otherwise enter `N` to ignore the warning.

## Update javadoc for runtime and tool

First, gen javadoc:
Above build should make latest in

```
~/.m2/repository/org/antlr/antlr4-runtime/4.9/antlr4-runtime-4.9
```

but you can regen (watch pom version!):

```bash
$ cd antlr4
@@ -0,0 +1,5 @@

dsdfewrfff
function d(){
    dsflsfks;lfksldf
}

pom.xml

@@ -13,7 +13,7 @@
  </parent>
  <groupId>org.antlr</groupId>
  <artifactId>antlr4-master</artifactId>
  <version>4.9.1</version>
  <version>4.9.3-SNAPSHOT</version>
  <packaging>pom</packaging>

  <name>ANTLR 4</name>
@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.antlr</groupId>
    <artifactId>antlr4-master</artifactId>
    <version>4.9.1</version>
    <version>4.9.3-SNAPSHOT</version>
    <relativePath>../../pom.xml</relativePath>
  </parent>
  <artifactId>antlr4-runtime-test-annotations</artifactId>
@@ -10,7 +10,7 @@
  <parent>
    <groupId>org.antlr</groupId>
    <artifactId>antlr4-master</artifactId>
    <version>4.9.1</version>
    <version>4.9.3-SNAPSHOT</version>
  </parent>
  <artifactId>antlr4-runtime-testsuite</artifactId>
  <name>ANTLR 4 Runtime Tests (2nd generation)</name>
@@ -9,7 +9,7 @@
  <parent>
    <groupId>org.antlr</groupId>
    <artifactId>antlr4-master</artifactId>
    <version>4.9.1</version>
    <version>4.9.3-SNAPSHOT</version>
    <relativePath>../../pom.xml</relativePath>
  </parent>
  <artifactId>antlr4-runtime-test-annotation-processors</artifactId>
@@ -6,10 +6,11 @@

package org.antlr.v4.test.runtime;

import com.sun.tools.javac.main.JavaCompiler;
import com.sun.tools.javac.model.JavacElements;
import com.sun.tools.javac.processing.JavacProcessingEnvironment;
import com.sun.tools.javac.tree.JCTree;
import com.sun.tools.javac.tree.TreeMaker;
import com.sun.tools.javac.util.Context;
import com.sun.tools.javac.util.List;

import javax.annotation.processing.AbstractProcessor;

@@ -20,6 +21,7 @@ import javax.annotation.processing.SupportedSourceVersion;
import javax.lang.model.SourceVersion;
import javax.lang.model.element.Element;
import javax.lang.model.element.TypeElement;
import java.lang.reflect.Field;
import java.util.Set;

/**

@@ -38,6 +40,7 @@ import java.util.Set;
@SupportedAnnotationTypes({"org.antlr.v4.test.runtime.CommentHasStringValue"})
@SupportedSourceVersion(SourceVersion.RELEASE_7)
public class CommentHasStringValueProcessor extends AbstractProcessor {

	protected JavacElements utilities;
	protected TreeMaker treeMaker;

@@ -46,9 +49,21 @@ public class CommentHasStringValueProcessor extends AbstractProcessor {
		super.init(processingEnv);
		// Messager messager = processingEnv.getMessager();
		// messager.printMessage(Diagnostic.Kind.NOTE, "WOW INIT--------------------");
		JavacProcessingEnvironment javacProcessingEnv = (JavacProcessingEnvironment) processingEnv;
		utilities = javacProcessingEnv.getElementUtils();
		treeMaker = TreeMaker.instance(javacProcessingEnv.getContext());
		utilities = (JavacElements)processingEnv.getElementUtils();
		treeMaker = TreeMaker.instance(extractContext(utilities));
	}

	private static Context extractContext(JavacElements utilities) {
		try {
			Field compilerField = JavacElements.class.getDeclaredField("javaCompiler");
			compilerField.setAccessible(true);
			JavaCompiler compiler = (JavaCompiler)compilerField.get(utilities);
			Field contextField = JavaCompiler.class.getDeclaredField("context");
			contextField.setAccessible(true);
			return (Context)contextField.get(compiler);
		} catch (NoSuchFieldException | IllegalAccessException e) {
			throw new IllegalStateException(e);
		}
	}

	@Override

@@ -66,8 +81,7 @@ public class CommentHasStringValueProcessor extends AbstractProcessor {
		else if ( elementTree instanceof JCTree.JCMethodDecl ) {
			JCTree.JCStatement[] statements = new JCTree.JCStatement[1];
			statements[0] = treeMaker.Return(literal);
			JCTree.JCBlock body = treeMaker.Block(0, List.from(statements));
			((JCTree.JCMethodDecl)elementTree).body = body;
			((JCTree.JCMethodDecl)elementTree).body = treeMaker.Block(0, List.from(statements));
		}
	}
	return true;
@@ -11,9 +11,7 @@ import org.antlr.v4.runtime.misc.Pair;
import org.antlr.v4.runtime.misc.Utils;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DefaultToolListener;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.*;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;

@@ -44,6 +42,7 @@ import static org.junit.Assume.assumeFalse;
 * @since 4.6.
 */
public abstract class BaseRuntimeTest {

	public final static String[] Targets = {
		"Cpp",
		"CSharp",

@@ -56,20 +55,45 @@ public abstract class BaseRuntimeTest {
		"Swift"
	};

	static {
		// Add heartbeat thread to gen minimal output for travis, appveyor to
		// avoid timeout.
	@BeforeClass
	public static void startHeartbeatToAvoidTimeout() {
		if (isTravisCI() || isAppVeyorCI())
			startHeartbeat();
	}

	@AfterClass
	public static void stopHeartbeat() {
		heartbeat = false;
	}

	private static boolean isAppVeyorCI() {
		// see https://www.appveyor.com/docs/environment-variables/
		String s = System.getenv("APPVEYOR");
		return s!=null && "true".equals(s.toLowerCase());
	}

	private static boolean isTravisCI() {
		// see https://docs.travis-ci.com/user/environment-variables/#default-environment-variables
		String s = System.getenv("TRAVIS");
		return s!=null && "true".equals(s.toLowerCase());
	}

	static boolean heartbeat = false;

	private static void startHeartbeat() {
		// Add heartbeat thread to gen minimal output for travis, appveyor to avoid timeout.
		Thread t = new Thread("heartbeat") {
			@Override
			public void run() {
				while (true) {
					System.out.print('.');
				heartbeat = true;
				while (heartbeat) {
					try {
						Thread.sleep(5000);
					}
					catch (Exception e) {
						//noinspection BusyWait
						Thread.sleep(10000);
					} catch (Exception e) {
						e.printStackTrace();
					}
					System.out.print('.');
				}
			}
		};

@@ -87,11 +111,6 @@ public abstract class BaseRuntimeTest {
		this.delegate = delegate;
	}

	public static void mkdir(String dir) {
		File f = new File(dir);
		f.mkdirs();
	}

	@Before
	public void setUp() throws Exception {
		// From http://junit.sourceforge.net/javadoc/org/junit/Assume.html

@@ -118,22 +137,24 @@ public abstract class BaseRuntimeTest {

	@Test
	public void testOne() throws Exception {
		// System.out.println(descriptor.getTestName());
		// System.out.println(delegate.getTmpDir());
		if (descriptor.ignore(descriptor.getTarget()) ) {
			System.out.println("Ignore " + descriptor);
			return;
		}

		delegate.beforeTest(descriptor);
		if (descriptor.getTestType().contains("Parser") ) {
			testParser(descriptor);
		}
		else {
			testLexer(descriptor);
		}
		delegate.afterTest(descriptor);
	}

	public void testParser(RuntimeTestDescriptor descriptor) throws Exception {
		mkdir(delegate.getTmpDir());
		RuntimeTestUtils.mkdir(delegate.getTempParserDirPath());

		Pair<String, String> pair = descriptor.getGrammar();

@@ -150,7 +171,7 @@ public abstract class BaseRuntimeTest {
			g.registerRenderer(String.class, new StringRenderer());
			g.importTemplates(targetTemplates);
			ST grammarST = new ST(g, spair.b);
			writeFile(delegate.getTmpDir(), spair.a+".g4", grammarST.render());
			writeFile(delegate.getTempParserDirPath(), spair.a+".g4", grammarST.render());
		}
	}

@@ -175,7 +196,7 @@ public abstract class BaseRuntimeTest {
	}

	public void testLexer(RuntimeTestDescriptor descriptor) throws Exception {
		mkdir(delegate.getTmpDir());
		RuntimeTestUtils.mkdir(delegate.getTempParserDirPath());

		Pair<String, String> pair = descriptor.getGrammar();

@@ -192,7 +213,7 @@ public abstract class BaseRuntimeTest {
			g.registerRenderer(String.class, new StringRenderer());
			g.importTemplates(targetTemplates);
			ST grammarST = new ST(g, spair.b);
			writeFile(delegate.getTmpDir(), spair.a+".g4", grammarST.render());
			writeFile(delegate.getTempParserDirPath(), spair.a+".g4", grammarST.render());
		}
	}

@@ -216,7 +237,7 @@ public abstract class BaseRuntimeTest {
		boolean defaultListener,
		String... extraOptions)
	{
		mkdir(workdir);
		RuntimeTestUtils.mkdir(workdir);
		writeFile(workdir, grammarFileName, grammarStr);
		return antlrOnString(workdir, targetName, grammarFileName, defaultListener, extraOptions);
	}
@ -0,0 +1,238 @@
|
|||
package org.antlr.v4.test.runtime;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
import org.junit.rules.TestRule;
|
||||
import org.junit.rules.TestWatcher;
|
||||
import org.junit.runner.Description;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.nio.file.Files;
|
||||
import java.nio.file.LinkOption;
|
||||
import java.nio.file.Path;
|
||||
import java.util.Locale;
|
||||
import java.util.logging.Logger;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
@SuppressWarnings("ResultOfMethodCallIgnored")
|
||||
public abstract class BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
|
||||
protected static final Logger logger = Logger.getLogger(BaseRuntimeTestSupport.class.getName());
|
||||
|
||||
public static final String NEW_LINE = System.getProperty("line.separator");
|
||||
public static final String PATH_SEP = System.getProperty("path.separator");
|
||||
|
||||
private File tempTestDir = null;
|
||||
|
||||
/** If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
private String parseErrors;
|
||||
|
||||
/** Errors found while running antlr */
|
||||
private StringBuilder antlrToolErrors;
|
||||
|
||||
@org.junit.Rule
|
||||
public final TestRule testWatcher = new TestWatcher() {
|
||||
|
||||
@Override
|
||||
protected void succeeded(Description description) {
|
||||
testSucceeded(description);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
protected void testSucceeded(Description description) {
|
||||
// remove tmpdir if no error.
|
||||
eraseTempDir();
|
||||
}
|
||||
|
||||
@Override
|
||||
public File getTempParserDir() {
|
||||
return getTempTestDir();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTempParserDirPath() {
|
||||
return getTempParserDir() == null ? null : getTempParserDir().getAbsolutePath();
|
||||
}
|
||||
|
||||
@Override
|
||||
public final File getTempTestDir() {
|
||||
return tempTestDir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public final String getTempDirPath() {
|
||||
return tempTestDir ==null ? null : tempTestDir.getAbsolutePath();
|
||||
}
|
||||
|
||||
|
||||
public void setParseErrors(String errors) {
|
||||
this.parseErrors = errors;
|
||||
}
|
||||
|
||||
public String getParseErrors() {
|
||||
return parseErrors;
|
||||
}
|
||||
|
||||
public String getANTLRToolErrors() {
|
||||
if ( antlrToolErrors.length()==0 ) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected abstract String getPropertyPrefix();
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
createTempDir();
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
private void createTempDir() {
|
||||
// new output dir for each test
|
||||
String propName = getPropertyPrefix() + "-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if(prop!=null && prop.length()>0) {
|
||||
tempTestDir = new File(prop);
|
||||
} else {
|
||||
String dirName = getClass().getSimpleName() + "-" + Thread.currentThread().getName() + "-" + System.currentTimeMillis();
|
||||
tempTestDir = new File(System.getProperty("java.io.tmpdir"), dirName);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void beforeTest(RuntimeTestDescriptor descriptor) {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void afterTest(RuntimeTestDescriptor descriptor) {
|
||||
}
|
||||
|
||||
public void eraseTempDir() {
|
||||
if(shouldEraseTempDir()) {
|
||||
eraseDirectory(getTempTestDir());
|
||||
}
|
||||
}
|
||||
|
||||
protected boolean shouldEraseTempDir() {
|
||||
if(tempTestDir == null)
|
||||
return false;
|
||||
String propName = getPropertyPrefix() + "-erase-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if (prop != null && prop.length() > 0)
|
||||
return Boolean.getBoolean(prop);
|
||||
else
|
||||
return true;
|
||||
}
|
||||
|
||||
public static void eraseDirectory(File dir) {
|
||||
if ( dir.exists() ) {
|
||||
eraseFilesInDir(dir);
|
||||
dir.delete();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
public static void eraseFilesInDir(File dir) {
|
||||
String[] files = dir.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
try {
|
||||
eraseFile(dir, files[i]);
|
||||
} catch(IOException e) {
|
||||
logger.info(e.getMessage());
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private static void eraseFile(File dir, String name) throws IOException {
|
||||
File file = new File(dir,name);
|
||||
if(Files.isSymbolicLink((file.toPath())))
|
||||
Files.delete(file.toPath());
|
||||
else if(file.isDirectory()) {
|
||||
// work around issue where Files.isSymbolicLink returns false on Windows for node/antlr4 linked package
|
||||
if("antlr4".equals(name))
|
||||
; // logger.warning("antlr4 not seen as a symlink");
|
||||
else
|
||||
eraseDirectory(file);
|
||||
} else
|
||||
file.delete();
|
||||
}
|
||||
|
||||
|
||||
private static String detectedOS;
|
||||
|
||||
public static String getOS() {
|
||||
if (detectedOS == null) {
|
||||
String os = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
|
||||
if (os.contains("mac") || os.contains("darwin")) {
|
||||
detectedOS = "mac";
|
||||
}
|
||||
else if (os.contains("win")) {
|
||||
detectedOS = "windows";
|
||||
}
|
||||
else if (os.contains("nux")) {
|
||||
detectedOS = "linux";
|
||||
}
|
||||
else {
|
||||
detectedOS = "unknown";
|
||||
}
|
||||
}
|
||||
return detectedOS;
|
||||
}
|
||||
|
||||
|
||||
public static boolean isWindows() {
|
||||
return getOS().equalsIgnoreCase("windows");
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if ( g.atn==null ) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f = g.isLexer() ? new LexerATNFactory((LexerGrammar) g) : new ParserATNFactory(g);
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if ( useSerializer ) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
// System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
|
@@ -0,0 +1,91 @@
package org.antlr.v4.test.runtime;

import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;

public class MockIntTokenStream implements TokenStream {

	public IntegerList types;
	int p=0;

	public MockIntTokenStream(IntegerList types) { this.types = types; }

	@Override
	public void consume() { p++; }

	@Override
	public int LA(int i) { return LT(i).getType(); }

	@Override
	public int mark() {
		return index();
	}

	@Override
	public int index() { return p; }

	@Override
	public void release(int marker) {
		seek(marker);
	}

	@Override
	public void seek(int index) {
		p = index;
	}

	@Override
	public int size() {
		return types.size();
	}

	@Override
	public String getSourceName() {
		return UNKNOWN_SOURCE_NAME;
	}

	@Override
	public Token LT(int i) {
		CommonToken t;
		int rawIndex = p + i - 1;
		if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
		else t = new CommonToken(types.get(rawIndex));
		t.setTokenIndex(rawIndex);
		return t;
	}

	@Override
	public Token get(int i) {
		return new org.antlr.v4.runtime.CommonToken(types.get(i));
	}

	@Override
	public TokenSource getTokenSource() {
		return null;
	}

	@Override
	public String getText() {
		throw new UnsupportedOperationException("can't give strings");
	}

	@Override
	public String getText(Interval interval) {
		throw new UnsupportedOperationException("can't give strings");
	}

	@Override
	public String getText(RuleContext ctx) {
		throw new UnsupportedOperationException("can't give strings");
	}

	@Override
	public String getText(Token start, Token stop) {
		throw new UnsupportedOperationException("can't give strings");
	}
}
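A minimal usage sketch, an assumption rather than part of this file: MockIntTokenStream lets ATN/parser tests feed a canned sequence of token types without running a real lexer. The type values below are arbitrary placeholders.

package org.antlr.v4.test.runtime;

import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.misc.IntegerList;

// Sketch only: walk a fixed token-type sequence through the mock stream.
public class MockIntTokenStreamDemo {
	public static void main(String[] args) {
		IntegerList types = new IntegerList();
		types.add(1);          // arbitrary token type
		types.add(2);          // arbitrary token type
		types.add(Token.EOF);
		TokenStream input = new MockIntTokenStream(types);
		while (input.LA(1) != Token.EOF) {
			System.out.println("token type = " + input.LT(1).getType());
			input.consume();
		}
	}
}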
@@ -6,6 +6,8 @@
package org.antlr.v4.test.runtime;

import java.io.File;

/** This interface describes functionality needed to execute a runtime test.
 * Unfortunately the Base*Test.java files are big junk drawers. This is
 * an attempt to make it more obvious what new target implementers have to
@@ -14,13 +16,22 @@ package org.antlr.v4.test.runtime;
 * @since 4.6
 */
public interface RuntimeTestSupport {
	void testSetUp() throws Exception;
	void testTearDown() throws Exception;

	// dir containing grammar input and output
	File getTempParserDir();
	String getTempParserDirPath();

	// dir containing test input and output
	File getTempTestDir();
	String getTempDirPath();
	void eraseTempDir();

	String getTmpDir();
	void testSetUp() throws Exception;
	void testTearDown() throws Exception;

	void beforeTest(RuntimeTestDescriptor descriptor);
	void afterTest(RuntimeTestDescriptor descriptor);

	String getStdout();
	String getParseErrors();
	String getANTLRToolErrors();

@@ -39,4 +50,5 @@ public interface RuntimeTestSupport {
	String startRuleName,
	String input,
	boolean showDiagnosticErrors);

}
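A hedged sketch, not part of the change: how the two temp-dir accessors introduced above are consumed by the shared driver, with the parser directory holding generated grammar/parser files and the test directory holding test input and output. The literals are placeholders.

package org.antlr.v4.test.runtime;

// Sketch only: 'delegate' is any RuntimeTestSupport implementation.
public class TempDirUsageSketch {
	static void prepare(RuntimeTestSupport delegate, String grammarName, String grammarText) {
		RuntimeTestUtils.mkdir(delegate.getTempParserDirPath());   // generated .g4/parser files
		BaseRuntimeTest.writeFile(delegate.getTempParserDirPath(), grammarName + ".g4", grammarText);
		RuntimeTestUtils.mkdir(delegate.getTempDirPath());         // test input and output
		BaseRuntimeTest.writeFile(delegate.getTempDirPath(), "input", "sample input");
	}
}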
@@ -0,0 +1,89 @@
package org.antlr.v4.test.runtime;

import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.tool.LexerGrammar;

import java.io.*;
import java.util.*;

public abstract class RuntimeTestUtils {

	/** Sort a list */
	public static <T extends Comparable<? super T>> List<T> sort(List<T> data) {
		List<T> dup = new ArrayList<T>(data);
		Collections.sort(dup);
		return dup;
	}

	/** Return map sorted by key */
	public static <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
		LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
		List<K> keys = new ArrayList<K>(data.keySet());
		Collections.sort(keys);
		for (K k : keys) {
			dup.put(k, data.get(k));
		}
		return dup;
	}

	public static List<String> getTokenTypes(LexerGrammar lg,
	                                         ATN atn,
	                                         CharStream input) {
		LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[]{new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
		List<String> tokenTypes = new ArrayList<String>();
		int ttype;
		boolean hitEOF = false;
		do {
			if ( hitEOF ) {
				tokenTypes.add("EOF");
				break;
			}
			int t = input.LA(1);
			ttype = interp.match(input, Lexer.DEFAULT_MODE);
			if ( ttype==Token.EOF ) {
				tokenTypes.add("EOF");
			}
			else {
				tokenTypes.add(lg.typeToTokenList.get(ttype));
			}

			if ( t== IntStream.EOF ) {
				hitEOF = true;
			}
		} while ( ttype!=Token.EOF );
		return tokenTypes;
	}

	public static IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
		ANTLRInputStream in = new ANTLRInputStream(input);
		IntegerList tokenTypes = new IntegerList();
		int ttype;
		do {
			ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
			tokenTypes.add(ttype);
		} while ( ttype!= Token.EOF );
		return tokenTypes;
	}

	public static void copyFile(File source, File dest) throws IOException {
		InputStream is = new FileInputStream(source);
		OutputStream os = new FileOutputStream(dest);
		byte[] buf = new byte[4 << 10];
		int l;
		while ((l = is.read(buf)) > -1) {
			os.write(buf, 0, l);
		}
		is.close();
		os.close();
	}

	public static void mkdir(String dir) {
		File f = new File(dir);
		f.mkdirs();
	}
}
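An illustrative sketch, not part of the new file above: the sort helpers exist so that map-based test output is deterministic across JDKs before being stringified for assertions. The demo values are placeholders.

package org.antlr.v4.test.runtime;

import java.util.HashMap;
import java.util.Map;

// Sketch only: a key-sorted LinkedHashMap prints entries in key order,
// so the resulting string is stable regardless of HashMap iteration order.
public class SortHelperDemo {
	public static void main(String[] args) {
		Map<String, Integer> counts = new HashMap<String, Integer>();
		counts.put("b", 2);
		counts.put("a", 1);
		counts.put("c", 3);
		System.out.println(RuntimeTestUtils.sort(counts)); // {a=1, b=2, c=3}
	}
}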
@@ -6,14 +6,28 @@ public abstract class TestContext {
	return "true".equals(String.valueOf(System.getenv("TRAVIS")).toLowerCase());
	}

	public static boolean isGitHubCI() {
		return "true".equals(String.valueOf(System.getenv("CI")).toLowerCase());
	}

	public static boolean isAppVeyorCI() {
		return "true".equals(String.valueOf(System.getenv("APPVEYOR")).toLowerCase());
	}

	public static boolean isCircleCI() {
		return "true".equals(String.valueOf(System.getenv("CIRCLECI")).toLowerCase());
	}

	public static boolean isCI() {
		return isAppVeyorCI() || isCircleCI() || isGitHubCI() || isTravisCI();
	}

	@SuppressWarnings("BooleanMethodIsAlwaysInverted")
	public static boolean isSupportedTarget(String target) {
		if(isAppVeyorCI())
			return !target.matches("Swift|Node");
			return !target.matches("Swift");
		else
			return true;
	}

}
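A hedged example, not in the diff: these CI checks are typically consumed through JUnit assumptions so unsupported targets are skipped rather than failed. The target name and test class are placeholders.

package org.antlr.v4.test.runtime;

import org.junit.Assume;
import org.junit.Test;

// Sketch only: skip the test when the current CI service cannot run the target.
public class TargetGuardSketch {
	@Test
	public void testSwiftTargetOnThisCI() {
		Assume.assumeTrue("target not supported on this CI", TestContext.isSupportedTarget("Swift"));
		// ... real test body would follow here ...
	}
}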
@ -5,64 +5,19 @@
|
|||
*/
|
||||
package org.antlr.v4.test.runtime.cpp;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.ATNFactory;
|
||||
import org.antlr.v4.automata.ATNPrinter;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.runtime.ANTLRInputStream;
|
||||
import org.antlr.v4.runtime.CharStream;
|
||||
import org.antlr.v4.runtime.CommonToken;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.IntStream;
|
||||
import org.antlr.v4.runtime.Lexer;
|
||||
import org.antlr.v4.runtime.Parser;
|
||||
import org.antlr.v4.runtime.RuleContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.TokenStream;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.runtime.atn.ATNState;
|
||||
import org.antlr.v4.runtime.atn.DecisionState;
|
||||
import org.antlr.v4.runtime.atn.LexerATNSimulator;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.runtime.tree.ParseTree;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.DOTGenerator;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
import org.antlr.v4.tool.Rule;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.stringtemplate.v4.STGroup;
|
||||
import org.stringtemplate.v4.STGroupString;
|
||||
|
||||
import java.io.File;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URISyntaxException;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Paths;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Locale;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
||||
|
@ -72,248 +27,12 @@ import static org.junit.Assert.assertFalse;
|
|||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class BaseCppTest implements RuntimeTestSupport {
|
||||
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
|
||||
// private static final Logger LOGGER = Logger.getLogger(BaseTest.class.getName());
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
public static final String pathSep = System.getProperty("path.separator");
|
||||
public class BaseCppTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/** If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
/** Errors found while running antlr */
|
||||
protected StringBuilder antlrToolErrors;
|
||||
|
||||
private String getPropertyPrefix() {
|
||||
protected String getPropertyPrefix() {
|
||||
return "antlr-" + getLanguage().toLowerCase();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
// new output dir for each test
|
||||
String propName = getPropertyPrefix() + "-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if(prop!=null && prop.length()>0) {
|
||||
tmpdir = prop;
|
||||
}
|
||||
else {
|
||||
tmpdir = new File(System.getProperty("java.io.tmpdir"),
|
||||
getClass().getSimpleName()+"-"+Thread.currentThread().getName()+"-"+System.currentTimeMillis()).getAbsolutePath();
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if ( antlrToolErrors.length()==0 ) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected org.antlr.v4.Tool newTool(String[] args) {
|
||||
Tool tool = new Tool(args);
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected Tool newTool() {
|
||||
org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir});
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if ( g.atn==null ) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
if ( g.isLexer() ) {
|
||||
f = new LexerATNFactory((LexerGrammar)g);
|
||||
}
|
||||
else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if (useSerializer) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public DFA createDFA(Grammar g, DecisionState s) {
|
||||
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
|
||||
// DFA dfa = conv.createDFA();
|
||||
// conv.issueAmbiguityWarnings();
|
||||
// System.out.print("DFA="+dfa);
|
||||
// return dfa;
|
||||
return null;
|
||||
}
|
||||
|
||||
// public void minimizeDFA(DFA dfa) {
|
||||
// DFAMinimizer dmin = new DFAMinimizer(dfa);
|
||||
// dfa.minimized = dmin.minimize();
|
||||
// }
|
||||
|
||||
IntegerList getTypesFromString(Grammar g, String expecting) {
|
||||
IntegerList expectingTokenTypes = new IntegerList();
|
||||
if ( expecting!=null && !expecting.trim().isEmpty() ) {
|
||||
for (String tname : expecting.replace(" ", "").split(",")) {
|
||||
int ttype = g.getTokenType(tname);
|
||||
expectingTokenTypes.add(ttype);
|
||||
}
|
||||
}
|
||||
return expectingTokenTypes;
|
||||
}
|
||||
|
||||
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
|
||||
ANTLRInputStream in = new ANTLRInputStream(input);
|
||||
IntegerList tokenTypes = new IntegerList();
|
||||
int ttype;
|
||||
do {
|
||||
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
|
||||
tokenTypes.add(ttype);
|
||||
} while ( ttype!= Token.EOF );
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
public List<String> getTokenTypes(LexerGrammar lg,
|
||||
ATN atn,
|
||||
CharStream input)
|
||||
{
|
||||
LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
|
||||
List<String> tokenTypes = new ArrayList<String>();
|
||||
int ttype;
|
||||
boolean hitEOF = false;
|
||||
do {
|
||||
if ( hitEOF ) {
|
||||
tokenTypes.add("EOF");
|
||||
break;
|
||||
}
|
||||
int t = input.LA(1);
|
||||
ttype = interp.match(input, Lexer.DEFAULT_MODE);
|
||||
if ( ttype == Token.EOF ) {
|
||||
tokenTypes.add("EOF");
|
||||
}
|
||||
else {
|
||||
tokenTypes.add(lg.typeToTokenList.get(ttype));
|
||||
}
|
||||
|
||||
if ( t== IntStream.EOF ) {
|
||||
hitEOF = true;
|
||||
}
|
||||
} while ( ttype!=Token.EOF );
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
|
||||
if ( s==null ) {
|
||||
System.err.println("no such rule: "+ruleName);
|
||||
return null;
|
||||
}
|
||||
ATNState t = s.transition(0).target;
|
||||
if ( !(t instanceof DecisionState) ) {
|
||||
System.out.println(ruleName+" has no decision");
|
||||
return null;
|
||||
}
|
||||
DecisionState blk = (DecisionState)t;
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
DecisionState blk = atn.decisionToState.get(decision);
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
DFA dfa = createDFA(g, blk);
|
||||
String result = null;
|
||||
if ( dfa!=null ) result = dfa.toString();
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
LexerGrammar g = new LexerGrammar(gtext, equeue);
|
||||
g.atn = createATN(g, false);
|
||||
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
|
||||
// DFA dfa = conv.createDFA(modeName);
|
||||
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
|
||||
//
|
||||
// String result = null;
|
||||
// if ( dfa!=null ) result = dfa.toString();
|
||||
// assertEquals(expecting, result);
|
||||
//
|
||||
// return equeue.all;
|
||||
return null;
|
||||
}
|
||||
|
||||
protected String getLanguage() {
|
||||
return "Cpp";
|
||||
}
|
||||
|
@ -338,44 +57,13 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
null,
|
||||
lexerName,"-no-listener");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
String output = execModule("Test.cpp");
|
||||
return output;
|
||||
}
|
||||
|
||||
public ParseTree execStartRule(String startRuleName, Parser parser)
|
||||
throws IllegalAccessException, InvocationTargetException,
|
||||
NoSuchMethodException
|
||||
{
|
||||
Method startRule = null;
|
||||
Object[] args = null;
|
||||
try {
|
||||
startRule = parser.getClass().getMethod(startRuleName);
|
||||
}
|
||||
catch (NoSuchMethodException nsme) {
|
||||
// try with int _p arg for recursive func
|
||||
startRule = parser.getClass().getMethod(startRuleName, int.class);
|
||||
args = new Integer[] {0};
|
||||
}
|
||||
ParseTree result = (ParseTree)startRule.invoke(parser, args);
|
||||
// System.out.println("parse tree = "+result.toStringTree(parser));
|
||||
return result;
|
||||
}
|
||||
|
||||
// protected String execParser(String grammarFileName,
|
||||
// String grammarStr,
|
||||
// String parserName,
|
||||
// String lexerName,
|
||||
// String listenerName,
|
||||
// String visitorName,
|
||||
// String startRuleName,
|
||||
// String input,
|
||||
// boolean debug) {
|
||||
// return execParser(grammarFileName, grammarStr, parserName, lexerName,
|
||||
// listenerName, visitorName, startRuleName, input, debug);
|
||||
// }
|
||||
//
|
||||
@Override
|
||||
public String execParser(String grammarFileName,
|
||||
String grammarStr,
|
||||
|
@ -393,7 +81,7 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
lexerName,
|
||||
"-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
rawBuildRecognizerTestFile(parserName,
|
||||
lexerName,
|
||||
listenerName,
|
||||
|
@ -423,7 +111,7 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
String... extraOptions)
|
||||
{
|
||||
ErrorQueue equeue =
|
||||
antlrOnString(getTmpDir(), "Cpp", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
antlrOnString(getTempDirPath(), "Cpp", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -457,7 +145,7 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
boolean debug,
|
||||
boolean trace)
|
||||
{
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
}
|
||||
|
@ -476,26 +164,6 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
}
|
||||
|
||||
|
||||
private static String detectedOS;
|
||||
public static String getOS() {
|
||||
if (detectedOS == null) {
|
||||
String os = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
|
||||
if ((os.indexOf("mac") >= 0) || (os.indexOf("darwin") >= 0)) {
|
||||
detectedOS = "mac";
|
||||
}
|
||||
else if (os.indexOf("win") >= 0) {
|
||||
detectedOS = "windows";
|
||||
}
|
||||
else if (os.indexOf("nux") >= 0) {
|
||||
detectedOS = "linux";
|
||||
}
|
||||
else {
|
||||
detectedOS = "unknown";
|
||||
}
|
||||
}
|
||||
return detectedOS;
|
||||
}
|
||||
|
||||
public List<String> allCppFiles(String path) {
|
||||
ArrayList<String> files = new ArrayList<String>();
|
||||
File folder = new File(path);
|
||||
|
@ -510,7 +178,7 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
}
|
||||
|
||||
private String runProcess(ProcessBuilder builder, String description, boolean showStderr) throws Exception {
|
||||
// System.out.println("BUILDER: "+builder.command());
|
||||
// System.out.println("BUILDER: " + builder.command() + " @ " + builder.directory().toString());
|
||||
Process process = builder.start();
|
||||
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
|
@ -521,16 +189,16 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
stderrVacuum.join();
|
||||
String output = stdoutVacuum.toString();
|
||||
if ( stderrVacuum.toString().length()>0 ) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
if ( showStderr ) System.err.println(this.stderrDuringParse);
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
if ( showStderr ) System.err.println(getParseErrors());
|
||||
}
|
||||
if (errcode != 0) {
|
||||
String err = "execution of '"+description+"' failed with error code: "+errcode;
|
||||
if ( this.stderrDuringParse!=null ) {
|
||||
this.stderrDuringParse += err;
|
||||
if ( getParseErrors()!=null ) {
|
||||
setParseErrors(getParseErrors() + err);
|
||||
}
|
||||
else {
|
||||
this.stderrDuringParse = err;
|
||||
setParseErrors(err);
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -597,15 +265,15 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
public String execModule(String fileName) {
|
||||
String runtimePath = locateRuntime();
|
||||
String includePath = runtimePath + "/runtime/src";
|
||||
String binPath = new File(new File(tmpdir), "a.out").getAbsolutePath();
|
||||
String inputPath = new File(new File(tmpdir), "input").getAbsolutePath();
|
||||
String binPath = new File(getTempTestDir(), "a.out").getAbsolutePath();
|
||||
String inputPath = new File(getTempTestDir(), "input").getAbsolutePath();
|
||||
|
||||
// Build runtime using cmake once.
|
||||
synchronized (runtimeBuiltOnce) {
|
||||
if ( !runtimeBuiltOnce ) {
|
||||
try {
|
||||
String command[] = {"clang++", "--version"};
|
||||
String output = runCommand(command, tmpdir, "printing compiler version", false);
|
||||
String output = runCommand(command, getTempDirPath(), "printing compiler version", false);
|
||||
System.out.println("Compiler version is: "+output);
|
||||
}
|
||||
catch (Exception e) {
|
||||
|
@ -625,7 +293,7 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
String libExtension = (getOS().equals("mac")) ? "dylib" : "so";
|
||||
try {
|
||||
String command[] = { "ln", "-s", runtimePath + "/dist/libantlr4-runtime." + libExtension };
|
||||
if (runCommand(command, tmpdir, "sym linking C++ runtime", true) == null)
|
||||
if (runCommand(command, getTempDirPath(), "sym linking C++ runtime", true) == null)
|
||||
return null;
|
||||
}
|
||||
catch (Exception e) {
|
||||
|
@ -636,8 +304,8 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
|
||||
try {
|
||||
List<String> command2 = new ArrayList<String>(Arrays.asList("clang++", "-std=c++11", "-I", includePath, "-L.", "-lantlr4-runtime", "-o", "a.out"));
|
||||
command2.addAll(allCppFiles(tmpdir));
|
||||
if (runCommand(command2.toArray(new String[0]), tmpdir, "building test binary", true) == null) {
|
||||
command2.addAll(allCppFiles(getTempDirPath()));
|
||||
if (runCommand(command2.toArray(new String[0]), getTempDirPath(), "building test binary", true) == null) {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
@ -648,10 +316,10 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
}
|
||||
|
||||
// Now run the newly minted binary. Reset the error output, as we could have got compiler warnings which are not relevant here.
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
try {
|
||||
ProcessBuilder builder = new ProcessBuilder(binPath, inputPath);
|
||||
builder.directory(new File(tmpdir));
|
||||
builder.directory(getTempTestDir());
|
||||
Map<String, String> env = builder.environment();
|
||||
env.put("LD_PRELOAD", runtimePath + "/dist/libantlr4-runtime." + libExtension);
|
||||
String output = runProcess(builder, "running test binary", false);
|
||||
|
@ -688,156 +356,11 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
p = Paths.get(runtimeURL.toURI()).toFile().toString();
|
||||
}
|
||||
catch (URISyntaxException use) {
|
||||
p = "Can't find runtime";
|
||||
p = "Can't find runtime at " + runtimeURL.toString();
|
||||
}
|
||||
return p;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
|
||||
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
|
||||
for (ANTLRMessage m : msgs) {
|
||||
if ( m.getClass() == c ) filtered.add(m);
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
void checkRuleATN(Grammar g, String ruleName, String expecting) {
|
||||
ParserATNFactory f = new ParserATNFactory(g);
|
||||
ATN atn = f.createATN();
|
||||
|
||||
DOTGenerator dot = new DOTGenerator(g);
|
||||
System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
|
||||
|
||||
Rule r = g.getRule(ruleName);
|
||||
ATNState startState = atn.ruleToStartState[r.index];
|
||||
ATNPrinter serializer = new ATNPrinter(g, startState);
|
||||
String result = serializer.asString();
|
||||
|
||||
//System.out.print(result);
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
|
||||
int lp = templates.indexOf('(');
|
||||
String name = templates.substring(0, lp);
|
||||
STGroup group = new STGroupString(templates);
|
||||
ST st = group.getInstanceOf(name);
|
||||
st.add(actionName, action);
|
||||
String grammar = st.render();
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(grammar, equeue);
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
|
||||
ATNFactory factory = new ParserATNFactory(g);
|
||||
if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g);
|
||||
g.atn = factory.createATN();
|
||||
|
||||
CodeGenerator gen = new CodeGenerator(g);
|
||||
ST outputFileST = gen.generateParser();
|
||||
String output = outputFileST.render();
|
||||
//System.out.println(output);
|
||||
String b = "#" + actionName + "#";
|
||||
int start = output.indexOf(b);
|
||||
String e = "#end-" + actionName + "#";
|
||||
int end = output.indexOf(e);
|
||||
String snippet = output.substring(start+b.length(),end);
|
||||
assertEquals(expected, snippet);
|
||||
}
|
||||
if ( equeue.size()>0 ) {
|
||||
System.err.println(equeue.toString());
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsError(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if ( equeue.size()!=1 ) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.warnings.size(); i++) {
|
||||
ANTLRMessage m = equeue.warnings.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if ( equeue.size()!=1 ) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkError(ErrorQueue equeue,
|
||||
ANTLRMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
//System.out.println("errors="+equeue);
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty());
|
||||
assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1);
|
||||
assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg);
|
||||
/*
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
*/
|
||||
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
|
||||
}
|
||||
|
||||
public static class FilteringTokenStream extends CommonTokenStream {
|
||||
public FilteringTokenStream(TokenSource src) { super(src); }
|
||||
Set<Integer> hide = new HashSet<Integer>();
|
||||
@Override
|
||||
protected boolean sync(int i) {
|
||||
if (!super.sync(i)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Token t = get(i);
|
||||
if ( hide.contains(t.getType()) ) {
|
||||
((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
public void setTokenTypeChannel(int ttype, int channel) {
|
||||
hide.add(ttype);
|
||||
}
|
||||
}
|
||||
|
||||
protected void mkdir(String dir) {
|
||||
File f = new File(dir);
|
||||
f.mkdirs();
|
||||
}
|
||||
|
||||
protected void writeParserTestFile(String parserName, String lexerName,
|
||||
String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug, boolean trace) {
|
||||
|
@ -871,7 +394,8 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
+ "\n"
|
||||
+ "\n"
|
||||
+ "int main(int argc, const char* argv[]) {\n"
|
||||
+ " ANTLRFileStream input(argv[1]);\n"
|
||||
+ " ANTLRFileStream input;\n"
|
||||
+ " input.loadFromFile(argv[1]);\n"
|
||||
+ " <lexerName> lexer(&input);\n"
|
||||
+ " CommonTokenStream tokens(&lexer);\n"
|
||||
+ "<createParser>"
|
||||
|
@ -898,7 +422,7 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
outputFileST.add("listenerName", listenerName);
|
||||
outputFileST.add("visitorName", visitorName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
writeFile(tmpdir, "Test.cpp", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.cpp", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
|
||||
|
@ -913,7 +437,8 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
+ "using namespace antlr4;\n"
|
||||
+ "\n"
|
||||
+ "int main(int argc, const char* argv[]) {\n"
|
||||
+ " ANTLRFileStream input(argv[1]);\n"
|
||||
+ " ANTLRFileStream input;\n"
|
||||
+ " input.loadFromFile(argv[1]);\n"
|
||||
+ " <lexerName> lexer(&input);\n"
|
||||
+ " CommonTokenStream tokens(&lexer);\n"
|
||||
+ " tokens.fill();\n"
|
||||
|
@ -923,208 +448,8 @@ public class BaseCppTest implements RuntimeTestSupport {
|
|||
+ " return 0;\n"
|
||||
+ "}\n");
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
writeFile(tmpdir, "Test.cpp", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.cpp", outputFileST.render());
|
||||
}
|
||||
|
||||
public void writeRecognizer(String parserName, String lexerName,
|
||||
String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug, boolean trace) {
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, debug);
|
||||
}
|
||||
else {
|
||||
writeParserTestFile(parserName,
|
||||
lexerName,
|
||||
listenerName,
|
||||
visitorName,
|
||||
parserStartRuleName,
|
||||
debug,
|
||||
trace);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected void eraseFiles(final String filesEndingWith) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
String[] files = tmpdirF.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
if ( files[i].endsWith(filesEndingWith) ) {
|
||||
new File(tmpdir+"/"+files[i]).delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void eraseFiles(File dir) {
|
||||
String[] files = dir.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
new File(dir,files[i]).delete();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void eraseTempDir() {
|
||||
boolean doErase = true;
|
||||
String propName = getPropertyPrefix() + "-erase-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if(prop!=null && prop.length()>0)
|
||||
doErase = Boolean.getBoolean(prop);
|
||||
if(doErase) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if ( tmpdirF.exists() ) {
|
||||
eraseFiles(tmpdirF);
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public String getFirstLineOfException() {
|
||||
if ( this.stderrDuringParse ==null ) {
|
||||
return null;
|
||||
}
|
||||
String[] lines = this.stderrDuringParse.split("\n");
|
||||
String prefix="Exception in thread \"main\" ";
|
||||
return lines[0].substring(prefix.length(),lines[0].length());
|
||||
}
|
||||
|
||||
/**
|
||||
* When looking at a result set that consists of a Map/HashTable
|
||||
* we cannot rely on the output order, as the hashing algorithm or other aspects
|
||||
* of the implementation may be different on different JDKs or platforms. Hence
|
||||
* we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a
|
||||
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
|
||||
* the keys are strings.
|
||||
*
|
||||
* @param m The Map that contains keys we wish to return in sorted order
|
||||
* @return A string that represents all the keys in sorted order.
|
||||
*/
|
||||
public <K, V> String sortMapToString(Map<K, V> m) {
|
||||
// Pass in crap, and get nothing back
|
||||
//
|
||||
if (m == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
System.out.println("Map toString looks like: " + m.toString());
|
||||
|
||||
// Sort the keys in the Map
|
||||
//
|
||||
TreeMap<K, V> nset = new TreeMap<K, V>(m);
|
||||
|
||||
System.out.println("Tree map looks like: " + nset.toString());
|
||||
return nset.toString();
|
||||
}
|
||||
|
||||
public List<String> realElements(List<String> elements) {
|
||||
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String message, String text) {
|
||||
assertNotNull(message, text);
|
||||
assertFalse(message, text.isEmpty());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String text) {
|
||||
assertNotNull(text);
|
||||
assertFalse(text.isEmpty());
|
||||
}
|
||||
|
||||
public static class IntTokenStream implements TokenStream {
|
||||
IntegerList types;
|
||||
int p=0;
|
||||
public IntTokenStream(IntegerList types) { this.types = types; }
|
||||
|
||||
@Override
|
||||
public void consume() { p++; }
|
||||
|
||||
@Override
|
||||
public int LA(int i) { return LT(i).getType(); }
|
||||
|
||||
@Override
|
||||
public int mark() {
|
||||
return index();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int index() { return p; }
|
||||
|
||||
@Override
|
||||
public void release(int marker) {
|
||||
seek(marker);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seek(int index) {
|
||||
p = index;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return types.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSourceName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token LT(int i) {
|
||||
CommonToken t;
|
||||
int rawIndex = p + i - 1;
|
||||
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
|
||||
else t = new CommonToken(types.get(rawIndex));
|
||||
t.setTokenIndex(rawIndex);
|
||||
return t;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token get(int i) {
|
||||
return new org.antlr.v4.runtime.CommonToken(types.get(i));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenSource getTokenSource() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText() {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Interval interval) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(RuleContext ctx) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Token start, Token stop) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
}
|
||||
|
||||
/** Sort a list */
|
||||
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
|
||||
List<T> dup = new ArrayList<T>();
|
||||
dup.addAll(data);
|
||||
Collections.sort(dup);
|
||||
return dup;
|
||||
}
|
||||
|
||||
/** Return map sorted by key */
|
||||
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
|
||||
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -5,39 +5,14 @@
|
|||
*/
|
||||
package org.antlr.v4.test.runtime.csharp;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.misc.Utils;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.test.runtime.TestOutputReading;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.junit.rules.TestRule;
|
||||
import org.junit.rules.TestWatcher;
|
||||
import org.junit.runner.Description;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.w3c.dom.Document;
|
||||
import org.w3c.dom.Element;
|
||||
|
||||
import javax.xml.parsers.DocumentBuilderFactory;
|
||||
import javax.xml.transform.OutputKeys;
|
||||
import javax.xml.transform.Transformer;
|
||||
import javax.xml.transform.TransformerFactory;
|
||||
import javax.xml.transform.dom.DOMSource;
|
||||
import javax.xml.transform.stream.StreamResult;
|
||||
import javax.xml.xpath.XPathConstants;
|
||||
import javax.xml.xpath.XPathExpression;
|
||||
import javax.xml.xpath.XPathFactory;
|
||||
import java.io.File;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
|
@ -56,114 +31,11 @@ import static org.junit.Assert.assertFalse;
|
|||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class BaseCSharpTest implements RuntimeTestSupport {
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
|
||||
/**
|
||||
* When the {@code antlr.preserve-test-dir} runtime property is set to
|
||||
* {@code true}, the temporary directories created by the test run will not
|
||||
* be removed at the end of the test run, even for tests that completed
|
||||
* successfully.
|
||||
*
|
||||
* <p>
|
||||
* The default behavior (used in all other cases) is removing the temporary
|
||||
* directories for all tests which completed successfully, and preserving
|
||||
* the directories for tests which failed.</p>
|
||||
*/
|
||||
public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr-preserve-csharp-test-dir"));
|
||||
|
||||
/**
|
||||
* The base test directory is the directory where generated files get placed
|
||||
* during unit test execution.
|
||||
*
|
||||
* <p>
|
||||
* The default value for this property is the {@code java.io.tmpdir} system
|
||||
* property, and can be overridden by setting the
|
||||
* {@code antlr.java-test-dir} property to a custom location. Note that the
|
||||
* {@code antlr.java-test-dir} property directly affects the
|
||||
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
|
||||
*/
|
||||
public static final String BASE_TEST_DIR;
|
||||
|
||||
/**
|
||||
* When {@code true}, a temporary directory will be created for each test
|
||||
* executed during the test run.
|
||||
*
|
||||
* <p>
|
||||
* This value is {@code true} when the {@code antlr.java-test-dir} system
|
||||
* property is set, and otherwise {@code false}.</p>
|
||||
*/
|
||||
public static final boolean CREATE_PER_TEST_DIRECTORIES;
|
||||
|
||||
static {
|
||||
String baseTestDir = System.getProperty("antlr-csharp-test-dir");
|
||||
boolean perTestDirectories = false;
|
||||
if (baseTestDir == null || baseTestDir.isEmpty()) {
|
||||
baseTestDir = System.getProperty("java.io.tmpdir");
|
||||
perTestDirectories = true;
|
||||
}
|
||||
|
||||
if (!new File(baseTestDir).isDirectory()) {
|
||||
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
|
||||
}
|
||||
|
||||
BASE_TEST_DIR = baseTestDir;
|
||||
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
|
||||
}
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/**
|
||||
* If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
/**
|
||||
* Errors found while running antlr
|
||||
*/
|
||||
protected StringBuilder antlrToolErrors;
|
||||
public class BaseCSharpTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
if (CREATE_PER_TEST_DIRECTORIES) {
|
||||
// new output dir for each test
|
||||
String testDirectory = getClass().getSimpleName() + "-" + Thread.currentThread().getName() + "-" + System.currentTimeMillis();
|
||||
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
|
||||
} else {
|
||||
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
|
||||
if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) {
|
||||
eraseDirectory(new File(tmpdir));
|
||||
}
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if (antlrToolErrors.length() == 0) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
protected String getPropertyPrefix() {
|
||||
return "antlr4-csharp";
|
||||
}
|
||||
|
||||
protected String execLexer(String grammarFileName,
|
||||
|
@ -184,12 +56,12 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
null,
|
||||
lexerName);
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
addSourceFiles("Test.cs");
|
||||
if (!compile()) {
|
||||
System.err.println("Failed to compile!");
|
||||
return stderrDuringParse;
|
||||
return getParseErrors();
|
||||
}
|
||||
String output = execTest();
|
||||
if (output != null && output.length() == 0) {
|
||||
|
@ -201,8 +73,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
Set<String> sourceFiles = new HashSet<>();
|
||||
|
||||
private void addSourceFiles(String... files) {
|
||||
for (String file : files)
|
||||
this.sourceFiles.add(file);
|
||||
Collections.addAll(sourceFiles, files);
|
||||
}
|
||||
|
||||
@Override
|
||||
|
@ -221,7 +92,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
lexerName,
|
||||
"-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
return rawExecRecognizer(parserName,
|
||||
lexerName,
|
||||
startRuleName,
|
||||
|
@ -248,7 +119,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
String lexerName,
|
||||
boolean defaultListener,
|
||||
String... extraOptions) {
|
||||
ErrorQueue equeue = antlrOnString(getTmpDir(), "CSharp", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
ErrorQueue equeue = antlrOnString(getTempDirPath(), "CSharp", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -278,7 +149,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
String lexerName,
|
||||
String parserStartRuleName,
|
||||
boolean debug) {
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if (parserName == null) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
} else {
|
||||
|
@ -313,18 +184,18 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
}
|
||||
|
||||
private String locateExec() {
|
||||
return new File(tmpdir, "bin/Release/netcoreapp3.1/Test.dll").getAbsolutePath();
|
||||
return new File(getTempTestDir(), "bin/Release/netcoreapp3.1/Test.dll").getAbsolutePath();
|
||||
}
|
||||
|
||||
public boolean buildProject() {
|
||||
try {
|
||||
// save auxiliary files
|
||||
String pack = BaseCSharpTest.class.getPackage().getName().replace(".", "/") + "/";
|
||||
saveResourceAsFile(pack + "Antlr4.Test.csproj", new File(tmpdir, "Antlr4.Test.csproj"));
|
||||
saveResourceAsFile(pack + "Antlr4.Test.csproj", new File(getTempTestDir(), "Antlr4.Test.csproj"));
|
||||
|
||||
// find runtime package
|
||||
final ClassLoader loader = Thread.currentThread().getContextClassLoader();
|
||||
final URL runtimeProj = loader.getResource("CSharp/Antlr4.csproj");
|
||||
final URL runtimeProj = loader.getResource("CSharp/src/Antlr4.csproj");
|
||||
if (runtimeProj == null) {
|
||||
throw new RuntimeException("C# runtime project file not found!");
|
||||
}
|
||||
|
@ -339,7 +210,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
"reference",
|
||||
runtimeProjPath
|
||||
};
|
||||
boolean success = runProcess(args, tmpdir);
|
||||
boolean success = runProcess(args, getTempDirPath());
|
||||
assertTrue(success);
|
||||
|
||||
// build test
|
||||
|
@@ -350,7 +221,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
"-c",
|
||||
"Release"
|
||||
};
|
||||
success = runProcess(args, tmpdir);
|
||||
success = runProcess(args, getTempDirPath());
|
||||
assertTrue(success);
|
||||
} catch (Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
|
@@ -378,11 +249,11 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
int exitValue = process.exitValue();
|
||||
boolean success = (exitValue == 0);
|
||||
if (!success) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
System.err.println("runProcess command: " + Utils.join(args, " "));
|
||||
System.err.println("runProcess exitValue: " + exitValue);
|
||||
System.err.println("runProcess stdoutVacuum: " + stdoutVacuum.toString());
|
||||
System.err.println("runProcess stderrVacuum: " + stderrDuringParse);
|
||||
System.err.println("runProcess stderrVacuum: " + getParseErrors());
|
||||
}
|
||||
if (exitValue == 132) {
|
||||
// Retry after SIGILL. We are seeing this intermittently on
|
||||
|
@@ -417,7 +288,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
public String execTest() {
|
||||
String exec = locateExec();
|
||||
try {
|
||||
File tmpdirFile = new File(tmpdir);
|
||||
File tmpdirFile = new File(getTempDirPath());
|
||||
Path output = tmpdirFile.toPath().resolve("output");
|
||||
Path errorOutput = tmpdirFile.toPath().resolve("error-output");
|
||||
String[] args = getExecTestArgs(exec, output, errorOutput);
|
||||
|
@@ -432,7 +303,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
stdoutVacuum.join();
|
||||
stderrVacuum.join();
|
||||
String writtenOutput = TestOutputReading.read(output);
|
||||
this.stderrDuringParse = TestOutputReading.read(errorOutput);
|
||||
setParseErrors(TestOutputReading.read(errorOutput));
|
||||
int exitValue = process.exitValue();
|
||||
String stdoutString = stdoutVacuum.toString().trim();
|
||||
String stderrString = stderrVacuum.toString().trim();
|
||||
|
@@ -456,7 +327,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
|
||||
private String[] getExecTestArgs(String exec, Path output, Path errorOutput) {
|
||||
return new String[]{
|
||||
"dotnet", exec, new File(tmpdir, "input").getAbsolutePath(),
|
||||
"dotnet", exec, new File(getTempTestDir(), "input").getAbsolutePath(),
|
||||
output.toAbsolutePath().toString(),
|
||||
errorOutput.toAbsolutePath().toString()
|
||||
};
|
||||
|
@@ -516,7 +387,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
outputFileST.add("parserName", parserName);
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
writeFile(tmpdir, "Test.cs", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.cs", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
|
||||
|
@@ -545,32 +416,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
);
|
||||
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
writeFile(tmpdir, "Test.cs", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void eraseDirectory(File dir) {
|
||||
File[] files = dir.listFiles();
|
||||
if (files != null) {
|
||||
for (File file : files) {
|
||||
if (file.isDirectory()) {
|
||||
eraseDirectory(file);
|
||||
} else {
|
||||
file.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
dir.delete();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void eraseTempDir() {
|
||||
if (!PRESERVE_TEST_DIR) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if (tmpdirF.exists()) {
|
||||
eraseDirectory(tmpdirF);
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
writeFile(getTempDirPath(), "Test.cs", outputFileST.render());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@@ -578,8 +424,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
|
|||
*/
|
||||
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
|
||||
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
List<K> keys = new ArrayList<K>(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
|
|
|
@@ -6,39 +6,14 @@
|
|||
|
||||
package org.antlr.v4.test.runtime.dart;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.analysis.AnalysisPipeline;
|
||||
import org.antlr.v4.automata.ATNFactory;
|
||||
import org.antlr.v4.automata.ATNPrinter;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.misc.Utils;
|
||||
import org.antlr.v4.runtime.*;
|
||||
import org.antlr.v4.runtime.atn.*;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.runtime.misc.Pair;
|
||||
import org.antlr.v4.runtime.tree.ParseTree;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.BaseRuntimeTest;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.antlr.v4.test.runtime.descriptors.LexerExecDescriptors;
|
||||
import org.antlr.v4.test.runtime.descriptors.PerformanceDescriptors;
|
||||
import org.antlr.v4.tool.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.stringtemplate.v4.STGroup;
|
||||
import org.stringtemplate.v4.STGroupString;
|
||||
|
||||
import java.io.*;
|
||||
import java.lang.reflect.Constructor;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URL;
|
||||
import java.net.URLClassLoader;
|
||||
import java.util.*;
|
||||
|
||||
import static junit.framework.TestCase.*;
|
||||
|
@@ -47,333 +22,19 @@ import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
|||
import static org.junit.Assert.assertArrayEquals;
|
||||
|
||||
|
||||
public class BaseDartTest implements RuntimeTestSupport {
|
||||
public class BaseDartTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
private static final List<String> AOT_COMPILE_TESTS = Arrays.asList(
|
||||
new PerformanceDescriptors.DropLoopEntryBranchInLRRule_4().input,
|
||||
new LexerExecDescriptors.LargeLexer().input
|
||||
);
|
||||
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
public static final String pathSep = System.getProperty("path.separator");
|
||||
|
||||
|
||||
/**
|
||||
* When the {@code antlr.preserve-test-dir} runtime property is set to
|
||||
* {@code true}, the temporary directories created by the test run will not
|
||||
* be removed at the end of the test run, even for tests that completed
|
||||
* successfully.
|
||||
* <p>
|
||||
* <p>
|
||||
* The default behavior (used in all other cases) is removing the temporary
|
||||
* directories for all tests which completed successfully, and preserving
|
||||
* the directories for tests which failed.</p>
|
||||
*/
|
||||
public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr.preserve-test-dir", "false"));
|
||||
|
||||
/**
|
||||
* The base test directory is the directory where generated files get placed
|
||||
* during unit test execution.
|
||||
* <p>
|
||||
* <p>
|
||||
* The default value for this property is the {@code java.io.tmpdir} system
|
||||
* property, and can be overridden by setting the
|
||||
* {@code antlr.dart-test-dir} property to a custom location. Note that the
|
||||
* {@code antlr.dart-test-dir} property directly affects the
|
||||
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
|
||||
*/
|
||||
public static final String BASE_TEST_DIR;
|
||||
|
||||
/**
|
||||
* When {@code true}, a temporary directory will be created for each test
|
||||
* executed during the test run.
|
||||
* <p>
|
||||
* <p>
|
||||
* This value is {@code true} when the {@code antlr.dart-test-dir} system
|
||||
* property is set, and otherwise {@code false}.</p>
|
||||
*/
|
||||
public static final boolean CREATE_PER_TEST_DIRECTORIES;
|
||||
|
||||
static {
|
||||
String baseTestDir = System.getProperty("antlr.dart-test-dir");
|
||||
boolean perTestDirectories = false;
|
||||
if (baseTestDir == null || baseTestDir.isEmpty()) {
|
||||
baseTestDir = System.getProperty("java.io.tmpdir");
|
||||
perTestDirectories = true;
|
||||
}
|
||||
|
||||
if (!new File(baseTestDir).isDirectory()) {
|
||||
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
|
||||
}
|
||||
|
||||
BASE_TEST_DIR = baseTestDir;
|
||||
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
|
||||
}
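/*
 * A minimal standalone sketch of the temp-directory policy documented above,
 * assuming only the two system properties named in the Javadoc; the class
 * name and main() are illustrative, not part of the ANTLR test code. Run the
 * suite with -Dantlr.preserve-test-dir=true to keep test output, and with
 * -Dantlr.dart-test-dir=<dir> to pin the base directory.
 */
class TempDirPolicySketch {
    public static void main(String[] args) {
        boolean preserve = Boolean.parseBoolean(
                System.getProperty("antlr.preserve-test-dir", "false"));
        // an explicit base dir disables per-test directories, mirroring the
        // static initializer above
        String baseTestDir = System.getProperty("antlr.dart-test-dir");
        boolean perTestDirectories = baseTestDir == null || baseTestDir.isEmpty();
        if (perTestDirectories) {
            baseTestDir = System.getProperty("java.io.tmpdir");
        }
        System.out.println(preserve + " " + baseTestDir + " " + perTestDirectories);
    }
}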
|
||||
|
||||
/**
|
||||
* Build up the full classpath we need, including the surefire path (if present)
|
||||
*/
|
||||
public static final String CLASSPATH = System.getProperty("java.class.path");
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/**
|
||||
* If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
/**
|
||||
* Errors found while running antlr
|
||||
*/
|
||||
protected StringBuilder antlrToolErrors;
|
||||
|
||||
private static String cacheDartPackages;
|
||||
|
||||
private String getPropertyPrefix() {
|
||||
public String getPropertyPrefix() {
|
||||
return "antlr-dart";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
if (CREATE_PER_TEST_DIRECTORIES) {
|
||||
// new output dir for each test
|
||||
String threadName = Thread.currentThread().getName();
|
||||
String testDirectory = getClass().getSimpleName() + "-" + threadName + "-" + System.nanoTime();
|
||||
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
|
||||
} else {
|
||||
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
|
||||
if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) {
|
||||
eraseFiles();
|
||||
}
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if (antlrToolErrors.length() == 0) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected Tool newTool(String[] args) {
|
||||
Tool tool = new Tool(args);
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if (g.atn == null) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
if (g.isLexer()) {
|
||||
f = new LexerATNFactory((LexerGrammar) g);
|
||||
} else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if (useSerializer) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if (g.ast != null && !g.ast.hasErrors) {
|
||||
// System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if (g.getImportedGrammars() != null) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public DFA createDFA(Grammar g, DecisionState s) {
|
||||
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
|
||||
// DFA dfa = conv.createDFA();
|
||||
// conv.issueAmbiguityWarnings();
|
||||
// System.out.print("DFA="+dfa);
|
||||
// return dfa;
|
||||
return null;
|
||||
}
|
||||
|
||||
// public void minimizeDFA(DFA dfa) {
|
||||
// DFAMinimizer dmin = new DFAMinimizer(dfa);
|
||||
// dfa.minimized = dmin.minimize();
|
||||
// }
|
||||
|
||||
IntegerList getTypesFromString(Grammar g, String expecting) {
|
||||
IntegerList expectingTokenTypes = new IntegerList();
|
||||
if (expecting != null && !expecting.trim().isEmpty()) {
|
||||
for (String tname : expecting.replace(" ", "").split(",")) {
|
||||
int ttype = g.getTokenType(tname);
|
||||
expectingTokenTypes.add(ttype);
|
||||
}
|
||||
}
|
||||
return expectingTokenTypes;
|
||||
}
|
||||
|
||||
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
|
||||
ANTLRInputStream in = new ANTLRInputStream(input);
|
||||
IntegerList tokenTypes = new IntegerList();
|
||||
int ttype;
|
||||
do {
|
||||
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
|
||||
tokenTypes.add(ttype);
|
||||
} while (ttype != Token.EOF);
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
public List<String> getTokenTypes(LexerGrammar lg,
|
||||
ATN atn,
|
||||
CharStream input) {
|
||||
LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[]{new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
|
||||
List<String> tokenTypes = new ArrayList<String>();
|
||||
int ttype;
|
||||
boolean hitEOF = false;
|
||||
do {
|
||||
if (hitEOF) {
|
||||
tokenTypes.add("EOF");
|
||||
break;
|
||||
}
|
||||
int t = input.LA(1);
|
||||
ttype = interp.match(input, Lexer.DEFAULT_MODE);
|
||||
if (ttype == Token.EOF) {
|
||||
tokenTypes.add("EOF");
|
||||
} else {
|
||||
tokenTypes.add(lg.typeToTokenList.get(ttype));
|
||||
}
|
||||
|
||||
if (t == IntStream.EOF) {
|
||||
hitEOF = true;
|
||||
}
|
||||
} while (ttype != Token.EOF);
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
|
||||
throws Exception {
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
|
||||
if (s == null) {
|
||||
System.err.println("no such rule: " + ruleName);
|
||||
return null;
|
||||
}
|
||||
ATNState t = s.transition(0).target;
|
||||
if (!(t instanceof DecisionState)) {
|
||||
System.out.println(ruleName + " has no decision");
|
||||
return null;
|
||||
}
|
||||
DecisionState blk = (DecisionState) t;
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
|
||||
throws Exception {
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
DecisionState blk = atn.decisionToState.get(decision);
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
|
||||
throws Exception {
|
||||
DFA dfa = createDFA(g, blk);
|
||||
String result = null;
|
||||
if (dfa != null) result = dfa.toString();
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
|
||||
throws Exception {
|
||||
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
|
||||
throws Exception {
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
LexerGrammar g = new LexerGrammar(gtext, equeue);
|
||||
g.atn = createATN(g, false);
|
||||
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
|
||||
// DFA dfa = conv.createDFA(modeName);
|
||||
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
|
||||
//
|
||||
// String result = null;
|
||||
// if ( dfa!=null ) result = dfa.toString();
|
||||
// assertEquals(expecting, result);
|
||||
//
|
||||
// return equeue.all;
|
||||
return null;
|
||||
}
|
||||
|
||||
protected String load(String fileName, String encoding)
|
||||
throws IOException {
|
||||
if (fileName == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
String fullFileName = getClass().getPackage().getName().replace('.', '/') + '/' + fileName;
|
||||
int size = 65000;
|
||||
InputStreamReader isr;
|
||||
InputStream fis = getClass().getClassLoader().getResourceAsStream(fullFileName);
|
||||
if (encoding != null) {
|
||||
isr = new InputStreamReader(fis, encoding);
|
||||
} else {
|
||||
isr = new InputStreamReader(fis);
|
||||
}
|
||||
try {
|
||||
char[] data = new char[size];
|
||||
int n = isr.read(data);
|
||||
return new String(data, 0, n);
|
||||
} finally {
|
||||
isr.close();
|
||||
}
|
||||
}
|
||||
|
||||
protected String execLexer(String grammarFileName,
|
||||
String grammarStr,
|
||||
String lexerName,
|
||||
String input) {
|
||||
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String execLexer(String grammarFileName,
|
||||
String grammarStr,
|
||||
|
@@ -385,71 +46,12 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
null,
|
||||
lexerName);
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
String output = execClass("Test", AOT_COMPILE_TESTS.contains(input));
|
||||
return output;
|
||||
}
|
||||
|
||||
public ParseTree execParser(String startRuleName, String input,
|
||||
String parserName, String lexerName)
|
||||
throws Exception {
|
||||
Pair<Parser, Lexer> pl = getParserAndLexer(input, parserName, lexerName);
|
||||
Parser parser = pl.a;
|
||||
return execStartRule(startRuleName, parser);
|
||||
}
|
||||
|
||||
public ParseTree execStartRule(String startRuleName, Parser parser)
|
||||
throws IllegalAccessException, InvocationTargetException,
|
||||
NoSuchMethodException {
|
||||
Method startRule = null;
|
||||
Object[] args = null;
|
||||
try {
|
||||
startRule = parser.getClass().getMethod(startRuleName);
|
||||
} catch (NoSuchMethodException nsme) {
|
||||
// try with int _p arg for recursive func
|
||||
startRule = parser.getClass().getMethod(startRuleName, int.class);
|
||||
args = new Integer[]{0};
|
||||
}
|
||||
ParseTree result = (ParseTree) startRule.invoke(parser, args);
|
||||
// System.out.println("parse tree = "+result.toStringTree(parser));
|
||||
return result;
|
||||
}
|
||||
|
||||
public Pair<Parser, Lexer> getParserAndLexer(String input,
|
||||
String parserName, String lexerName)
|
||||
throws Exception {
|
||||
final Class<? extends Lexer> lexerClass = loadLexerClassFromTempDir(lexerName);
|
||||
final Class<? extends Parser> parserClass = loadParserClassFromTempDir(parserName);
|
||||
|
||||
ANTLRInputStream in = new ANTLRInputStream(new StringReader(input));
|
||||
|
||||
Class<? extends Lexer> c = lexerClass.asSubclass(Lexer.class);
|
||||
Constructor<? extends Lexer> ctor = c.getConstructor(CharStream.class);
|
||||
Lexer lexer = ctor.newInstance(in);
|
||||
|
||||
Class<? extends Parser> pc = parserClass.asSubclass(Parser.class);
|
||||
Constructor<? extends Parser> pctor = pc.getConstructor(TokenStream.class);
|
||||
CommonTokenStream tokens = new CommonTokenStream(lexer);
|
||||
Parser parser = pctor.newInstance(tokens);
|
||||
return new Pair<Parser, Lexer>(parser, lexer);
|
||||
}
|
||||
|
||||
public Class<?> loadClassFromTempDir(String name) throws Exception {
|
||||
ClassLoader loader =
|
||||
new URLClassLoader(new URL[]{new File(tmpdir).toURI().toURL()},
|
||||
ClassLoader.getSystemClassLoader());
|
||||
return loader.loadClass(name);
|
||||
}
|
||||
|
||||
public Class<? extends Lexer> loadLexerClassFromTempDir(String name) throws Exception {
|
||||
return loadClassFromTempDir(name).asSubclass(Lexer.class);
|
||||
}
|
||||
|
||||
public Class<? extends Parser> loadParserClassFromTempDir(String name) throws Exception {
|
||||
return loadClassFromTempDir(name).asSubclass(Parser.class);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String execParser(String grammarFileName,
|
||||
String grammarStr,
|
||||
|
@@ -480,7 +82,7 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
lexerName,
|
||||
"-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
return rawExecRecognizer(parserName,
|
||||
lexerName,
|
||||
startRuleName,
|
||||
|
@@ -510,7 +112,7 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
boolean defaultListener,
|
||||
String... extraOptions) {
|
||||
ErrorQueue equeue =
|
||||
BaseRuntimeTest.antlrOnString(getTmpDir(), "Dart", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
BaseRuntimeTest.antlrOnString(getTempDirPath(), "Dart", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
@@ -534,17 +136,29 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
}
|
||||
|
||||
String runtime = locateRuntime();
|
||||
writeFile(tmpdir, "pubspec.yaml",
|
||||
writeFile(getTempDirPath(), "pubspec.yaml",
|
||||
"name: \"test\"\n" +
|
||||
"dependencies:\n" +
|
||||
" antlr4:\n" +
|
||||
" path: " + runtime + "\n");
|
||||
if (cacheDartPackages == null) {
|
||||
try {
|
||||
Process process = Runtime.getRuntime().exec(new String[]{locatePub(), "get"}, null, new File(tmpdir));
|
||||
final Process process = Runtime.getRuntime().exec(new String[]{locatePub(), "get"}, null, getTempTestDir());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
stderrVacuum.start();
|
||||
Timer timer = new Timer();
|
||||
timer.schedule(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
process.destroy();
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}, 30_000);
|
||||
process.waitFor();
|
||||
timer.cancel();
|
||||
stderrVacuum.join();
|
||||
String stderrDuringPubGet = stderrVacuum.toString();
|
||||
if (!stderrDuringPubGet.isEmpty()) {
|
||||
|
@@ -554,9 +168,9 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
e.printStackTrace();
|
||||
return false;
|
||||
}
|
||||
cacheDartPackages = readFile(tmpdir, ".packages");
|
||||
cacheDartPackages = readFile(getTempDirPath(), ".packages");
|
||||
} else {
|
||||
writeFile(tmpdir, ".packages", cacheDartPackages);
|
||||
writeFile(getTempDirPath(), ".packages", cacheDartPackages);
|
||||
}
|
||||
return true; // allIsWell: no compile
|
||||
}
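/*
 * A standalone sketch of the watchdog pattern used repeatedly above: destroy
 * the child process if it has not exited within 30 seconds. Class and method
 * names here are illustrative only.
 */
import java.util.Timer;
import java.util.TimerTask;

class ProcessWatchdogSketch {
    static int runWithTimeout(ProcessBuilder builder) throws Exception {
        final Process process = builder.start();
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                process.destroy(); // only has an effect if the process is still running
            }
        }, 30_000);
        int exitCode = process.waitFor(); // returns early if destroy() killed it
        timer.cancel();                   // disarm the watchdog after a normal exit
        return exitCode;
    }
}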
|
||||
|
@@ -567,7 +181,7 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
boolean debug,
|
||||
boolean profile,
|
||||
boolean aotCompile) {
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if (parserName == null) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
} else {
|
||||
|
@@ -590,11 +204,23 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
};
|
||||
String cmdLine = Utils.join(args, " ");
|
||||
System.err.println("Compile: " + cmdLine);
|
||||
Process process =
|
||||
Runtime.getRuntime().exec(args, null, new File(tmpdir));
|
||||
final Process process =
|
||||
Runtime.getRuntime().exec(args, null, getTempTestDir());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
stderrVacuum.start();
|
||||
Timer timer = new Timer();
|
||||
timer.schedule(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
process.destroy();
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}, 30_000);
|
||||
int result = process.waitFor();
|
||||
timer.cancel();
|
||||
if (result != 0) {
|
||||
stderrVacuum.join();
|
||||
System.err.print("Error compiling dart file: " + stderrVacuum.toString());
|
||||
|
@@ -604,23 +230,35 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
String[] args;
|
||||
if (compile) {
|
||||
args = new String[]{
|
||||
new File(tmpdir, className).getAbsolutePath(), new File(tmpdir, "input").getAbsolutePath()
|
||||
new File(getTempTestDir(), className).getAbsolutePath(), new File(getTempTestDir(), "input").getAbsolutePath()
|
||||
};
|
||||
} else {
|
||||
args = new String[]{
|
||||
locateDart(),
|
||||
className + ".dart", new File(tmpdir, "input").getAbsolutePath()
|
||||
className + ".dart", new File(getTempTestDir(), "input").getAbsolutePath()
|
||||
};
|
||||
}
|
||||
//String cmdLine = Utils.join(args, " ");
|
||||
//System.err.println("execParser: " + cmdLine);
|
||||
Process process =
|
||||
Runtime.getRuntime().exec(args, null, new File(tmpdir));
|
||||
final Process process =
|
||||
Runtime.getRuntime().exec(args, null, getTempTestDir());
|
||||
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
stdoutVacuum.start();
|
||||
stderrVacuum.start();
|
||||
Timer timer = new Timer();
|
||||
timer.schedule(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
process.destroy();
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}, 30_000);
|
||||
process.waitFor();
|
||||
timer.cancel();
|
||||
stdoutVacuum.join();
|
||||
stderrVacuum.join();
|
||||
String output = stdoutVacuum.toString();
|
||||
|
@@ -628,7 +266,7 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
output = null;
|
||||
}
|
||||
if (stderrVacuum.toString().length() > 0) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
}
|
||||
return output;
|
||||
} catch (Exception e) {
|
||||
|
@@ -731,187 +369,6 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
return runtimeSrc.getPath();
|
||||
}
|
||||
|
||||
private boolean isWindows() {
|
||||
return System.getProperty("os.name").toLowerCase().contains("windows");
|
||||
}
|
||||
|
||||
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
|
||||
// }
|
||||
|
||||
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
|
||||
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
|
||||
// if ( a==null ) assertNull(expectedAmbigAlts);
|
||||
// else {
|
||||
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
|
||||
// }
|
||||
// assertEquals(expectedAmbigInput, a.input);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// unreachable(msgs, 0, expectedUnreachableAlts);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
|
||||
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
|
||||
// if ( u==null ) assertNull(expectedUnreachableAlts);
|
||||
// else {
|
||||
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
|
||||
// }
|
||||
// }
|
||||
|
||||
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
|
||||
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
|
||||
for (ANTLRMessage m : msgs) {
|
||||
if (m.getClass() == c) filtered.add(m);
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
public void checkRuleATN(Grammar g, String ruleName, String expecting) {
|
||||
// DOTGenerator dot = new DOTGenerator(g);
|
||||
// System.out.println(dot.getDOT(g.atn.ruleToStartState[g.getRule(ruleName).index]));
|
||||
|
||||
Rule r = g.getRule(ruleName);
|
||||
ATNState startState = g.getATN().ruleToStartState[r.index];
|
||||
ATNPrinter serializer = new ATNPrinter(g, startState);
|
||||
String result = serializer.asString();
|
||||
|
||||
//System.out.print(result);
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
|
||||
int lp = templates.indexOf('(');
|
||||
String name = templates.substring(0, lp);
|
||||
STGroup group = new STGroupString(templates);
|
||||
ST st = group.getInstanceOf(name);
|
||||
st.add(actionName, action);
|
||||
String grammar = st.render();
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(grammar, equeue);
|
||||
if (g.ast != null && !g.ast.hasErrors) {
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
|
||||
ATNFactory factory = new ParserATNFactory(g);
|
||||
if (g.isLexer()) factory = new LexerATNFactory((LexerGrammar) g);
|
||||
g.atn = factory.createATN();
|
||||
|
||||
AnalysisPipeline anal = new AnalysisPipeline(g);
|
||||
anal.process();
|
||||
|
||||
CodeGenerator gen = new CodeGenerator(g);
|
||||
ST outputFileST = gen.generateParser(false);
|
||||
String output = outputFileST.render();
|
||||
//System.out.println(output);
|
||||
String b = "#" + actionName + "#";
|
||||
int start = output.indexOf(b);
|
||||
String e = "#end-" + actionName + "#";
|
||||
int end = output.indexOf(e);
|
||||
String snippet = output.substring(start + b.length(), end);
|
||||
assertEquals(expected, snippet);
|
||||
}
|
||||
if (equeue.size() > 0) {
|
||||
// System.err.println(equeue.toString());
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsError(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception {
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType() == expectedMessage.getErrorType()) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; " + expectedMessage.getErrorType() + " expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if (equeue.size() != 1) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception {
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.warnings.size(); i++) {
|
||||
ANTLRMessage m = equeue.warnings.get(i);
|
||||
if (m.getErrorType() == expectedMessage.getErrorType()) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; " + expectedMessage.getErrorType() + " expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if (equeue.size() != 1) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkError(ErrorQueue equeue,
|
||||
ANTLRMessage expectedMessage)
|
||||
throws Exception {
|
||||
//System.out.println("errors="+equeue);
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType() == expectedMessage.getErrorType()) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertTrue("no error; " + expectedMessage.getErrorType() + " expected", !equeue.errors.isEmpty());
|
||||
assertTrue("too many errors; " + equeue.errors, equeue.errors.size() <= 1);
|
||||
assertNotNull("couldn't find expected error: " + expectedMessage.getErrorType(), foundMsg);
|
||||
/*
|
||||
* assertTrue("error is not a GrammarSemanticsMessage", foundMsg
|
||||
* instanceof GrammarSemanticsMessage);
|
||||
*/
|
||||
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
|
||||
}
|
||||
|
||||
public static class FilteringTokenStream extends CommonTokenStream {
|
||||
public FilteringTokenStream(TokenSource src) {
|
||||
super(src);
|
||||
}
|
||||
|
||||
Set<Integer> hide = new HashSet<Integer>();
|
||||
|
||||
@Override
|
||||
protected boolean sync(int i) {
|
||||
if (!super.sync(i)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Token t = get(i);
|
||||
if (hide.contains(t.getType())) {
|
||||
((WritableToken) t).setChannel(Token.HIDDEN_CHANNEL);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public void setTokenTypeChannel(int ttype, int channel) {
|
||||
hide.add(ttype);
|
||||
}
|
||||
}
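/*
 * A usage sketch for FilteringTokenStream, assuming a generated lexer/parser
 * pair is available; wsTokenType stands for whatever token type the lexer
 * assigns to its WS rule. From outside BaseDartTest the class is reached as
 * BaseDartTest.FilteringTokenStream. Not part of the ANTLR sources.
 */
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.Token;

class FilteringTokenStreamSketch {
    static void hideWhitespace(Lexer lexer, Parser parser, int wsTokenType) {
        FilteringTokenStream tokens = new FilteringTokenStream(lexer);
        // the channel argument is ignored by setTokenTypeChannel() above;
        // sync() always moves hidden types to Token.HIDDEN_CHANNEL
        tokens.setTokenTypeChannel(wsTokenType, Token.HIDDEN_CHANNEL);
        parser.setTokenStream(tokens); // the parser now sees only default-channel tokens
    }
}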
|
||||
|
||||
protected void writeTestFile(String parserName,
|
||||
String lexerName,
|
||||
String parserStartRuleName,
|
||||
|
@@ -971,7 +428,7 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
outputFileST.add("parserName", parserName);
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
writeFile(tmpdir, "Test.dart", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.dart", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
|
||||
|
@@ -995,191 +452,7 @@ public class BaseDartTest implements RuntimeTestSupport {
|
|||
);
|
||||
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
writeFile(tmpdir, "Test.dart", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.dart", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void eraseFiles() {
|
||||
if (tmpdir == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
File tmpdirF = new File(tmpdir);
|
||||
String[] files = tmpdirF.list();
|
||||
for (int i = 0; files != null && i < files.length; i++) {
|
||||
new File(tmpdir + "/" + files[i]).delete();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void eraseTempDir() {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if (tmpdirF.exists()) {
|
||||
eraseFiles();
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
|
||||
public String getFirstLineOfException() {
|
||||
if (this.stderrDuringParse == null) {
|
||||
return null;
|
||||
}
|
||||
String[] lines = this.stderrDuringParse.split("\n");
|
||||
String prefix = "Exception in thread \"main\" ";
|
||||
return lines[0].substring(prefix.length(), lines[0].length());
|
||||
}
|
||||
|
||||
/**
|
||||
* When looking at a result set that consists of a Map/HashTable
|
||||
* we cannot rely on the output order, as the hashing algorithm or other aspects
|
||||
* of the implementation may be different on different JDKs or platforms. Hence
|
||||
* we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a
|
||||
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
|
||||
* the keys are strings.
|
||||
*
|
||||
* @param m The Map that contains keys we wish to return in sorted order
|
||||
* @return A string that represents all the keys in sorted order.
|
||||
*/
|
||||
public <K, V> String sortMapToString(Map<K, V> m) {
|
||||
// Pass in crap, and get nothing back
|
||||
//
|
||||
if (m == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
System.out.println("Map toString looks like: " + m.toString());
|
||||
|
||||
// Sort the keys in the Map
|
||||
//
|
||||
TreeMap<K, V> nset = new TreeMap<K, V>(m);
|
||||
|
||||
System.out.println("Tree map looks like: " + nset.toString());
|
||||
return nset.toString();
|
||||
}
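/*
 * The TreeMap trick from sortMapToString() in isolation: HashMap iteration
 * order is unspecified, but re-wrapping the map in a TreeMap orders entries
 * by key, so the rendered string is identical on every JDK. Illustrative only.
 */
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;

class SortMapToStringSketch {
    public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<>();
        m.put("b", 2);
        m.put("a", 1);
        System.out.println(new TreeMap<>(m)); // always prints {a=1, b=2}
    }
}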
|
||||
|
||||
public List<String> realElements(List<String> elements) {
|
||||
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String message, String text) {
|
||||
assertNotNull(message, text);
|
||||
assertFalse(message, text.isEmpty());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String text) {
|
||||
assertNotNull(text);
|
||||
assertFalse(text.isEmpty());
|
||||
}
|
||||
|
||||
public static class IntTokenStream implements TokenStream {
|
||||
public IntegerList types;
|
||||
int p = 0;
|
||||
|
||||
public IntTokenStream(IntegerList types) {
|
||||
this.types = types;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void consume() {
|
||||
p++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int LA(int i) {
|
||||
return LT(i).getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int mark() {
|
||||
return index();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int index() {
|
||||
return p;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release(int marker) {
|
||||
seek(marker);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seek(int index) {
|
||||
p = index;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return types.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSourceName() {
|
||||
return UNKNOWN_SOURCE_NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token LT(int i) {
|
||||
CommonToken t;
|
||||
int rawIndex = p + i - 1;
|
||||
if (rawIndex >= types.size()) t = new CommonToken(Token.EOF);
|
||||
else t = new CommonToken(types.get(rawIndex));
|
||||
t.setTokenIndex(rawIndex);
|
||||
return t;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token get(int i) {
|
||||
return new CommonToken(types.get(i));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenSource getTokenSource() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText() {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Interval interval) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(RuleContext ctx) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Token start, Token stop) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
}
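/*
 * A usage sketch for IntTokenStream, assuming the ANTLR runtime is on the
 * classpath; from outside BaseDartTest the class is reached as
 * BaseDartTest.IntTokenStream. Indexing follows LT(i) above:
 * rawIndex = p + i - 1, and anything past the list becomes a synthesized EOF.
 */
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.misc.IntegerList;

class IntTokenStreamSketch {
    public static void main(String[] args) {
        IntegerList types = new IntegerList();
        types.add(5);
        types.add(7);
        IntTokenStream ts = new IntTokenStream(types);
        System.out.println(ts.LA(1));              // 5  (p=0, rawIndex=0)
        ts.consume();                              // p=1
        System.out.println(ts.LA(1));              // 7  (rawIndex=1)
        System.out.println(ts.LA(2) == Token.EOF); // true: rawIndex=2 is past the end
    }
}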
|
||||
|
||||
/**
|
||||
* Sort a list
|
||||
*/
|
||||
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
|
||||
List<T> dup = new ArrayList<T>();
|
||||
dup.addAll(data);
|
||||
Collections.sort(dup);
|
||||
return dup;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return map sorted by key
|
||||
*/
|
||||
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
|
||||
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -5,92 +5,36 @@
|
|||
*/
|
||||
package org.antlr.v4.test.runtime.go;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.ATNFactory;
|
||||
import org.antlr.v4.automata.ATNPrinter;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.runtime.ANTLRInputStream;
|
||||
import org.antlr.v4.runtime.CharStream;
|
||||
import org.antlr.v4.runtime.CommonToken;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.IntStream;
|
||||
import org.antlr.v4.runtime.Lexer;
|
||||
import org.antlr.v4.runtime.RuleContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.TokenStream;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.runtime.atn.ATNState;
|
||||
import org.antlr.v4.runtime.atn.LexerATNSimulator;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.DOTGenerator;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
import org.antlr.v4.tool.Rule;
|
||||
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.stringtemplate.v4.STGroup;
|
||||
import org.stringtemplate.v4.STGroupString;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileFilter;
|
||||
import java.io.FileInputStream;
|
||||
import java.io.FileOutputStream;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.OutputStream;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import static junit.framework.TestCase.assertEquals;
|
||||
import static junit.framework.TestCase.assertFalse;
|
||||
import static junit.framework.TestCase.assertNotNull;
|
||||
import static junit.framework.TestCase.assertTrue;
|
||||
import static junit.framework.TestCase.*;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
|
||||
public class BaseGoTest implements RuntimeTestSupport {
|
||||
public File overall_tmpdir = null;
|
||||
public File tmpdir = null; // this is where the parser package is stored, typically inside the tmpdir
|
||||
public class BaseGoTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
private static File tmpGopath = null;
|
||||
private static final String GO_RUNTIME_IMPORT_PATH = "github.com/antlr/antlr4/runtime/Go/antlr"; // TODO: Change this before merging with upstream
|
||||
|
||||
/**
|
||||
* If error during parser execution, store stderr here; can't return stdout
|
||||
* and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
private File parserTempDir; // "parser" with tempDir
|
||||
|
||||
/** Errors found while running antlr */
|
||||
protected StringBuilder antlrToolErrors;
|
||||
@Override
|
||||
protected String getPropertyPrefix() {
|
||||
return "antlr4-go";
|
||||
}
|
||||
|
||||
/**
|
||||
* Copies all files from go runtime to a temporary folder that is inside a valid GOPATH project structure.
|
||||
*/
|
||||
public static void groupSetUp() throws Exception {
|
||||
tmpGopath = new File(System.getProperty("java.io.tmpdir"), "antlr-goruntime-tmpgopath-"
|
||||
+ Long.toHexString(System.currentTimeMillis()));
|
||||
tmpGopath = new File(System.getProperty("java.io.tmpdir"), "antlr-goruntime-tmpgopath-" + Long.toHexString(System.currentTimeMillis()));
|
||||
|
||||
ArrayList<String> pathsegments = new ArrayList<String>();
|
||||
pathsegments.add("src");
|
||||
|
@@ -110,39 +54,12 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
}
|
||||
for (File runtimeFile : runtimeFiles) {
|
||||
File dest = new File(tmpPackageDir, runtimeFile.getName());
|
||||
copyFile(runtimeFile, dest);
|
||||
RuntimeTestUtils.copyFile(runtimeFile, dest);
|
||||
}
|
||||
|
||||
cacheGoRuntime(tmpPackageDir);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir.getPath();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if ( antlrToolErrors.length()==0 ) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
public static void groupTearDown() throws Exception {
|
||||
eraseDirectory(tmpGopath);
|
||||
}
|
||||
|
@@ -163,139 +80,22 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
}
|
||||
}
|
||||
|
||||
private static void copyFile(File source, File dest) throws IOException {
|
||||
InputStream is = new FileInputStream(source);
|
||||
OutputStream os = new FileOutputStream(dest);
|
||||
byte[] buf = new byte[4 << 10];
|
||||
int l;
|
||||
while ((l = is.read(buf)) > -1) {
|
||||
os.write(buf, 0, l);
|
||||
}
|
||||
is.close();
|
||||
os.close();
|
||||
}
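/*
 * A possible simplification of copyFile() above using java.nio, kept here as
 * an illustrative alternative only; like the manual stream copy, it overwrites
 * an existing destination file.
 */
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.StandardCopyOption;

class CopyFileSketch {
    static void copyFile(File source, File dest) throws IOException {
        Files.copy(source.toPath(), dest.toPath(), StandardCopyOption.REPLACE_EXISTING);
    }
}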
|
||||
|
||||
public void testSetUp() throws Exception {
|
||||
// new output dir for each test
|
||||
String prop = System.getProperty("antlr-go-test-dir");
|
||||
if (prop != null && prop.length() > 0) {
|
||||
overall_tmpdir = new File(prop);
|
||||
}
|
||||
else {
|
||||
String threadName = Thread.currentThread().getName();
|
||||
overall_tmpdir = new File(System.getProperty("java.io.tmpdir"),
|
||||
getClass().getSimpleName()+"-"+threadName+"-"+System.currentTimeMillis());
|
||||
}
|
||||
|
||||
if ( overall_tmpdir.exists())
|
||||
this.eraseDirectory(overall_tmpdir);
|
||||
|
||||
tmpdir = new File(overall_tmpdir, "parser");
|
||||
|
||||
if ( tmpdir.exists()) {
|
||||
this.eraseDirectory(tmpdir);
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
eraseParserTempDir();
|
||||
super.testSetUp();
|
||||
parserTempDir = new File(getTempTestDir(), "parser");
|
||||
}
|
||||
|
||||
protected org.antlr.v4.Tool newTool(String[] args) {
|
||||
return new Tool(args);
|
||||
@Override
|
||||
public File getTempParserDir() {
|
||||
return parserTempDir;
|
||||
}
|
||||
|
||||
protected Tool newTool() {
|
||||
return new Tool(new String[]{"-o", tmpdir.getPath()});
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if (g.atn == null) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
if (g.isLexer()) {
|
||||
f = new LexerATNFactory((LexerGrammar) g);
|
||||
}
|
||||
else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
private void eraseParserTempDir() {
|
||||
if(parserTempDir != null) {
|
||||
eraseDirectory(parserTempDir);
|
||||
parserTempDir = null;
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if (useSerializer) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if (g.ast != null && !g.ast.hasErrors) {
|
||||
System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if (g.getImportedGrammars() != null) { // process imported grammars
|
||||
// (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
IntegerList getTypesFromString(Grammar g, String expecting) {
|
||||
IntegerList expectingTokenTypes = new IntegerList();
|
||||
if (expecting != null && !expecting.trim().isEmpty()) {
|
||||
for (String tname : expecting.replace(" ", "").split(",")) {
|
||||
int ttype = g.getTokenType(tname);
|
||||
expectingTokenTypes.add(ttype);
|
||||
}
|
||||
}
|
||||
return expectingTokenTypes;
|
||||
}
|
||||
|
||||
public IntegerList getTokenTypesViaATN(String input,
|
||||
LexerATNSimulator lexerATN) {
|
||||
ANTLRInputStream in = new ANTLRInputStream(input);
|
||||
IntegerList tokenTypes = new IntegerList();
|
||||
int ttype;
|
||||
do {
|
||||
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
|
||||
tokenTypes.add(ttype);
|
||||
} while (ttype != Token.EOF);
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
|
||||
LexerATNSimulator interp = new LexerATNSimulator(atn,
|
||||
new DFA[] { new DFA(
|
||||
atn.modeToStartState.get(Lexer.DEFAULT_MODE)) }, null);
|
||||
List<String> tokenTypes = new ArrayList<String>();
|
||||
int ttype;
|
||||
boolean hitEOF = false;
|
||||
do {
|
||||
if (hitEOF) {
|
||||
tokenTypes.add("EOF");
|
||||
break;
|
||||
}
|
||||
int t = input.LA(1);
|
||||
ttype = interp.match(input, Lexer.DEFAULT_MODE);
|
||||
if (ttype == Token.EOF) {
|
||||
tokenTypes.add("EOF");
|
||||
}
|
||||
else {
|
||||
tokenTypes.add(lg.typeToTokenList.get(ttype));
|
||||
}
|
||||
|
||||
if (t == IntStream.EOF) {
|
||||
hitEOF = true;
|
||||
}
|
||||
} while (ttype != Token.EOF);
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
protected String execLexer(String grammarFileName, String grammarStr,
|
||||
|
@@ -309,25 +109,10 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
|
||||
grammarStr, null, lexerName, "-no-listener");
|
||||
assertTrue(success);
|
||||
writeFile(overall_tmpdir.toString(), "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
String output = execModule("Test.go");
|
||||
return output;
|
||||
return execModule("Test.go");
|
||||
}
|
||||
//
|
||||
// public String execParser(String grammarFileName, String grammarStr,
|
||||
// String parserName, String lexerName, String listenerName,
|
||||
// String visitorName, String startRuleName, String input,
|
||||
// boolean debug)
|
||||
// {
|
||||
// boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
|
||||
// grammarStr, parserName, lexerName, "-visitor");
|
||||
// assertTrue(success);
|
||||
// writeFile(overall_tmpdir, "input", input);
|
||||
// rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
|
||||
// visitorName, startRuleName, debug);
|
||||
// return execRecognizer();
|
||||
// }
|
||||
|
||||
@Override
|
||||
public String execParser(String grammarFileName, String grammarStr,
|
||||
|
@@ -338,7 +123,7 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
|
||||
grammarStr, parserName, lexerName, "-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(overall_tmpdir.toString(), "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
|
||||
visitorName, startRuleName, showDiagnosticErrors);
|
||||
return execRecognizer();
|
||||
|
@@ -356,7 +141,7 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
protected boolean rawGenerateAndBuildRecognizer(String grammarFileName,
|
||||
String grammarStr, String parserName, String lexerName,
|
||||
boolean defaultListener, String... extraOptions) {
|
||||
ErrorQueue equeue = antlrOnString(getTmpDir(), "Go", grammarFileName, grammarStr,
|
||||
ErrorQueue equeue = antlrOnString(getTempParserDirPath(), "Go", grammarFileName, grammarStr,
|
||||
defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
|
@@ -367,7 +152,7 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
protected void rawBuildRecognizerTestFile(String parserName,
|
||||
String lexerName, String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug) {
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if (parserName == null) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
}
|
||||
|
@@ -383,12 +168,12 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
|
||||
public String execModule(String fileName) {
|
||||
String goExecutable = locateGo();
|
||||
String modulePath = new File(overall_tmpdir, fileName).getAbsolutePath();
|
||||
String inputPath = new File(overall_tmpdir, "input").getAbsolutePath();
|
||||
String modulePath = new File(getTempTestDir(), fileName).getAbsolutePath();
|
||||
String inputPath = new File(getTempTestDir(), "input").getAbsolutePath();
|
||||
try {
|
||||
ProcessBuilder builder = new ProcessBuilder(goExecutable, "run", modulePath, inputPath);
|
||||
builder.environment().put("GOPATH", tmpGopath.getPath());
|
||||
builder.directory(overall_tmpdir);
|
||||
builder.directory(getTempTestDir());
|
||||
Process process = builder.start();
|
||||
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
|
@@ -402,7 +187,7 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
output = null;
|
||||
}
|
||||
if (stderrVacuum.toString().length() > 0) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
@@ -469,203 +254,6 @@ public class BaseGoTest implements RuntimeTestSupport {
|
|||
return runtimeDir;
|
||||
}
|
||||
|
||||
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String
|
||||
// expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
|
||||
// }
|
||||
|
||||
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String
|
||||
// expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
|
||||
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
|
||||
// if ( a==null ) assertNull(expectedAmbigAlts);
|
||||
// else {
|
||||
// assertEquals(a.conflictingAlts.toString(),
|
||||
// Arrays.toString(expectedAmbigAlts));
|
||||
// }
|
||||
// assertEquals(expectedAmbigInput, a.input);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// unreachable(msgs, 0, expectedUnreachableAlts);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int i, int[]
|
||||
// expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs,
|
||||
// UnreachableAltsMessage.class);
|
||||
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
|
||||
// if ( u==null ) assertNull(expectedUnreachableAlts);
|
||||
// else {
|
||||
// assertEquals(u.conflictingAlts.toString(),
|
||||
// Arrays.toString(expectedUnreachableAlts));
|
||||
// }
|
||||
// }
|
||||
|
||||
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs,
|
||||
Class<? extends ANTLRMessage> c) {
|
||||
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
|
||||
for (ANTLRMessage m : msgs) {
|
||||
if (m.getClass() == c)
|
||||
filtered.add(m);
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
void checkRuleATN(Grammar g, String ruleName, String expecting) {
|
||||
ParserATNFactory f = new ParserATNFactory(g);
|
||||
ATN atn = f.createATN();
|
||||
|
||||
DOTGenerator dot = new DOTGenerator(g);
|
||||
System.out
|
||||
.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
|
||||
|
||||
Rule r = g.getRule(ruleName);
|
||||
ATNState startState = atn.ruleToStartState[r.index];
|
||||
ATNPrinter serializer = new ATNPrinter(g, startState);
|
||||
String result = serializer.asString();
|
||||
|
||||
// System.out.print(result);
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
public void testActions(String templates, String actionName, String action,
|
||||
String expected) throws org.antlr.runtime.RecognitionException {
|
||||
int lp = templates.indexOf('(');
|
||||
String name = templates.substring(0, lp);
|
||||
STGroup group = new STGroupString(templates);
|
||||
ST st = group.getInstanceOf(name);
|
||||
st.add(actionName, action);
|
||||
String grammar = st.render();
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(grammar, equeue);
|
||||
if (g.ast != null && !g.ast.hasErrors) {
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
|
||||
ATNFactory factory = new ParserATNFactory(g);
|
||||
if (g.isLexer())
|
||||
factory = new LexerATNFactory((LexerGrammar) g);
|
||||
g.atn = factory.createATN();
|
||||
|
||||
CodeGenerator gen = new CodeGenerator(g);
|
||||
ST outputFileST = gen.generateParser();
|
||||
String output = outputFileST.render();
|
||||
// System.out.println(output);
|
||||
String b = "#" + actionName + "#";
|
||||
int start = output.indexOf(b);
|
||||
String e = "#end-" + actionName + "#";
|
||||
int end = output.indexOf(e);
|
||||
String snippet = output.substring(start + b.length(), end);
|
||||
assertEquals(expected, snippet);
|
||||
}
|
||||
if (equeue.size() > 0) {
|
||||
System.err.println(equeue.toString());
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsError(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage) throws Exception {
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType() == expectedMessage.getErrorType()) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; " + expectedMessage.getErrorType()
|
||||
+ " expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()),
|
||||
Arrays.toString(foundMsg.getArgs()));
|
||||
if (equeue.size() != 1) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage) throws Exception {
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.warnings.size(); i++) {
|
||||
ANTLRMessage m = equeue.warnings.get(i);
|
||||
if (m.getErrorType() == expectedMessage.getErrorType()) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; " + expectedMessage.getErrorType()
|
||||
+ " expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()),
|
||||
Arrays.toString(foundMsg.getArgs()));
|
||||
if (equeue.size() != 1) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkError(ErrorQueue equeue, ANTLRMessage expectedMessage)
|
||||
throws Exception {
|
||||
// System.out.println("errors="+equeue);
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType() == expectedMessage.getErrorType()) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertTrue("no error; " + expectedMessage.getErrorType() + " expected",
|
||||
!equeue.errors.isEmpty());
|
||||
assertTrue("too many errors; " + equeue.errors,
|
||||
equeue.errors.size() <= 1);
|
||||
assertNotNull(
|
||||
"couldn't find expected error: "
|
||||
+ expectedMessage.getErrorType(), foundMsg);
|
||||
/*
|
||||
* assertTrue("error is not a GrammarSemanticsMessage", foundMsg
|
||||
* instanceof GrammarSemanticsMessage);
|
||||
*/
|
||||
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
|
||||
}
|
||||
|
||||
public static class FilteringTokenStream extends CommonTokenStream {
|
||||
public FilteringTokenStream(TokenSource src) {
|
||||
super(src);
|
||||
}
|
||||
|
||||
Set<Integer> hide = new HashSet<Integer>();
|
||||
|
||||
@Override
|
||||
protected boolean sync(int i) {
|
||||
if (!super.sync(i)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Token t = get(i);
|
||||
if (hide.contains(t.getType())) {
|
||||
((WritableToken) t).setChannel(Token.HIDDEN_CHANNEL);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
public void setTokenTypeChannel(int ttype, int channel) {
|
||||
hide.add(ttype);
|
||||
}
|
||||
}
|
||||
|
||||
protected void mkdir(File dir) {
|
||||
dir.mkdirs();
|
||||
}
|
||||
|
||||
protected void writeParserTestFile(String parserName, String lexerName,
|
||||
String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug) {
|
||||
|
@@ -723,7 +311,7 @@ public class BaseGoTest implements RuntimeTestSupport {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName.substring(0, 1).toUpperCase() + parserStartRuleName.substring(1) );
writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
writeFile(getTempDirPath(), "Test.go", outputFileST.render());
}

@@ -755,222 +343,7 @@ public class BaseGoTest implements RuntimeTestSupport {
+ "}\n"
+ "\n");
outputFileST.add("lexerName", lexerName);
writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
writeFile(getTempDirPath(), "Test.go", outputFileST.render());
}
|
||||
|
||||
public void writeRecognizer(String parserName, String lexerName,
|
||||
String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug) {
|
||||
if (parserName == null) {
|
||||
writeLexerTestFile(lexerName, debug);
|
||||
}
|
||||
else {
|
||||
writeParserTestFile(parserName, lexerName, listenerName,
|
||||
visitorName, parserStartRuleName, debug);
|
||||
}
|
||||
}
|
||||
|
||||
protected void eraseFilesEndingWith(final String filesEndingWith) {
|
||||
File[] files = overall_tmpdir.listFiles(new FileFilter() {
|
||||
@Override
|
||||
public boolean accept(File pathname) {
|
||||
return pathname.getName().endsWith(filesEndingWith);
|
||||
}
|
||||
});
|
||||
for (File file : files) {
|
||||
file.delete();
|
||||
}
|
||||
}
|
||||
|
||||
protected static void eraseDirectory(File dir) {
|
||||
File[] files = dir.listFiles();
|
||||
if (files != null) {
|
||||
for (File file : files) {
|
||||
if (file.isDirectory()) {
|
||||
eraseDirectory(file);
|
||||
}
|
||||
else {
|
||||
file.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
dir.delete();
|
||||
}
|
||||
|
||||
public void eraseTempDir() {
|
||||
boolean doErase = true;
|
||||
String propName = "antlr-go-erase-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if (prop != null && prop.length() > 0)
|
||||
doErase = Boolean.getBoolean(prop);
|
||||
if (doErase) {
|
||||
if ( overall_tmpdir.exists()) {
|
||||
eraseDirectory(overall_tmpdir);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public String getFirstLineOfException() {
|
||||
if (this.stderrDuringParse == null) {
|
||||
return null;
|
||||
}
|
||||
String[] lines = this.stderrDuringParse.split("\n");
|
||||
String prefix = "Exception in thread \"main\" ";
|
||||
return lines[0].substring(prefix.length(), lines[0].length());
|
||||
}
|
||||
|
||||
/**
|
||||
* When looking at a result set that consists of a Map/HashTable we cannot
|
||||
* rely on the output order, as the hashing algorithm or other aspects of
|
||||
* the implementation may be different on different JDKs or platforms. Hence
|
||||
* we take the Map, convert the keys to a List, sort them and Stringify the
|
||||
* Map, which is a bit of a hack, but guarantees that we get the same order
|
||||
* on all systems. We assume that the keys are strings.
|
||||
*
|
||||
* @param m
|
||||
* The Map that contains keys we wish to return in sorted order
|
||||
* @return A string that represents all the keys in sorted order.
|
||||
*/
|
||||
public <K, V> String sortMapToString(Map<K, V> m) {
|
||||
// Pass in crap, and get nothing back
|
||||
//
|
||||
if (m == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
System.out.println("Map toString looks like: " + m.toString());
|
||||
|
||||
// Sort the keys in the Map
|
||||
//
|
||||
TreeMap<K, V> nset = new TreeMap<K, V>(m);
|
||||
|
||||
System.out.println("Tree map looks like: " + nset.toString());
|
||||
return nset.toString();
|
||||
}
|
||||
|
||||
public List<String> realElements(List<String> elements) {
|
||||
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String message, String text) {
|
||||
assertNotNull(message, text);
|
||||
assertFalse(message, text.isEmpty());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String text) {
|
||||
assertNotNull(text);
|
||||
assertFalse(text.isEmpty());
|
||||
}
|
||||
|
||||
public static class IntTokenStream implements TokenStream {
|
||||
IntegerList types;
|
||||
int p = 0;
|
||||
|
||||
public IntTokenStream(IntegerList types) {
|
||||
this.types = types;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void consume() {
|
||||
p++;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int LA(int i) {
|
||||
return LT(i).getType();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int mark() {
|
||||
return index();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int index() {
|
||||
return p;
|
||||
}
|
||||
|
||||
@Override
|
||||
public void release(int marker) {
|
||||
seek(marker);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seek(int index) {
|
||||
p = index;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return types.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSourceName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token LT(int i) {
|
||||
CommonToken t;
|
||||
int rawIndex = p + i - 1;
|
||||
if (rawIndex >= types.size())
|
||||
t = new CommonToken(Token.EOF);
|
||||
else
|
||||
t = new CommonToken(types.get(rawIndex));
|
||||
t.setTokenIndex(rawIndex);
|
||||
return t;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token get(int i) {
|
||||
return new org.antlr.v4.runtime.CommonToken(types.get(i));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenSource getTokenSource() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText() {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Interval interval) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(RuleContext ctx) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Token start, Token stop) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
}
|
||||
|
||||
/** Sort a list */
|
||||
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
|
||||
List<T> dup = new ArrayList<T>();
|
||||
dup.addAll(data);
|
||||
Collections.sort(dup);
|
||||
return dup;
|
||||
}
|
||||
|
||||
/** Return map sorted by key */
|
||||
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(
|
||||
Map<K, V> data) {
|
||||
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
}
}
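Throughout these diffs, per-class fields such as `tmpdir`, `overall_tmpdir` and `stderrDuringParse` are replaced by calls to `getTempDirPath()`, `getTempTestDir()` and `setParseErrors()`, and each test harness now extends `BaseRuntimeTestSupport`. The body of that base class is not part of this excerpt, so the following is only a minimal sketch, assuming the accessor names visible in the diffs; the real implementation in the ANTLR repository may differ.

```java
// Hypothetical sketch only: the real BaseRuntimeTestSupport is not shown in this diff.
import java.io.File;
import java.nio.file.Files;
import org.antlr.v4.test.runtime.RuntimeTestSupport;

public abstract class BaseRuntimeTestSupport implements RuntimeTestSupport {
	private File tempTestDir;    // per-test working directory
	private String parseErrors;  // stderr captured while the generated recognizer ran

	@Override
	public void testSetUp() throws Exception {
		// one fresh directory per test, named after the concrete subclass (BaseGoTest, BaseJavaTest, ...)
		tempTestDir = Files.createTempDirectory(getClass().getSimpleName()).toFile();
	}

	public File getTempTestDir() {
		return tempTestDir;
	}

	public String getTempDirPath() {
		return tempTestDir.getAbsolutePath();
	}

	protected void setParseErrors(String errors) {
		this.parseErrors = errors;
	}

	@Override
	public String getParseErrors() {
		return parseErrors;
	}
}
```

Centralizing the temp-directory and parse-error bookkeeping in one place is what lets the Go, Java, JavaScript and PHP harnesses below each drop their own copies of those fields.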

@@ -5,7 +5,6 @@
 */
package org.antlr.v4.test.runtime.java;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.analysis.AnalysisPipeline;
|
||||
import org.antlr.v4.automata.ATNFactory;
|
||||
import org.antlr.v4.automata.ATNPrinter;
|
||||
|
@ -14,32 +13,16 @@ import org.antlr.v4.automata.ParserATNFactory;
|
|||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.runtime.ANTLRInputStream;
|
||||
import org.antlr.v4.runtime.CharStream;
|
||||
import org.antlr.v4.runtime.CommonToken;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.IntStream;
|
||||
import org.antlr.v4.runtime.Lexer;
|
||||
import org.antlr.v4.runtime.Parser;
|
||||
import org.antlr.v4.runtime.RuleContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.TokenStream;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.runtime.atn.ATNState;
|
||||
import org.antlr.v4.runtime.atn.DecisionState;
|
||||
import org.antlr.v4.runtime.atn.LexerATNSimulator;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.runtime.misc.Pair;
|
||||
import org.antlr.v4.runtime.tree.ParseTree;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.BaseRuntimeTest;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
|
@ -68,13 +51,9 @@ import java.net.URL;
|
|||
import java.net.URLClassLoader;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import static junit.framework.TestCase.assertEquals;
|
||||
import static junit.framework.TestCase.assertFalse;
|
||||
|
@ -83,9 +62,7 @@ import static junit.framework.TestCase.assertTrue;
|
|||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
|
||||
public class BaseJavaTest implements RuntimeTestSupport {
public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");
public class BaseJavaTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {

/**
|
||||
* When the {@code antlr.testinprocess} runtime property is set to
|
||||
|
@ -103,289 +80,14 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
*/
|
||||
public static final boolean TEST_IN_SAME_PROCESS = Boolean.parseBoolean(System.getProperty("antlr.testinprocess"));
|
||||
|
||||
/**
|
||||
* When the {@code antlr.preserve-test-dir} runtime property is set to
|
||||
* {@code true}, the temporary directories created by the test run will not
|
||||
* be removed at the end of the test run, even for tests that completed
|
||||
* successfully.
|
||||
* <p>
|
||||
* <p>
|
||||
* The default behavior (used in all other cases) is removing the temporary
|
||||
* directories for all tests which completed successfully, and preserving
|
||||
* the directories for tests which failed.</p>
|
||||
*/
|
||||
public static final boolean PRESERVE_TEST_DIR = true; //Boolean.parseBoolean(System.getProperty("antlr.preserve-test-dir"));
|
||||
|
||||
/**
|
||||
* The base test directory is the directory where generated files get placed
|
||||
* during unit test execution.
|
||||
* <p>
|
||||
* <p>
|
||||
* The default value for this property is the {@code java.io.tmpdir} system
|
||||
* property, and can be overridden by setting the
|
||||
* {@code antlr.java-test-dir} property to a custom location. Note that the
|
||||
* {@code antlr.java-test-dir} property directly affects the
|
||||
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
|
||||
*/
|
||||
public static final String BASE_TEST_DIR;
|
||||
|
||||
/**
|
||||
* When {@code true}, a temporary directory will be created for each test
|
||||
* executed during the test run.
|
||||
* <p>
|
||||
* <p>
|
||||
* This value is {@code true} when the {@code antlr.java-test-dir} system
|
||||
* property is set, and otherwise {@code false}.</p>
|
||||
*/
|
||||
public static final boolean CREATE_PER_TEST_DIRECTORIES;
|
||||
|
||||
static {
|
||||
String baseTestDir = System.getProperty("antlr.java-test-dir");
|
||||
boolean perTestDirectories = false;
|
||||
if ( baseTestDir==null || baseTestDir.isEmpty() ) {
|
||||
baseTestDir = System.getProperty("java.io.tmpdir");
|
||||
perTestDirectories = true;
|
||||
}
|
||||
|
||||
if ( !new File(baseTestDir).isDirectory() ) {
|
||||
throw new UnsupportedOperationException("The specified base test directory does not exist: "+baseTestDir);
|
||||
}
|
||||
|
||||
BASE_TEST_DIR = baseTestDir;
|
||||
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
|
||||
}
|
||||
|
||||
/**
|
||||
* Build up the full classpath we need, including the surefire path (if present)
|
||||
*/
|
||||
public static final String CLASSPATH = System.getProperty("java.class.path");
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/**
|
||||
* If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
/**
|
||||
* Errors found while running antlr
|
||||
*/
|
||||
protected StringBuilder antlrToolErrors;
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
// STGroup.verbose = true;
|
||||
// System.err.println("testSetUp "+Thread.currentThread().getName());
|
||||
if ( CREATE_PER_TEST_DIRECTORIES ) {
|
||||
// new output dir for each test
|
||||
String threadName = Thread.currentThread().getName();
|
||||
String testDirectory = getClass().getSimpleName()+"-"+threadName+"-"+System.nanoTime();
|
||||
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
|
||||
}
|
||||
else {
|
||||
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
|
||||
if ( !PRESERVE_TEST_DIR && new File(tmpdir).exists() ) {
|
||||
eraseFiles();
|
||||
}
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if ( antlrToolErrors.length()==0 ) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected org.antlr.v4.Tool newTool(String[] args) {
|
||||
Tool tool = new Tool(args);
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if ( g.atn==null ) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
if ( g.isLexer() ) {
|
||||
f = new LexerATNFactory((LexerGrammar) g);
|
||||
}
|
||||
else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if ( useSerializer ) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
// System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public DFA createDFA(Grammar g, DecisionState s) {
|
||||
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
|
||||
// DFA dfa = conv.createDFA();
|
||||
// conv.issueAmbiguityWarnings();
|
||||
// System.out.print("DFA="+dfa);
|
||||
// return dfa;
|
||||
return null;
|
||||
}
|
||||
|
||||
// public void minimizeDFA(DFA dfa) {
|
||||
// DFAMinimizer dmin = new DFAMinimizer(dfa);
|
||||
// dfa.minimized = dmin.minimize();
|
||||
// }
|
||||
|
||||
IntegerList getTypesFromString(Grammar g, String expecting) {
|
||||
IntegerList expectingTokenTypes = new IntegerList();
|
||||
if ( expecting!=null && !expecting.trim().isEmpty() ) {
|
||||
for (String tname : expecting.replace(" ", "").split(",")) {
|
||||
int ttype = g.getTokenType(tname);
|
||||
expectingTokenTypes.add(ttype);
|
||||
}
|
||||
}
|
||||
return expectingTokenTypes;
|
||||
}
|
||||
|
||||
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
|
||||
ANTLRInputStream in = new ANTLRInputStream(input);
|
||||
IntegerList tokenTypes = new IntegerList();
|
||||
int ttype;
|
||||
do {
|
||||
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
|
||||
tokenTypes.add(ttype);
|
||||
} while ( ttype!=Token.EOF );
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
public List<String> getTokenTypes(LexerGrammar lg,
|
||||
ATN atn,
|
||||
CharStream input) {
|
||||
LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[]{new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
|
||||
List<String> tokenTypes = new ArrayList<String>();
|
||||
int ttype;
|
||||
boolean hitEOF = false;
|
||||
do {
|
||||
if ( hitEOF ) {
|
||||
tokenTypes.add("EOF");
|
||||
break;
|
||||
}
|
||||
int t = input.LA(1);
|
||||
ttype = interp.match(input, Lexer.DEFAULT_MODE);
|
||||
if ( ttype==Token.EOF ) {
|
||||
tokenTypes.add("EOF");
|
||||
}
|
||||
else {
|
||||
tokenTypes.add(lg.typeToTokenList.get(ttype));
|
||||
}
|
||||
|
||||
if ( t==IntStream.EOF ) {
|
||||
hitEOF = true;
|
||||
}
|
||||
} while ( ttype!=Token.EOF );
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
|
||||
throws Exception {
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
|
||||
if ( s==null ) {
|
||||
System.err.println("no such rule: "+ruleName);
|
||||
return null;
|
||||
}
|
||||
ATNState t = s.transition(0).target;
|
||||
if ( !(t instanceof DecisionState) ) {
|
||||
System.out.println(ruleName+" has no decision");
|
||||
return null;
|
||||
}
|
||||
DecisionState blk = (DecisionState) t;
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
|
||||
throws Exception {
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
DecisionState blk = atn.decisionToState.get(decision);
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
|
||||
throws Exception {
|
||||
DFA dfa = createDFA(g, blk);
|
||||
String result = null;
|
||||
if ( dfa!=null ) result = dfa.toString();
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
|
||||
throws Exception {
|
||||
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
|
||||
throws Exception {
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
LexerGrammar g = new LexerGrammar(gtext, equeue);
|
||||
g.atn = createATN(g, false);
|
||||
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
|
||||
// DFA dfa = conv.createDFA(modeName);
|
||||
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
|
||||
//
|
||||
// String result = null;
|
||||
// if ( dfa!=null ) result = dfa.toString();
|
||||
// assertEquals(expecting, result);
|
||||
//
|
||||
// return equeue.all;
|
||||
return null;
|
||||
protected String getPropertyPrefix() {
|
||||
return "antrl4-java";
|
||||
}
|
||||
|
||||
protected String load(String fileName, String encoding)
|
||||
|
@@ -420,7 +122,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
protected boolean compile(String... fileNames) {
List<File> files = new ArrayList<File>();
for (String fileName : fileNames) {
File f = new File(tmpdir, fileName);
File f = new File(getTempTestDir(), fileName);
files.add(f);
}

@@ -435,7 +137,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
fileManager.getJavaFileObjectsFromFiles(files);

Iterable<String> compileOptions =
Arrays.asList("-g", "-source", "1.6", "-target", "1.6", "-implicit:class", "-Xlint:-options", "-d", tmpdir, "-cp", tmpdir+pathSep+CLASSPATH);
Arrays.asList("-g", "-source", "1.6", "-target", "1.6", "-implicit:class", "-Xlint:-options", "-d", getTempDirPath(), "-cp", getTempDirPath() + PATH_SEP + CLASSPATH);

JavaCompiler.CompilationTask task =
compiler.getTask(null, fileManager, null, compileOptions, null,

@@ -469,11 +171,10 @@ public class BaseJavaTest implements RuntimeTestSupport {
null,
lexerName);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
compile("Test.java");
String output = execClass("Test");
return output;
return execClass("Test");
}
|
||||
|
||||
public ParseTree execParser(String startRuleName, String input,
|
||||
|
@ -523,7 +224,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
|
||||
public Class<?> loadClassFromTempDir(String name) throws Exception {
|
||||
ClassLoader loader =
|
||||
new URLClassLoader(new URL[]{new File(tmpdir).toURI().toURL()},
|
||||
new URLClassLoader(new URL[]{getTempTestDir().toURI().toURL()},
|
||||
ClassLoader.getSystemClassLoader());
|
||||
return loader.loadClass(name);
|
||||
}
|
||||
|
@ -571,7 +272,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
lexerName,
|
||||
"-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
return rawExecRecognizer(parserName,
|
||||
lexerName,
|
||||
startRuleName,
|
||||
|
@ -598,7 +299,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
String... extraOptions)
|
||||
{
|
||||
ErrorQueue equeue =
|
||||
BaseRuntimeTest.antlrOnString(getTmpDir(), "Java", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
BaseRuntimeTest.antlrOnString(getTempDirPath(), "Java", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -620,8 +321,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
files.add(grammarName+"BaseVisitor.java");
|
||||
}
|
||||
}
|
||||
boolean allIsWell = compile(files.toArray(new String[files.size()]));
|
||||
return allIsWell;
|
||||
return compile(files.toArray(new String[0]));
|
||||
}
|
||||
|
||||
protected String rawExecRecognizer(String parserName,
|
||||
|
@ -630,7 +330,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
boolean debug,
|
||||
boolean profile)
|
||||
{
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
}
|
||||
|
@ -653,7 +353,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
public String execClass(String className) {
|
||||
if (TEST_IN_SAME_PROCESS) {
|
||||
try {
|
||||
ClassLoader loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader());
|
||||
ClassLoader loader = new URLClassLoader(new URL[] { getTempTestDir().toURI().toURL() }, ClassLoader.getSystemClassLoader());
|
||||
final Class<?> mainClass = (Class<?>)loader.loadClass(className);
|
||||
final Method mainMethod = mainClass.getDeclaredMethod("main", String[].class);
|
||||
PipedInputStream stdoutIn = new PipedInputStream();
|
||||
|
@ -671,7 +371,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
System.setErr(new PrintStream(stderrOut));
|
||||
stdoutVacuum.start();
|
||||
stderrVacuum.start();
|
||||
mainMethod.invoke(null, (Object)new String[] { new File(tmpdir, "input").getAbsolutePath() });
|
||||
mainMethod.invoke(null, (Object)new String[] { new File(getTempTestDir(), "input").getAbsolutePath() });
|
||||
}
|
||||
finally {
|
||||
System.setErr(originalErr);
|
||||
|
@ -690,7 +390,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
output = null;
|
||||
}
|
||||
if ( stderrVacuum.toString().length()>0 ) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
@@ -701,14 +401,14 @@ public class BaseJavaTest implements RuntimeTestSupport {

try {
String[] args = new String[] {
"java", "-classpath", tmpdir+pathSep+CLASSPATH,
"java", "-classpath", getTempDirPath() + PATH_SEP + CLASSPATH,
"-Dfile.encoding=UTF-8",
className, new File(tmpdir, "input").getAbsolutePath()
className, new File(getTempTestDir(), "input").getAbsolutePath()
};
// String cmdLine = Utils.join(args, " ");
// System.err.println("execParser: "+cmdLine);
Process process =
Runtime.getRuntime().exec(args, null, new File(tmpdir));
Runtime.getRuntime().exec(args, null, getTempTestDir());
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stdoutVacuum.start();

@@ -721,7 +421,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
output = null;
}
if ( stderrVacuum.toString().length()>0 ) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
}
|
||||
|
@ -732,49 +432,6 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
return null;
|
||||
}
|
||||
|
||||
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
|
||||
// }
|
||||
|
||||
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
|
||||
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
|
||||
// if ( a==null ) assertNull(expectedAmbigAlts);
|
||||
// else {
|
||||
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
|
||||
// }
|
||||
// assertEquals(expectedAmbigInput, a.input);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// unreachable(msgs, 0, expectedUnreachableAlts);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
|
||||
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
|
||||
// if ( u==null ) assertNull(expectedUnreachableAlts);
|
||||
// else {
|
||||
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
|
||||
// }
|
||||
// }
|
||||
|
||||
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
|
||||
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
|
||||
for (ANTLRMessage m : msgs) {
|
||||
if ( m.getClass() == c ) filtered.add(m);
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
public void checkRuleATN(Grammar g, String ruleName, String expecting) {
|
||||
// DOTGenerator dot = new DOTGenerator(g);
|
||||
// System.out.println(dot.getDOT(g.atn.ruleToStartState[g.getRule(ruleName).index]));
|
||||
|
@ -824,25 +481,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsError(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if ( equeue.size()!=1 ) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
|
@ -864,49 +503,6 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
}
|
||||
}
|
||||
|
||||
protected void checkError(ErrorQueue equeue,
|
||||
ANTLRMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
//System.out.println("errors="+equeue);
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty());
|
||||
assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1);
|
||||
assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg);
|
||||
/*
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
*/
|
||||
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
|
||||
}
|
||||
|
||||
public static class FilteringTokenStream extends CommonTokenStream {
|
||||
public FilteringTokenStream(TokenSource src) { super(src); }
|
||||
Set<Integer> hide = new HashSet<Integer>();
|
||||
@Override
|
||||
protected boolean sync(int i) {
|
||||
if (!super.sync(i)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Token t = get(i);
|
||||
if ( hide.contains(t.getType()) ) {
|
||||
((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
public void setTokenTypeChannel(int ttype, int channel) {
|
||||
hide.add(ttype);
|
||||
}
|
||||
}
|
||||
|
||||
protected void writeTestFile(String parserName,
|
||||
String lexerName,
|
||||
String parserStartRuleName,
|
||||
|
@ -969,7 +565,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
outputFileST.add("parserName", parserName);
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
writeFile(tmpdir, "Test.java", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.java", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
|
||||
|
@ -990,210 +586,13 @@ public class BaseJavaTest implements RuntimeTestSupport {
|
|||
);
|
||||
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
writeFile(tmpdir, "Test.java", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.java", outputFileST.render());
|
||||
}
|
||||
|
||||
public void writeRecognizerAndCompile(String parserName, String lexerName,
|
||||
String parserStartRuleName,
|
||||
boolean debug,
|
||||
boolean profile) {
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, debug);
|
||||
}
|
||||
else {
|
||||
writeTestFile(parserName,
|
||||
lexerName,
|
||||
parserStartRuleName,
|
||||
debug,
|
||||
profile);
|
||||
}
|
||||
|
||||
compile("Test.java");
|
||||
}
|
||||
|
||||
|
||||
protected void eraseFiles(final String filesEndingWith) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
String[] files = tmpdirF.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
if ( files[i].endsWith(filesEndingWith) ) {
|
||||
new File(tmpdir+"/"+files[i]).delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void eraseFiles() {
|
||||
if (tmpdir == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
File tmpdirF = new File(tmpdir);
|
||||
String[] files = tmpdirF.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
new File(tmpdir+"/"+files[i]).delete();
|
||||
}
|
||||
}
|
||||
|
||||
public void eraseTempDir() {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if ( tmpdirF.exists() ) {
|
||||
eraseFiles();
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
|
||||
public String getFirstLineOfException() {
|
||||
if ( this.stderrDuringParse ==null ) {
|
||||
return null;
|
||||
}
|
||||
String[] lines = this.stderrDuringParse.split("\n");
|
||||
String prefix="Exception in thread \"main\" ";
|
||||
return lines[0].substring(prefix.length(),lines[0].length());
|
||||
}
|
||||
|
||||
/**
|
||||
* When looking at a result set that consists of a Map/HashTable
|
||||
* we cannot rely on the output order, as the hashing algorithm or other aspects
|
||||
* of the implementation may be different on different JDKs or platforms. Hence
|
||||
* we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a
|
||||
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
|
||||
* the keys are strings.
|
||||
*
|
||||
* @param m The Map that contains keys we wish to return in sorted order
|
||||
* @return A string that represents all the keys in sorted order.
|
||||
*/
|
||||
public <K, V> String sortMapToString(Map<K, V> m) {
|
||||
// Pass in crap, and get nothing back
|
||||
//
|
||||
if (m == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
System.out.println("Map toString looks like: " + m.toString());
|
||||
|
||||
// Sort the keys in the Map
|
||||
//
|
||||
TreeMap<K, V> nset = new TreeMap<K, V>(m);
|
||||
|
||||
System.out.println("Tree map looks like: " + nset.toString());
|
||||
return nset.toString();
|
||||
}
|
||||
|
||||
public List<String> realElements(List<String> elements) {
|
||||
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String message, String text) {
|
||||
assertNotNull(message, text);
|
||||
assertFalse(message, text.isEmpty());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String text) {
|
||||
assertNotNull(text);
|
||||
assertFalse(text.isEmpty());
|
||||
}
|
||||
|
||||
public static class IntTokenStream implements TokenStream {
|
||||
public IntegerList types;
|
||||
int p=0;
|
||||
public IntTokenStream(IntegerList types) { this.types = types; }
|
||||
|
||||
@Override
|
||||
public void consume() { p++; }
|
||||
|
||||
@Override
|
||||
public int LA(int i) { return LT(i).getType(); }
|
||||
|
||||
@Override
|
||||
public int mark() {
|
||||
return index();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int index() { return p; }
|
||||
|
||||
@Override
|
||||
public void release(int marker) {
|
||||
seek(marker);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seek(int index) {
|
||||
p = index;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return types.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSourceName() {
|
||||
return UNKNOWN_SOURCE_NAME;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token LT(int i) {
|
||||
CommonToken t;
|
||||
int rawIndex = p + i - 1;
|
||||
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
|
||||
else t = new CommonToken(types.get(rawIndex));
|
||||
t.setTokenIndex(rawIndex);
|
||||
return t;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token get(int i) {
|
||||
return new org.antlr.v4.runtime.CommonToken(types.get(i));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenSource getTokenSource() {
|
||||
return null;
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getText() {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getText(Interval interval) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getText(RuleContext ctx) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
|
||||
@Override
|
||||
public String getText(Token start, Token stop) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
}
|
||||
|
||||
/** Sort a list */
|
||||
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
|
||||
List<T> dup = new ArrayList<T>();
|
||||
dup.addAll(data);
|
||||
Collections.sort(dup);
|
||||
return dup;
|
||||
}
|
||||
|
||||
/** Return map sorted by key */
|
||||
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
|
||||
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -5,171 +5,24 @@
|
|||
*/
|
||||
package org.antlr.v4.test.runtime.javascript;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.ATNFactory;
|
||||
import org.antlr.v4.automata.ATNPrinter;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.runtime.ANTLRInputStream;
|
||||
import org.antlr.v4.runtime.CharStream;
|
||||
import org.antlr.v4.runtime.CommonToken;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.IntStream;
|
||||
import org.antlr.v4.runtime.Lexer;
|
||||
import org.antlr.v4.runtime.RuleContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.TokenStream;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.runtime.atn.ATNState;
|
||||
import org.antlr.v4.runtime.atn.LexerATNSimulator;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.test.runtime.TestContext;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.DOTGenerator;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
import org.antlr.v4.tool.Rule;
|
||||
import org.antlr.v4.runtime.misc.Utils;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.stringtemplate.v4.STGroup;
|
||||
import org.stringtemplate.v4.STGroupString;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
import java.util.*;
|
||||
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public class BaseNodeTest implements RuntimeTestSupport {
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
// private static final Logger LOGGER =
// Logger.getLogger(BaseTest.class.getName());

public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");

public String tmpdir = null;

/**
* If error during parser execution, store stderr here; can't return stdout
* and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;

/** Errors found while running antlr */
protected StringBuilder antlrToolErrors;
public class BaseNodeTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
// new output dir for each test
|
||||
String prop = System.getProperty("antlr-javascript-test-dir");
|
||||
if (prop != null && prop.length() > 0) {
|
||||
tmpdir = prop;
|
||||
}
|
||||
else {
|
||||
tmpdir = new File(System.getProperty("java.io.tmpdir"), getClass()
|
||||
.getSimpleName()+"-"+Thread.currentThread().getName()+"-"+System.currentTimeMillis())
|
||||
.getAbsolutePath();
|
||||
}
|
||||
File dir = new File(tmpdir);
|
||||
if (dir.exists())
|
||||
this.eraseFiles(dir);
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if ( antlrToolErrors.length()==0 ) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if (g.atn == null) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
if (g.isLexer()) {
|
||||
f = new LexerATNFactory((LexerGrammar) g);
|
||||
}
|
||||
else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if (useSerializer) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if (g.ast != null && !g.ast.hasErrors) {
|
||||
System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if (g.getImportedGrammars() != null) { // process imported grammars
|
||||
// (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
protected String getPropertyPrefix() {
|
||||
return "antlr4-javascript";
|
||||
}
|
||||
|
||||
protected String execLexer(String grammarFileName, String grammarStr,
|
||||
|
@ -183,9 +36,9 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
|
||||
grammarStr, null, lexerName, "-no-listener");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
writeFile(tmpdir, "package.json", "{\"type\": \"module\"}");
|
||||
writeFile(getTempDirPath(), "package.json", "{\"type\": \"module\"}");
|
||||
String output = execModule("Test.js");
|
||||
if ( output!=null && output.length()==0 ) {
|
||||
output = null;
|
||||
|
@ -202,10 +55,10 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
|
||||
grammarStr, parserName, lexerName, "-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
|
||||
visitorName, startRuleName, showDiagnosticErrors);
|
||||
writeFile(tmpdir, "package.json", "{\"type\": \"module\"}");
|
||||
writeFile(getTempDirPath(), "package.json", "{\"type\": \"module\"}");
|
||||
return execRecognizer();
|
||||
}
|
||||
|
||||
|
@ -221,7 +74,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
protected boolean rawGenerateAndBuildRecognizer(String grammarFileName,
|
||||
String grammarStr, String parserName, String lexerName,
|
||||
boolean defaultListener, String... extraOptions) {
|
||||
ErrorQueue equeue = antlrOnString(getTmpDir(), "JavaScript", grammarFileName, grammarStr,
|
||||
ErrorQueue equeue = antlrOnString(getTempDirPath(), "JavaScript", grammarFileName, grammarStr,
|
||||
defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
|
@ -252,7 +105,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
protected void rawBuildRecognizerTestFile(String parserName,
|
||||
String lexerName, String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug) {
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if (parserName == null) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
}
|
||||
|
@@ -269,20 +122,20 @@ public class BaseNodeTest implements RuntimeTestSupport {
public String execModule(String fileName) {
try {
String npmPath = locateNpm();
if(!TestContext.isTravisCI()) {
if(!TestContext.isCI()) {
installRuntime(npmPath);
registerRuntime(npmPath);
}
String modulePath = new File(new File(tmpdir), fileName)
String modulePath = new File(getTempTestDir(), fileName)
.getAbsolutePath();
linkRuntime(npmPath);
String nodejsPath = locateNodeJS();
String inputPath = new File(new File(tmpdir), "input")
String inputPath = new File(getTempTestDir(), "input")
.getAbsolutePath();
ProcessBuilder builder = new ProcessBuilder(nodejsPath, modulePath,
inputPath);
builder.environment().put("NODE_PATH", tmpdir);
builder.directory(new File(tmpdir));
builder.environment().put("NODE_PATH", getTempDirPath());
builder.directory(getTempTestDir());
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(
process.getInputStream());
|
||||
|
@ -301,7 +154,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
output = null;
|
||||
}
|
||||
if (stderrVacuum.toString().length() > 0) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
}
|
||||
return output;
|
||||
} catch (Exception e) {
|
||||
|
@ -316,8 +169,8 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
String runtimePath = locateRuntime();
|
||||
ProcessBuilder builder = new ProcessBuilder(npmPath, "install");
|
||||
builder.directory(new File(runtimePath));
|
||||
builder.redirectError(new File(tmpdir, "error.txt"));
|
||||
builder.redirectOutput(new File(tmpdir, "output.txt"));
|
||||
builder.redirectError(new File(getTempTestDir(), "error.txt"));
|
||||
builder.redirectOutput(new File(getTempTestDir(), "output.txt"));
|
||||
Process process = builder.start();
|
||||
// TODO switch to jdk 8
|
||||
process.waitFor();
|
||||
|
@ -332,8 +185,8 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
String runtimePath = locateRuntime();
|
||||
ProcessBuilder builder = new ProcessBuilder(npmPath, "link");
|
||||
builder.directory(new File(runtimePath));
|
||||
builder.redirectError(new File(tmpdir, "error.txt"));
|
||||
builder.redirectOutput(new File(tmpdir, "output.txt"));
|
||||
builder.redirectError(new File(getTempTestDir(), "error.txt"));
|
||||
builder.redirectOutput(new File(getTempTestDir(), "output.txt"));
|
||||
Process process = builder.start();
|
||||
// TODO switch to jdk 8
|
||||
process.waitFor();
|
||||
|
@@ -345,18 +198,25 @@ public class BaseNodeTest implements RuntimeTestSupport {
}

private void linkRuntime(String npmPath) throws IOException, InterruptedException {
ProcessBuilder builder = new ProcessBuilder(npmPath, "link", "antlr4");
builder.directory(new File(tmpdir));
builder.redirectError(new File(tmpdir, "error.txt"));
builder.redirectOutput(new File(tmpdir, "output.txt"));
List<String> args = new ArrayList<>();
if(TestContext.isCircleCI())
args.add("sudo");
args.addAll(Arrays.asList(npmPath, "link", "antlr4"));
ProcessBuilder builder = new ProcessBuilder(args.toArray(new String[0]));
builder.directory(getTempTestDir());
File errorFile = new File(getTempTestDir(), "error.txt");
builder.redirectError(errorFile);
builder.redirectOutput(new File(getTempTestDir(), "output.txt"));
Process process = builder.start();
// TODO switch to jdk 8
process.waitFor();
// if(!process.waitFor(30L, TimeUnit.SECONDS))
// process.destroyForcibly();
int error = process.exitValue();
if(error!=0)
throw new IOException("'npm link antlr4' failed");
if(error!=0) {
char[] errors = Utils.readFile(errorFile.getAbsolutePath());
throw new IOException("'npm link antlr4' failed: " + new String(errors));
}
}
|
||||
|
||||
private boolean canExecute(String tool) {
|
||||
|
@ -381,6 +241,8 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
// typically /usr/local/bin/npm
|
||||
String prop = System.getProperty("antlr-javascript-npm");
|
||||
if ( prop!=null && prop.length()!=0 ) {
|
||||
if(prop.contains(" "))
|
||||
prop = "\"" + prop + "\"";
|
||||
return prop;
|
||||
}
|
||||
return "npm"; // everywhere
|
||||
|
@ -390,6 +252,8 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
// typically /usr/local/bin/node
|
||||
String prop = System.getProperty("antlr-javascript-nodejs");
|
||||
if ( prop!=null && prop.length()!=0 ) {
|
||||
if(prop.contains(" "))
|
||||
prop = "\"" + prop + "\"";
|
||||
return prop;
|
||||
}
|
||||
if (canExecute("nodejs")) {
|
||||
|
@ -410,11 +274,6 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
return runtimeSrc.getPath();
|
||||
}
|
||||
|
||||
private boolean isWindows() {
|
||||
return System.getProperty("os.name").toLowerCase().contains("windows");
|
||||
}
|
||||
|
||||
|
||||
protected void writeParserTestFile(String parserName, String lexerName,
|
||||
String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug) {
|
||||
|
@ -465,7 +324,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
|
|||
outputFileST.add("listenerName", listenerName);
|
||||
outputFileST.add("visitorName", visitorName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
writeFile(tmpdir, "Test.js", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.js", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
|
||||
|
@@ -485,49 +344,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
: "") + "}\n" + "\n" + "main(process.argv);\n"
+ "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.js", outputFileST.render());
writeFile(getTempDirPath(), "Test.js", outputFileST.render());
}

protected void eraseFiles(File dir) {
String[] files = dir.list();
for (int i = 0; files != null && i < files.length; i++) {
new File(dir, files[i]).delete();
}
}

@Override
public void eraseTempDir() {
boolean doErase = true;
String propName = "antlr-javascript-erase-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0)
doErase = Boolean.getBoolean(prop);
if (doErase) {
File tmpdirF = new File(tmpdir);
if (tmpdirF.exists()) {
eraseFiles(tmpdirF);
tmpdirF.delete();
}
}
}


/** Sort a list */
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>(data);
Collections.sort(dup);
return dup;
}

/** Return map sorted by key */
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(
Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}
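The hunks above (and the PHP, Python and Swift diffs that follow) all apply the same refactor: each runtime test class stops managing its own tmpdir and stderr fields and instead inherits them from a shared BaseRuntimeTestSupport class, calling getTempDirPath(), getTempTestDir(), getPropertyPrefix() and setParseErrors(). The sketch below is only an illustration of what such a base class could look like, inferred from the call sites in this diff; it is not the actual BaseRuntimeTestSupport implementation from the ANTLR repository.

// Hypothetical sketch of the shared test-support base class implied by the diff.
// Method names come from the call sites above; the bodies are assumptions.
import java.io.File;

public abstract class BaseRuntimeTestSupport {
	private File tempTestDir;      // per-test temp directory
	private String parseErrors;    // stderr captured while running the generated test

	protected abstract String getPropertyPrefix(); // e.g. "antlr-php", "antlr-javascript"

	public void testSetUp() throws Exception {
		// one fresh directory per test, overridable via -D<prefix>-test-dir=...
		String prop = System.getProperty(getPropertyPrefix() + "-test-dir");
		String path = (prop != null && !prop.isEmpty())
				? prop
				: new File(System.getProperty("java.io.tmpdir"),
						getClass().getSimpleName() + "-" + Thread.currentThread().getName()
								+ "-" + System.currentTimeMillis()).getAbsolutePath();
		tempTestDir = new File(path);
		tempTestDir.mkdirs();
	}

	public File getTempTestDir()   { return tempTestDir; }
	public String getTempDirPath() { return tempTestDir.getAbsolutePath(); }

	protected void setParseErrors(String errors) { this.parseErrors = errors; }
	public String getParseErrors()               { return parseErrors; }
}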
@ -11,24 +11,11 @@ import java.io.IOException;
|
|||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
|
||||
|
@ -36,124 +23,12 @@ import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
|||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class BasePHPTest implements RuntimeTestSupport {
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
public class BasePHPTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/**
|
||||
* If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
/**
|
||||
* Errors found while running antlr
|
||||
*/
|
||||
protected StringBuilder antlrToolErrors;
|
||||
|
||||
private String getPropertyPrefix() {
|
||||
public String getPropertyPrefix() {
|
||||
return "antlr-php";
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
// new output dir for each test
|
||||
String propName = getPropertyPrefix() + "-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
|
||||
if (prop != null && prop.length() > 0) {
|
||||
tmpdir = prop;
|
||||
} else {
|
||||
String classSimpleName = getClass().getSimpleName();
|
||||
String threadName = Thread.currentThread().getName();
|
||||
String childPath = String.format("%s-%s-%s", classSimpleName, threadName, System.currentTimeMillis());
|
||||
tmpdir = new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
|
||||
}
|
||||
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if (antlrToolErrors.length() == 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if (g.atn == null) {
|
||||
semanticProcess(g);
|
||||
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
|
||||
if (g.isLexer()) {
|
||||
f = new LexerATNFactory((LexerGrammar) g);
|
||||
} else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
|
||||
if (useSerializer) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if (g.ast != null && !g.ast.hasErrors) {
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
|
||||
if (g.getImportedGrammars() != null) {
|
||||
for (Grammar imp: g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected String execLexer(
|
||||
String grammarFileName,
|
||||
String grammarStr,
|
||||
String lexerName,
|
||||
String input
|
||||
) {
|
||||
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String execLexer(
|
||||
String grammarFileName,
|
||||
|
@ -170,11 +45,9 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
"-no-listener"
|
||||
);
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
String output = execModule("Test.php");
|
||||
|
||||
return output;
|
||||
return execModule("Test.php");
|
||||
}
|
||||
|
||||
public String execParser(
|
||||
|
@ -224,7 +97,7 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
|
||||
assertTrue(success);
|
||||
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
|
||||
rawBuildRecognizerTestFile(
|
||||
parserName,
|
||||
|
@ -270,7 +143,7 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
boolean defaultListener,
|
||||
String... extraOptions
|
||||
) {
|
||||
ErrorQueue equeue = antlrOnString(getTmpDir(), "PHP", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
ErrorQueue equeue = antlrOnString(getTempDirPath(), "PHP", grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
|
@ -307,7 +180,7 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
boolean debug,
|
||||
boolean trace
|
||||
) {
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if (parserName == null) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
} else {
|
||||
|
@@ -331,15 +204,14 @@ public class BasePHPTest implements RuntimeTestSupport {
String phpPath = locatePhp();
String runtimePath = locateRuntime();

File tmpdirFile = new File(tmpdir);
String modulePath = new File(tmpdirFile, fileName).getAbsolutePath();
String inputPath = new File(tmpdirFile, "input").getAbsolutePath();
Path outputPath = tmpdirFile.toPath().resolve("output").toAbsolutePath();
String modulePath = new File(getTempTestDir(), fileName).getAbsolutePath();
String inputPath = new File(getTempTestDir(), "input").getAbsolutePath();
Path outputPath = getTempTestDir().toPath().resolve("output").toAbsolutePath();

try {
ProcessBuilder builder = new ProcessBuilder(phpPath, modulePath, inputPath, outputPath.toString());
builder.environment().put("RUNTIME", runtimePath);
builder.directory(tmpdirFile);
builder.directory(getTempTestDir());
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
@ -355,7 +227,7 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
}
|
||||
|
||||
if (stderrVacuum.toString().length() > 0) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
}
|
||||
|
||||
return output;
|
||||
|
@ -464,7 +336,7 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
|
||||
writeFile(tmpdir, "Test.php", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.php", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void writeParserTestFile(
|
||||
|
@ -546,54 +418,7 @@ public class BasePHPTest implements RuntimeTestSupport {
|
|||
outputFileST.add("visitorName", visitorName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
|
||||
writeFile(tmpdir, "Test.php", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "Test.php", outputFileST.render());
|
||||
}
|
||||
|
||||
protected void eraseFiles(File dir) {
|
||||
String[] files = dir.list();
|
||||
for (int i = 0; files != null && i < files.length; i++) {
|
||||
new File(dir, files[i]).delete();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void eraseTempDir() {
|
||||
boolean doErase = true;
|
||||
String propName = getPropertyPrefix() + "-erase-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if (prop != null && prop.length() > 0) {
|
||||
doErase = Boolean.getBoolean(prop);
|
||||
}
|
||||
if (doErase) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if (tmpdirF.exists()) {
|
||||
eraseFiles(tmpdirF);
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Sort a list
|
||||
*/
|
||||
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
|
||||
List<T> dup = new ArrayList<T>();
|
||||
dup.addAll(data);
|
||||
Collections.sort(dup);
|
||||
return dup;
|
||||
}
|
||||
|
||||
/**
|
||||
* Return map sorted by key
|
||||
*/
|
||||
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
|
||||
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k: keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
|
||||
}
|
||||
}
@ -5,339 +5,38 @@
|
|||
*/
|
||||
package org.antlr.v4.test.runtime.python;
|
||||
|
||||
import org.antlr.v4.Tool;
|
||||
import org.antlr.v4.automata.ATNFactory;
|
||||
import org.antlr.v4.automata.ATNPrinter;
|
||||
import org.antlr.v4.automata.LexerATNFactory;
|
||||
import org.antlr.v4.automata.ParserATNFactory;
|
||||
import org.antlr.v4.codegen.CodeGenerator;
|
||||
import org.antlr.v4.runtime.ANTLRInputStream;
|
||||
import org.antlr.v4.runtime.CharStream;
|
||||
import org.antlr.v4.runtime.CommonToken;
|
||||
import org.antlr.v4.runtime.CommonTokenStream;
|
||||
import org.antlr.v4.runtime.IntStream;
|
||||
import org.antlr.v4.runtime.Lexer;
|
||||
import org.antlr.v4.runtime.Parser;
|
||||
import org.antlr.v4.runtime.RuleContext;
|
||||
import org.antlr.v4.runtime.Token;
|
||||
import org.antlr.v4.runtime.TokenSource;
|
||||
import org.antlr.v4.runtime.TokenStream;
|
||||
import org.antlr.v4.runtime.WritableToken;
|
||||
import org.antlr.v4.runtime.atn.ATN;
|
||||
import org.antlr.v4.runtime.atn.ATNDeserializer;
|
||||
import org.antlr.v4.runtime.atn.ATNSerializer;
|
||||
import org.antlr.v4.runtime.atn.ATNState;
|
||||
import org.antlr.v4.runtime.atn.DecisionState;
|
||||
import org.antlr.v4.runtime.atn.LexerATNSimulator;
|
||||
import org.antlr.v4.runtime.dfa.DFA;
|
||||
import org.antlr.v4.runtime.misc.IntegerList;
|
||||
import org.antlr.v4.runtime.misc.Interval;
|
||||
import org.antlr.v4.runtime.tree.ParseTree;
|
||||
import org.antlr.v4.semantics.SemanticPipeline;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.test.runtime.TestOutputReading;
|
||||
import org.antlr.v4.tool.ANTLRMessage;
|
||||
import org.antlr.v4.tool.DOTGenerator;
|
||||
import org.antlr.v4.tool.Grammar;
|
||||
import org.antlr.v4.tool.GrammarSemanticsMessage;
|
||||
import org.antlr.v4.tool.LexerGrammar;
|
||||
import org.antlr.v4.tool.Rule;
|
||||
import org.junit.rules.TestRule;
|
||||
import org.junit.rules.TestWatcher;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.junit.runner.Description;
|
||||
import org.stringtemplate.v4.ST;
|
||||
import org.stringtemplate.v4.STGroup;
|
||||
import org.stringtemplate.v4.STGroupString;
|
||||
|
||||
import java.io.File;
|
||||
import java.lang.reflect.InvocationTargetException;
|
||||
import java.lang.reflect.Method;
|
||||
import java.net.URL;
|
||||
import java.nio.file.Path;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.LinkedHashMap;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.TreeMap;
|
||||
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
||||
import static org.junit.Assert.assertArrayEquals;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.*;
|
||||
|
||||
public abstract class BasePythonTest implements RuntimeTestSupport {
|
||||
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
|
||||
// private static final Logger LOGGER = Logger.getLogger(BaseTest.class.getName());
|
||||
public static final String newline = System.getProperty("line.separator");
|
||||
public static final String pathSep = System.getProperty("path.separator");
|
||||
public abstract class BasePythonTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
|
||||
|
||||
public String tmpdir = null;
|
||||
@Override
|
||||
protected void testSucceeded(Description description) {
|
||||
eraseTempPyCache();
|
||||
eraseTempDir();
|
||||
}
|
||||
|
||||
/** If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
protected String stderrDuringParse;
|
||||
|
||||
/** Errors found while running antlr */
|
||||
protected StringBuilder antlrToolErrors;
|
||||
|
||||
@org.junit.Rule
|
||||
public final TestRule testWatcher = new TestWatcher() {
|
||||
|
||||
@Override
|
||||
protected void succeeded(Description description) {
|
||||
// remove tmpdir if no error.
|
||||
eraseTempPyCache();
|
||||
eraseTempDir();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
private String getPropertyPrefix() {
|
||||
@Override
|
||||
protected String getPropertyPrefix() {
|
||||
return "antlr-" + getLanguage().toLowerCase();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
// new output dir for each test
|
||||
String propName = getPropertyPrefix() + "-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if(prop!=null && prop.length()>0) {
|
||||
tmpdir = prop;
|
||||
}
|
||||
else {
|
||||
tmpdir = new File(System.getProperty("java.io.tmpdir"), getClass().getSimpleName()+
|
||||
"-"+Thread.currentThread().getName()+"-"+System.currentTimeMillis()).getAbsolutePath();
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if ( antlrToolErrors.length()==0 ) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
protected org.antlr.v4.Tool newTool(String[] args) {
|
||||
Tool tool = new Tool(args);
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected Tool newTool() {
|
||||
org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir});
|
||||
return tool;
|
||||
}
|
||||
|
||||
protected ATN createATN(Grammar g, boolean useSerializer) {
|
||||
if ( g.atn==null ) {
|
||||
semanticProcess(g);
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
|
||||
ParserATNFactory f;
|
||||
if ( g.isLexer() ) {
|
||||
f = new LexerATNFactory((LexerGrammar)g);
|
||||
}
|
||||
else {
|
||||
f = new ParserATNFactory(g);
|
||||
}
|
||||
|
||||
g.atn = f.createATN();
|
||||
assertEquals(0, g.tool.getNumErrors());
|
||||
}
|
||||
|
||||
ATN atn = g.atn;
|
||||
if (useSerializer) {
|
||||
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
|
||||
return new ATNDeserializer().deserialize(serialized);
|
||||
}
|
||||
|
||||
return atn;
|
||||
}
|
||||
|
||||
protected void semanticProcess(Grammar g) {
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
System.out.println(g.ast.toStringTree());
|
||||
Tool antlr = new Tool();
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
|
||||
for (Grammar imp : g.getImportedGrammars()) {
|
||||
antlr.processNonCombinedGrammar(imp, false);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public DFA createDFA(Grammar g, DecisionState s) {
|
||||
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
|
||||
// DFA dfa = conv.createDFA();
|
||||
// conv.issueAmbiguityWarnings();
|
||||
// System.out.print("DFA="+dfa);
|
||||
// return dfa;
|
||||
return null;
|
||||
}
|
||||
|
||||
// public void minimizeDFA(DFA dfa) {
|
||||
// DFAMinimizer dmin = new DFAMinimizer(dfa);
|
||||
// dfa.minimized = dmin.minimize();
|
||||
// }
|
||||
|
||||
IntegerList getTypesFromString(Grammar g, String expecting) {
|
||||
IntegerList expectingTokenTypes = new IntegerList();
|
||||
if ( expecting!=null && !expecting.trim().isEmpty() ) {
|
||||
for (String tname : expecting.replace(" ", "").split(",")) {
|
||||
int ttype = g.getTokenType(tname);
|
||||
expectingTokenTypes.add(ttype);
|
||||
}
|
||||
}
|
||||
return expectingTokenTypes;
|
||||
}
|
||||
|
||||
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
|
||||
ANTLRInputStream in = new ANTLRInputStream(input);
|
||||
IntegerList tokenTypes = new IntegerList();
|
||||
int ttype;
|
||||
do {
|
||||
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
|
||||
tokenTypes.add(ttype);
|
||||
} while ( ttype!= Token.EOF );
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
public List<String> getTokenTypes(LexerGrammar lg,
|
||||
ATN atn,
|
||||
CharStream input)
|
||||
{
|
||||
LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
|
||||
List<String> tokenTypes = new ArrayList<String>();
|
||||
int ttype;
|
||||
boolean hitEOF = false;
|
||||
do {
|
||||
if ( hitEOF ) {
|
||||
tokenTypes.add("EOF");
|
||||
break;
|
||||
}
|
||||
int t = input.LA(1);
|
||||
ttype = interp.match(input, Lexer.DEFAULT_MODE);
|
||||
if ( ttype == Token.EOF ) {
|
||||
tokenTypes.add("EOF");
|
||||
}
|
||||
else {
|
||||
tokenTypes.add(lg.typeToTokenList.get(ttype));
|
||||
}
|
||||
|
||||
if ( t== IntStream.EOF ) {
|
||||
hitEOF = true;
|
||||
}
|
||||
} while ( ttype!=Token.EOF );
|
||||
return tokenTypes;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
|
||||
if ( s==null ) {
|
||||
System.err.println("no such rule: "+ruleName);
|
||||
return null;
|
||||
}
|
||||
ATNState t = s.transition(0).target;
|
||||
if ( !(t instanceof DecisionState) ) {
|
||||
System.out.println(ruleName+" has no decision");
|
||||
return null;
|
||||
}
|
||||
DecisionState blk = (DecisionState)t;
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(gtext, equeue);
|
||||
ATN atn = createATN(g, false);
|
||||
DecisionState blk = atn.decisionToState.get(decision);
|
||||
checkRuleDFA(g, blk, expecting);
|
||||
return equeue.all;
|
||||
}
|
||||
|
||||
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
DFA dfa = createDFA(g, blk);
|
||||
String result = null;
|
||||
if ( dfa!=null ) result = dfa.toString();
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
|
||||
}
|
||||
|
||||
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
|
||||
throws Exception
|
||||
{
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
LexerGrammar g = new LexerGrammar(gtext, equeue);
|
||||
g.atn = createATN(g, false);
|
||||
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
|
||||
// DFA dfa = conv.createDFA(modeName);
|
||||
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
|
||||
//
|
||||
// String result = null;
|
||||
// if ( dfa!=null ) result = dfa.toString();
|
||||
// assertEquals(expecting, result);
|
||||
//
|
||||
// return equeue.all;
|
||||
return null;
|
||||
}
|
||||
|
||||
protected abstract String getLanguage();
|
||||
|
||||
protected String execLexer(String grammarFileName,
|
||||
String grammarStr,
|
||||
String lexerName,
|
||||
String input)
|
||||
{
|
||||
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String execLexer(String grammarFileName,
|
||||
String grammarStr,
|
||||
|
@ -350,31 +49,12 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
null,
|
||||
lexerName,"-no-listener");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
String output = execModule("Test.py");
|
||||
return output;
|
||||
}
|
||||
|
||||
public ParseTree execStartRule(String startRuleName, Parser parser)
|
||||
throws IllegalAccessException, InvocationTargetException,
|
||||
NoSuchMethodException
|
||||
{
|
||||
Method startRule = null;
|
||||
Object[] args = null;
|
||||
try {
|
||||
startRule = parser.getClass().getMethod(startRuleName);
|
||||
}
|
||||
catch (NoSuchMethodException nsme) {
|
||||
// try with int _p arg for recursive func
|
||||
startRule = parser.getClass().getMethod(startRuleName, int.class);
|
||||
args = new Integer[] {0};
|
||||
}
|
||||
ParseTree result = (ParseTree)startRule.invoke(parser, args);
|
||||
// System.out.println("parse tree = "+result.toStringTree(parser));
|
||||
return result;
|
||||
return execModule("Test.py");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String execParser(String grammarFileName,
|
||||
String grammarStr,
|
||||
String parserName,
|
||||
|
@ -405,7 +85,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
lexerName,
|
||||
"-visitor");
|
||||
assertTrue(success);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
rawBuildRecognizerTestFile(parserName,
|
||||
lexerName,
|
||||
listenerName,
|
||||
|
@ -434,8 +114,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
boolean defaultListener,
|
||||
String... extraOptions)
|
||||
{
|
||||
ErrorQueue equeue =
|
||||
antlrOnString(getTmpDir(), getLanguage(), grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
ErrorQueue equeue = antlrOnString(getTempDirPath(), getLanguage(), grammarFileName, grammarStr, defaultListener, extraOptions);
|
||||
if (!equeue.errors.isEmpty()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -465,7 +144,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
boolean debug,
|
||||
boolean trace)
|
||||
{
|
||||
this.stderrDuringParse = null;
|
||||
setParseErrors(null);
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, false);
|
||||
}
|
||||
|
@@ -486,7 +165,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
public String execModule(String fileName) {
String pythonPath = locatePython();
String runtimePath = locateRuntime();
File tmpdirFile = new File(tmpdir);
File tmpdirFile = new File(getTempDirPath());
String modulePath = new File(tmpdirFile, fileName).getAbsolutePath();
String inputPath = new File(tmpdirFile, "input").getAbsolutePath();
Path outputPath = tmpdirFile.toPath().resolve("output").toAbsolutePath();
@ -502,7 +181,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
stderrVacuum.join();
|
||||
String output = TestOutputReading.read(outputPath);
|
||||
if ( stderrVacuum.toString().length()>0 ) {
|
||||
this.stderrDuringParse = stderrVacuum.toString();
|
||||
setParseErrors(stderrVacuum.toString());
|
||||
}
|
||||
return output;
|
||||
}
|
||||
|
@@ -516,7 +195,8 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
private String locateTool(List<String> tools) {
String[] roots = {
"/opt/local/bin", "/usr/bin/", "/usr/local/bin/",
"/Users/"+System.getProperty("user.name")+"/anaconda3/bin/"
"/Users/"+System.getProperty("user.name")+"/anaconda3/bin/",
"/Users/"+System.getProperty("user.name")+"/opt/anaconda3/bin/"
};
for(String root : roots) {
for (String tool : tools) {
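The locateTool hunk above only extends the list of candidate install roots (adding the /opt/anaconda3 location); the lookup logic itself is unchanged. As a rough illustration of that lookup pattern, assuming nothing beyond standard java.io, a tool finder of this shape scans each root for the first executable match and falls back to the bare command name so the OS PATH can resolve it:

// Illustrative sketch only; not the project's locateTool implementation.
import java.io.File;
import java.util.List;

final class ToolLocator {
	static String locate(List<String> roots, List<String> candidates) {
		for (String root : roots) {
			for (String name : candidates) {
				File f = new File(root, name);
				if (f.exists() && f.canExecute()) {
					return f.getAbsolutePath();   // first hit wins
				}
			}
		}
		return candidates.get(0);                 // fall back to PATH resolution
	}
}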
@ -555,190 +235,6 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
return runtimeSrc.getPath();
|
||||
}
|
||||
|
||||
private boolean isWindows() {
|
||||
return System.getProperty("os.name").toLowerCase().contains("windows");
|
||||
}
|
||||
|
||||
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
|
||||
// }
|
||||
|
||||
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
|
||||
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
|
||||
// if ( a==null ) assertNull(expectedAmbigAlts);
|
||||
// else {
|
||||
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
|
||||
// }
|
||||
// assertEquals(expectedAmbigInput, a.input);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// unreachable(msgs, 0, expectedUnreachableAlts);
|
||||
// }
|
||||
|
||||
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
|
||||
// throws Exception
|
||||
// {
|
||||
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
|
||||
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
|
||||
// if ( u==null ) assertNull(expectedUnreachableAlts);
|
||||
// else {
|
||||
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
|
||||
// }
|
||||
// }
|
||||
|
||||
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
|
||||
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
|
||||
for (ANTLRMessage m : msgs) {
|
||||
if ( m.getClass() == c ) filtered.add(m);
|
||||
}
|
||||
return filtered;
|
||||
}
|
||||
|
||||
void checkRuleATN(Grammar g, String ruleName, String expecting) {
|
||||
ParserATNFactory f = new ParserATNFactory(g);
|
||||
ATN atn = f.createATN();
|
||||
|
||||
DOTGenerator dot = new DOTGenerator(g);
|
||||
System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
|
||||
|
||||
Rule r = g.getRule(ruleName);
|
||||
ATNState startState = atn.ruleToStartState[r.index];
|
||||
ATNPrinter serializer = new ATNPrinter(g, startState);
|
||||
String result = serializer.asString();
|
||||
|
||||
//System.out.print(result);
|
||||
assertEquals(expecting, result);
|
||||
}
|
||||
|
||||
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
|
||||
int lp = templates.indexOf('(');
|
||||
String name = templates.substring(0, lp);
|
||||
STGroup group = new STGroupString(templates);
|
||||
ST st = group.getInstanceOf(name);
|
||||
st.add(actionName, action);
|
||||
String grammar = st.render();
|
||||
ErrorQueue equeue = new ErrorQueue();
|
||||
Grammar g = new Grammar(grammar, equeue);
|
||||
if ( g.ast!=null && !g.ast.hasErrors ) {
|
||||
SemanticPipeline sem = new SemanticPipeline(g);
|
||||
sem.process();
|
||||
|
||||
ATNFactory factory = new ParserATNFactory(g);
|
||||
if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g);
|
||||
g.atn = factory.createATN();
|
||||
|
||||
CodeGenerator gen = new CodeGenerator(g);
|
||||
ST outputFileST = gen.generateParser();
|
||||
String output = outputFileST.render();
|
||||
//System.out.println(output);
|
||||
String b = "#" + actionName + "#";
|
||||
int start = output.indexOf(b);
|
||||
String e = "#end-" + actionName + "#";
|
||||
int end = output.indexOf(e);
|
||||
String snippet = output.substring(start+b.length(),end);
|
||||
assertEquals(expected, snippet);
|
||||
}
|
||||
if ( equeue.size()>0 ) {
|
||||
System.err.println(equeue.toString());
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsError(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if ( equeue.size()!=1 ) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
|
||||
GrammarSemanticsMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.warnings.size(); i++) {
|
||||
ANTLRMessage m = equeue.warnings.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
|
||||
if ( equeue.size()!=1 ) {
|
||||
System.err.println(equeue);
|
||||
}
|
||||
}
|
||||
|
||||
protected void checkError(ErrorQueue equeue,
|
||||
ANTLRMessage expectedMessage)
|
||||
throws Exception
|
||||
{
|
||||
//System.out.println("errors="+equeue);
|
||||
ANTLRMessage foundMsg = null;
|
||||
for (int i = 0; i < equeue.errors.size(); i++) {
|
||||
ANTLRMessage m = equeue.errors.get(i);
|
||||
if (m.getErrorType()==expectedMessage.getErrorType() ) {
|
||||
foundMsg = m;
|
||||
}
|
||||
}
|
||||
assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty());
|
||||
assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1);
|
||||
assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg);
|
||||
/*
|
||||
assertTrue("error is not a GrammarSemanticsMessage",
|
||||
foundMsg instanceof GrammarSemanticsMessage);
|
||||
*/
|
||||
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
|
||||
}
|
||||
|
||||
public static class FilteringTokenStream extends CommonTokenStream {
|
||||
public FilteringTokenStream(TokenSource src) { super(src); }
|
||||
Set<Integer> hide = new HashSet<Integer>();
|
||||
@Override
|
||||
protected boolean sync(int i) {
|
||||
if (!super.sync(i)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
Token t = get(i);
|
||||
if ( hide.contains(t.getType()) ) {
|
||||
((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
public void setTokenTypeChannel(int ttype, int channel) {
|
||||
hide.add(ttype);
|
||||
}
|
||||
}
|
||||
|
||||
protected void mkdir(String dir) {
|
||||
File f = new File(dir);
|
||||
f.mkdirs();
|
||||
}
|
||||
|
||||
protected abstract void writeParserTestFile(String parserName,
|
||||
String lexerName,
|
||||
String listenerName,
|
||||
|
@ -751,213 +247,13 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
|
|||
|
||||
protected abstract void writeLexerTestFile(String lexerName, boolean showDFA);
|
||||
|
||||
public void writeRecognizer(String parserName, String lexerName,
|
||||
String listenerName, String visitorName,
|
||||
String parserStartRuleName, boolean debug, boolean trace) {
|
||||
if ( parserName==null ) {
|
||||
writeLexerTestFile(lexerName, debug);
|
||||
}
|
||||
else {
|
||||
writeParserTestFile(parserName,
|
||||
lexerName,
|
||||
listenerName,
|
||||
visitorName,
|
||||
parserStartRuleName,
|
||||
debug,
|
||||
trace);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
protected void eraseFiles(final String filesEndingWith) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
String[] files = tmpdirF.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
if ( files[i].endsWith(filesEndingWith) ) {
|
||||
new File(tmpdir+"/"+files[i]).delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void eraseFiles(File dir) {
|
||||
String[] files = dir.list();
|
||||
for(int i = 0; files!=null && i < files.length; i++) {
|
||||
new File(dir,files[i]).delete();
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void eraseTempDir() {
|
||||
boolean doErase = true;
|
||||
String propName = getPropertyPrefix() + "-erase-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if(prop!=null && prop.length()>0)
|
||||
doErase = Boolean.getBoolean(prop);
|
||||
if(doErase) {
|
||||
File tmpdirF = new File(tmpdir);
|
||||
if ( tmpdirF.exists() ) {
|
||||
eraseFiles(tmpdirF);
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
protected void eraseTempPyCache() {
|
||||
File tmpdirF = new File(tmpdir+"/__pycache__");
|
||||
File tmpdirF = new File(getTempTestDir() + "/__pycache__");
|
||||
if ( tmpdirF.exists() ) {
|
||||
eraseFiles(tmpdirF);
|
||||
eraseFilesInDir(tmpdirF);
|
||||
tmpdirF.delete();
|
||||
}
|
||||
}
|
||||
|
||||
public String getFirstLineOfException() {
|
||||
if ( this.stderrDuringParse ==null ) {
|
||||
return null;
|
||||
}
|
||||
String[] lines = this.stderrDuringParse.split("\n");
|
||||
String prefix="Exception in thread \"main\" ";
|
||||
return lines[0].substring(prefix.length(),lines[0].length());
|
||||
}
|
||||
|
||||
/**
* When looking at a result set that consists of a Map/HashTable
* we cannot rely on the output order, as the hashing algorithm or other aspects
* of the implementation may be different on different JDKs or platforms. Hence
* we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
* the keys are strings.
*
* @param m The Map that contains keys we wish to return in sorted order
* @return A string that represents all the keys in sorted order.
*/
public <K, V> String sortMapToString(Map<K, V> m) {
|
||||
// Pass in crap, and get nothing back
|
||||
//
|
||||
if (m == null) {
|
||||
return null;
|
||||
}
|
||||
|
||||
System.out.println("Map toString looks like: " + m.toString());
|
||||
|
||||
// Sort the keys in the Map
|
||||
//
|
||||
TreeMap<K, V> nset = new TreeMap<K, V>(m);
|
||||
|
||||
System.out.println("Tree map looks like: " + nset.toString());
|
||||
return nset.toString();
|
||||
}
|
||||
|
||||
public List<String> realElements(List<String> elements) {
|
||||
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String message, String text) {
|
||||
assertNotNull(message, text);
|
||||
assertFalse(message, text.isEmpty());
|
||||
}
|
||||
|
||||
public void assertNotNullOrEmpty(String text) {
|
||||
assertNotNull(text);
|
||||
assertFalse(text.isEmpty());
|
||||
}
|
||||
|
||||
public static class IntTokenStream implements TokenStream {
|
||||
IntegerList types;
|
||||
int p=0;
|
||||
public IntTokenStream(IntegerList types) { this.types = types; }
|
||||
|
||||
@Override
|
||||
public void consume() { p++; }
|
||||
|
||||
@Override
|
||||
public int LA(int i) { return LT(i).getType(); }
|
||||
|
||||
@Override
|
||||
public int mark() {
|
||||
return index();
|
||||
}
|
||||
|
||||
@Override
|
||||
public int index() { return p; }
|
||||
|
||||
@Override
|
||||
public void release(int marker) {
|
||||
seek(marker);
|
||||
}
|
||||
|
||||
@Override
|
||||
public void seek(int index) {
|
||||
p = index;
|
||||
}
|
||||
|
||||
@Override
|
||||
public int size() {
|
||||
return types.size();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getSourceName() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token LT(int i) {
|
||||
CommonToken t;
|
||||
int rawIndex = p + i - 1;
|
||||
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
|
||||
else t = new CommonToken(types.get(rawIndex));
|
||||
t.setTokenIndex(rawIndex);
|
||||
return t;
|
||||
}
|
||||
|
||||
@Override
|
||||
public Token get(int i) {
|
||||
return new org.antlr.v4.runtime.CommonToken(types.get(i));
|
||||
}
|
||||
|
||||
@Override
|
||||
public TokenSource getTokenSource() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText() {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Interval interval) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(RuleContext ctx) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getText(Token start, Token stop) {
|
||||
throw new UnsupportedOperationException("can't give strings");
|
||||
}
|
||||
}
|
||||
|
||||
/** Sort a list */
|
||||
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
|
||||
List<T> dup = new ArrayList<T>();
|
||||
dup.addAll(data);
|
||||
Collections.sort(dup);
|
||||
return dup;
|
||||
}
|
||||
|
||||
/** Return map sorted by key */
|
||||
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
|
||||
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
|
||||
List<K> keys = new ArrayList<K>();
|
||||
keys.addAll(data.keySet());
|
||||
Collections.sort(keys);
|
||||
for (K k : keys) {
|
||||
dup.put(k, data.get(k));
|
||||
}
|
||||
return dup;
|
||||
}
|
||||
}
@@ -47,7 +47,7 @@ public class BasePython2Test extends BasePythonTest {
: "") + "\n" + "if __name__ == '__main__':\n"
+ " main(sys.argv)\n" + "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}

@Override
@@ -105,6 +105,6 @@ public class BasePython2Test extends BasePythonTest {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}
}
@@ -44,7 +44,7 @@ public class BasePython3Test extends BasePythonTest {
: "") + "\n" + "if __name__ == '__main__':\n"
+ " main(sys.argv)\n" + "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}

@Override
@@ -102,6 +102,6 @@ public class BasePython3Test extends BasePythonTest {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}
}
@ -7,38 +7,35 @@
|
|||
package org.antlr.v4.test.runtime.swift;
|
||||
|
||||
import org.antlr.v4.runtime.misc.Pair;
|
||||
import org.antlr.v4.test.runtime.ErrorQueue;
|
||||
import org.antlr.v4.test.runtime.RuntimeTestSupport;
|
||||
import org.antlr.v4.test.runtime.StreamVacuum;
|
||||
import org.antlr.v4.test.runtime.*;
|
||||
import org.stringtemplate.v4.ST;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStreamReader;
|
||||
import java.net.URL;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Collections;
|
||||
import java.util.HashSet;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.Set;
|
||||
import java.util.*;
|
||||
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.mkdir;
|
||||
import static org.antlr.v4.test.runtime.RuntimeTestUtils.mkdir;
|
||||
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class BaseSwiftTest implements RuntimeTestSupport {
public class BaseSwiftTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {

private static final boolean USE_ARCH_ARM64 = false;
private static final boolean VERBOSE = false;
|
||||
/**
|
||||
* Path of the ANTLR runtime.
|
||||
*/
|
||||
private static String ANTLR_RUNTIME_PATH;
|
||||
private static final String ANTLR_RUNTIME_PATH;
|
||||
|
||||
/**
|
||||
* Absolute path to swift command.
|
||||
*/
|
||||
private static String SWIFT_CMD;
|
||||
private static final String SWIFT_CMD;
|
||||
|
||||
/**
|
||||
* Environment variable name for swift home.
|
||||
|
@ -54,7 +51,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
// build swift runtime
|
||||
URL swiftRuntime = loader.getResource("Swift");
|
||||
if (swiftRuntime == null) {
|
||||
throw new RuntimeException("Swift runtime file not found at:" + swiftRuntime.getPath());
|
||||
throw new RuntimeException("Swift runtime file not found");
|
||||
}
|
||||
ANTLR_RUNTIME_PATH = swiftRuntime.getPath();
|
||||
try {
|
||||
|
@ -78,71 +75,16 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
});
|
||||
}
|
||||
|
||||
public String tmpdir = null;
|
||||
|
||||
/**
|
||||
* If error during parser execution, store stderr here; can't return
|
||||
* stdout and stderr. This doesn't trap errors from running antlr.
|
||||
*/
|
||||
private String stderrDuringParse;
|
||||
|
||||
/**
|
||||
* Errors found while running antlr
|
||||
*/
|
||||
private StringBuilder antlrToolErrors;
|
||||
@Override
|
||||
protected String getPropertyPrefix() {
|
||||
return "antrl4-swift";
|
||||
}
|
||||
|
||||
/**
|
||||
* Source files used in each small swift project.
|
||||
*/
|
||||
private Set<String> sourceFiles = new HashSet<>();
|
||||
private final Set<String> sourceFiles = new HashSet<>();
|
||||
|
||||
@Override
|
||||
public void testSetUp() throws Exception {
|
||||
// new output dir for each test
|
||||
String propName = "antlr-swift-test-dir";
|
||||
String prop = System.getProperty(propName);
|
||||
if (prop != null && prop.length() > 0) {
|
||||
tmpdir = prop;
|
||||
}
|
||||
else {
|
||||
String classSimpleName = getClass().getSimpleName();
|
||||
String threadName = Thread.currentThread().getName();
|
||||
String childPath = String.format("%s-%s-%s", classSimpleName, threadName, System.currentTimeMillis());
|
||||
tmpdir = new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
|
||||
}
|
||||
antlrToolErrors = new StringBuilder();
|
||||
}
|
||||
|
||||
@Override
|
||||
public void testTearDown() throws Exception {
|
||||
}
|
||||
|
||||
@Override
|
||||
public void eraseTempDir() {
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getTmpDir() {
|
||||
return tmpdir;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getStdout() {
|
||||
return null;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getParseErrors() {
|
||||
return stderrDuringParse;
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getANTLRToolErrors() {
|
||||
if (antlrToolErrors.length() == 0) {
|
||||
return null;
|
||||
}
|
||||
return antlrToolErrors.toString();
|
||||
}
|
||||
|
||||
@Override
|
||||
public String execLexer(String grammarFileName, String grammarStr, String lexerName, String input, boolean showDFA) {
|
||||
|
@ -150,12 +92,12 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
grammarStr,
|
||||
null,
|
||||
lexerName);
|
||||
writeFile(tmpdir, "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
writeLexerTestFile(lexerName, showDFA);
|
||||
addSourceFiles("main.swift");
|
||||
|
||||
String projectName = "testcase-" + System.currentTimeMillis();
|
||||
String projectDir = getTmpDir() + "/" + projectName;
|
||||
String projectDir = new File(getTempTestDir(), projectName).getAbsolutePath();
|
||||
try {
|
||||
buildProject(projectDir, projectName);
|
||||
return execTest(projectDir, projectName);
|
||||
|
@ -173,7 +115,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
parserName,
|
||||
lexerName,
|
||||
"-visitor");
|
||||
writeFile(getTmpDir(), "input", input);
|
||||
writeFile(getTempDirPath(), "input", input);
|
||||
return execParser(parserName,
|
||||
lexerName,
|
||||
startRuleName,
|
||||
|
@ -184,7 +126,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
try {
|
||||
Pair<String, String> output = runProcess(projectDir, "./.build/debug/" + projectName, "input");
|
||||
if (output.b.length() > 0) {
|
||||
stderrDuringParse = output.b;
|
||||
setParseErrors(output.b);
|
||||
}
|
||||
String stdout = output.a;
|
||||
return stdout.length() > 0 ? stdout : null;
|
||||
|
@ -204,10 +146,10 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
mkdir(projectDir);
|
||||
fastFailRunProcess(projectDir, SWIFT_CMD, "package", "init", "--type", "executable");
|
||||
for (String sourceFile: sourceFiles) {
|
||||
String absPath = getTmpDir() + "/" + sourceFile;
|
||||
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/" + projectName);
|
||||
String absPath = new File(getTempTestDir(), sourceFile).getAbsolutePath();
|
||||
fastFailRunProcess(getTempDirPath(), "mv", "-f", absPath, projectDir + "/Sources/" + projectName);
|
||||
}
|
||||
fastFailRunProcess(getTmpDir(), "mv", "-f", "input", projectDir);
|
||||
fastFailRunProcess(getTempDirPath(), "mv", "-f", "input", projectDir);
|
||||
String dylibPath = ANTLR_RUNTIME_PATH + "/.build/debug/";
|
||||
// System.err.println(dylibPath);
|
||||
Pair<String, String> buildResult = runProcess(projectDir, SWIFT_CMD, "build",
|
||||
|
@ -221,31 +163,99 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
}
|
||||
}
|
||||
|
||||
static Boolean IS_MAC_ARM_64 = null;
|
||||
|
||||
private static boolean isMacOSArm64() {
|
||||
if (IS_MAC_ARM_64 == null) {
|
||||
IS_MAC_ARM_64 = computeIsMacOSArm64();
|
||||
System.err.println("IS_MAC_ARM_64 = " + IS_MAC_ARM_64);
|
||||
}
|
||||
return IS_MAC_ARM_64;
|
||||
}
|
||||
|
||||
private static boolean computeIsMacOSArm64() {
|
||||
String os = System.getenv("RUNNER_OS");
|
||||
if(os==null || !os.equalsIgnoreCase("macos"))
|
||||
return false;
|
||||
try {
|
||||
Process p = Runtime.getRuntime().exec("uname -a");
|
||||
BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream()));
|
||||
String uname = in.readLine();
|
||||
return uname.contains("_ARM64_");
|
||||
} catch (IOException e) {
|
||||
e.printStackTrace();
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
private static Pair<String,String> runProcess(String execPath, String... args) throws IOException, InterruptedException {
|
||||
Process process = Runtime.getRuntime().exec(args, null, new File(execPath));
|
||||
List<String> argsWithArch = new ArrayList<>();
|
||||
if(USE_ARCH_ARM64 && isMacOSArm64())
|
||||
argsWithArch.addAll(Arrays.asList("arch", "-arm64"));
|
||||
argsWithArch.addAll(Arrays.asList(args));
|
||||
if(VERBOSE)
|
||||
System.err.println("Executing " + argsWithArch.toString() + " " + execPath);
|
||||
final Process process = Runtime.getRuntime().exec(argsWithArch.toArray(new String[0]), null, new File(execPath));
|
||||
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
|
||||
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
|
||||
stdoutVacuum.start();
|
||||
stderrVacuum.start();
|
||||
Timer timer = new Timer();
|
||||
timer.schedule(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
process.destroy();
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}, 120_000);
|
||||
int status = process.waitFor();
|
||||
timer.cancel();
|
||||
stdoutVacuum.join();
|
||||
stderrVacuum.join();
|
||||
if(VERBOSE)
|
||||
System.err.println("Done executing " + argsWithArch.toString() + " " + execPath);
|
||||
if (status != 0) {
|
||||
System.err.println("Process exited with status " + status);
|
||||
throw new IOException("Process exited with status " + status + ":\n" + stdoutVacuum.toString() + "\n" + stderrVacuum.toString());
|
||||
}
|
||||
return new Pair<>(stdoutVacuum.toString(), stderrVacuum.toString());
|
||||
}
|
||||
|
||||
private static void fastFailRunProcess(String workingDir, String... command) throws IOException, InterruptedException {
|
||||
ProcessBuilder builder = new ProcessBuilder(command);
|
||||
List<String> argsWithArch = new ArrayList<>();
|
||||
if(USE_ARCH_ARM64 && isMacOSArm64())
|
||||
argsWithArch.addAll(Arrays.asList("arch", "-arm64"));
|
||||
argsWithArch.addAll(Arrays.asList(command));
|
||||
if(VERBOSE)
|
||||
System.err.println("Executing " + argsWithArch.toString() + " " + workingDir);
|
||||
ProcessBuilder builder = new ProcessBuilder(argsWithArch.toArray(new String[0]));
|
||||
builder.directory(new File(workingDir));
|
||||
Process p = builder.start();
|
||||
int status = p.waitFor();
|
||||
final Process process = builder.start();
|
||||
Timer timer = new Timer();
|
||||
timer.schedule(new TimerTask() {
|
||||
@Override
|
||||
public void run() {
|
||||
try {
|
||||
process.destroy();
|
||||
} catch(Exception e) {
|
||||
e.printStackTrace(System.err);
|
||||
}
|
||||
}
|
||||
}, 120_000);
|
||||
int status = process.waitFor();
|
||||
timer.cancel();
|
||||
if(VERBOSE)
|
||||
System.err.println("Done executing " + argsWithArch.toString() + " " + workingDir);
|
||||
if (status != 0) {
|
||||
System.err.println("Process exited with status " + status);
|
||||
throw new IOException("Process exited with status " + status);
|
||||
}
|
||||
}
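Both runProcess and fastFailRunProcess above now guard the child process with the same watchdog pattern: schedule a java.util.Timer task that calls process.destroy() after 120 seconds, wait for the process, then cancel the timer. A condensed, stand-alone sketch of that pattern (my own framing of what the diff does, not a helper that exists in the repository) looks like this:

// Illustrative sketch of the watchdog pattern used in the Swift test runner changes.
import java.io.File;
import java.io.IOException;
import java.util.Timer;
import java.util.TimerTask;

final class WatchdogRunner {
	static int run(File workingDir, long timeoutMillis, String... command)
			throws IOException, InterruptedException {
		final Process process = new ProcessBuilder(command).directory(workingDir).start();
		Timer timer = new Timer(true);
		timer.schedule(new TimerTask() {
			@Override
			public void run() {
				process.destroy();           // kill the child if it outlives the timeout
			}
		}, timeoutMillis);
		try {
			return process.waitFor();        // normal completion reaches the cancel below
		} finally {
			timer.cancel();
		}
	}
}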
|
||||
|
||||
@SuppressWarnings("SameParameterValue")
|
||||
private String execParser(String parserName,
|
||||
String lexerName,
|
||||
String parserStartRuleName,
|
||||
|
@ -265,7 +275,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
|
||||
addSourceFiles("main.swift");
|
||||
String projectName = "testcase-" + System.currentTimeMillis();
|
||||
String projectDir = getTmpDir() + "/" + projectName;
|
||||
String projectDir = new File(getTempTestDir(), projectName).getAbsolutePath();
|
||||
try {
|
||||
buildProject(projectDir, projectName);
|
||||
return execTest(projectDir, projectName);
|
||||
|
@ -324,13 +334,13 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
"parser.setInterpreter(profiler)");
|
||||
}
|
||||
else {
|
||||
outputFileST.add("profile", new ArrayList<Object>());
|
||||
outputFileST.add("profile", new ArrayList<>());
|
||||
}
|
||||
outputFileST.add("createParser", createParserST);
|
||||
outputFileST.add("parserName", parserName);
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
outputFileST.add("parserStartRuleName", parserStartRuleName);
|
||||
writeFile(tmpdir, "main.swift", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "main.swift", outputFileST.render());
|
||||
}
|
||||
|
||||
private void writeLexerTestFile(String lexerName, boolean showDFA) {
|
||||
|
@ -352,7 +362,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
(showDFA ? "print(lex.getInterpreter().getDFA(Lexer.DEFAULT_MODE).toLexerString(), terminator: \"\" )\n" : ""));
|
||||
|
||||
outputFileST.add("lexerName", lexerName);
|
||||
writeFile(tmpdir, "main.swift", outputFileST.render());
|
||||
writeFile(getTempDirPath(), "main.swift", outputFileST.render());
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -363,7 +373,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
String parserName,
|
||||
String lexerName,
|
||||
String... extraOptions) {
|
||||
ErrorQueue equeue = antlrOnString(getTmpDir(), "Swift", grammarFileName, grammarStr, false, extraOptions);
|
||||
ErrorQueue equeue = antlrOnString(getTempDirPath(), "Swift", grammarFileName, grammarStr, false, extraOptions);
|
||||
assertTrue(equeue.errors.isEmpty());
|
||||
// System.out.println(getTmpDir());
|
||||
|
||||
|
@ -387,6 +397,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
|
|||
files.add(grammarName + "BaseVisitor.swift");
|
||||
}
|
||||
}
|
||||
addSourceFiles(files.toArray(new String[files.size()]));
|
||||
addSourceFiles(files.toArray(new String[0]));
|
||||
}
|
||||
|
||||
}
@@ -1,375 +0,0 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
using System.Collections.Generic;
using Antlr4.Runtime.Atn;
using Antlr4.Runtime.Misc;
using Antlr4.Runtime.Sharpen;

namespace Antlr4.Runtime.Atn
{
public class LL1Analyzer
{
/// <summary>
|
||||
/// Special value added to the lookahead sets to indicate that we hit
|
||||
/// a predicate during analysis if
|
||||
/// <c>seeThruPreds==false</c>
|
||||
/// .
|
||||
/// </summary>
|
||||
public const int HitPred = TokenConstants.InvalidType;
|
||||
|
||||
[NotNull]
|
||||
public readonly ATN atn;
|
||||
|
||||
public LL1Analyzer(ATN atn)
|
||||
{
|
||||
this.atn = atn;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Calculates the SLL(1) expected lookahead set for each outgoing transition
|
||||
/// of an
|
||||
/// <see cref="ATNState"/>
|
||||
/// . The returned array has one element for each
|
||||
/// outgoing transition in
|
||||
/// <paramref name="s"/>
|
||||
/// . If the closure from transition
|
||||
/// <em>i</em> leads to a semantic predicate before matching a symbol, the
|
||||
/// element at index <em>i</em> of the result will be
|
||||
/// <see langword="null"/>
|
||||
/// .
|
||||
/// </summary>
|
||||
/// <param name="s">the ATN state</param>
|
||||
/// <returns>
|
||||
/// the expected symbols for each outgoing transition of
|
||||
/// <paramref name="s"/>
|
||||
/// .
|
||||
/// </returns>
|
||||
[return: Nullable]
|
||||
public virtual IntervalSet[] GetDecisionLookahead(ATNState s)
|
||||
{
|
||||
// System.out.println("LOOK("+s.stateNumber+")");
|
||||
if (s == null)
|
||||
{
|
||||
return null;
|
||||
}
|
||||
IntervalSet[] look = new IntervalSet[s.NumberOfTransitions];
|
||||
for (int alt = 0; alt < s.NumberOfTransitions; alt++)
|
||||
{
|
||||
look[alt] = new IntervalSet();
|
||||
HashSet<ATNConfig> lookBusy = new HashSet<ATNConfig>();
|
||||
bool seeThruPreds = false;
|
||||
// fail to get lookahead upon pred
|
||||
Look(s.Transition(alt).target, null, PredictionContext.EMPTY, look[alt], lookBusy, new BitSet(), seeThruPreds, false);
|
||||
// Wipe out lookahead for this alternative if we found nothing
|
||||
// or we had a predicate when we !seeThruPreds
|
||||
if (look[alt].Count == 0 || look[alt].Contains(HitPred))
|
||||
{
|
||||
look[alt] = null;
|
||||
}
|
||||
}
|
||||
return look;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute set of tokens that can follow
|
||||
/// <paramref name="s"/>
|
||||
/// in the ATN in the
|
||||
/// specified
|
||||
/// <paramref name="ctx"/>
|
||||
/// .
|
||||
/// <p>If
|
||||
/// <paramref name="ctx"/>
|
||||
/// is
|
||||
/// <see langword="null"/>
|
||||
/// and the end of the rule containing
|
||||
/// <paramref name="s"/>
|
||||
/// is reached,
|
||||
/// <see cref="TokenConstants.EPSILON"/>
|
||||
/// is added to the result set.
|
||||
/// If
|
||||
/// <paramref name="ctx"/>
|
||||
/// is not
|
||||
/// <see langword="null"/>
|
||||
/// and the end of the outermost rule is
|
||||
/// reached,
|
||||
/// <see cref="TokenConstants.EOF"/>
|
||||
/// is added to the result set.</p>
|
||||
/// </summary>
|
||||
/// <param name="s">the ATN state</param>
|
||||
/// <param name="ctx">
|
||||
/// the complete parser context, or
|
||||
/// <see langword="null"/>
|
||||
/// if the context
|
||||
/// should be ignored
|
||||
/// </param>
|
||||
/// <returns>
|
||||
/// The set of tokens that can follow
|
||||
/// <paramref name="s"/>
|
||||
/// in the ATN in the
|
||||
/// specified
|
||||
/// <paramref name="ctx"/>
|
||||
/// .
|
||||
/// </returns>
|
||||
[return: NotNull]
|
||||
public virtual IntervalSet Look(ATNState s, RuleContext ctx)
|
||||
{
|
||||
return Look(s, null, ctx);
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute set of tokens that can follow
|
||||
/// <paramref name="s"/>
|
||||
/// in the ATN in the
|
||||
/// specified
|
||||
/// <paramref name="ctx"/>
|
||||
/// .
|
||||
/// <p>If
|
||||
/// <paramref name="ctx"/>
|
||||
/// is
|
||||
/// <see langword="null"/>
|
||||
/// and the end of the rule containing
|
||||
/// <paramref name="s"/>
|
||||
/// is reached,
|
||||
/// <see cref="TokenConstants.EPSILON"/>
|
||||
/// is added to the result set.
|
||||
/// If
|
||||
/// <paramref name="ctx"/>
|
||||
/// is not
|
||||
/// <c>PredictionContext#EMPTY_LOCAL</c>
|
||||
/// and the end of the outermost rule is
|
||||
/// reached,
|
||||
/// <see cref="TokenConstants.EOF"/>
|
||||
/// is added to the result set.</p>
|
||||
/// </summary>
|
||||
/// <param name="s">the ATN state</param>
|
||||
/// <param name="stopState">
|
||||
/// the ATN state to stop at. This can be a
|
||||
/// <see cref="BlockEndState"/>
|
||||
/// to detect epsilon paths through a closure.
|
||||
/// </param>
|
||||
/// <param name="ctx">
|
||||
/// the complete parser context, or
|
||||
/// <see langword="null"/>
|
||||
/// if the context
|
||||
/// should be ignored
|
||||
/// </param>
|
||||
/// <returns>
|
||||
/// The set of tokens that can follow
|
||||
/// <paramref name="s"/>
|
||||
/// in the ATN in the
|
||||
/// specified
|
||||
/// <paramref name="ctx"/>
|
||||
/// .
|
||||
/// </returns>
|
||||
[return: NotNull]
|
||||
public virtual IntervalSet Look(ATNState s, ATNState stopState, RuleContext ctx)
|
||||
{
|
||||
IntervalSet r = new IntervalSet();
|
||||
bool seeThruPreds = true;
|
||||
PredictionContext lookContext = ctx != null ? PredictionContext.FromRuleContext(s.atn, ctx) : null;
|
||||
Look(s, stopState, lookContext, r, new HashSet<ATNConfig>(), new BitSet(), seeThruPreds, true);
|
||||
return r;
|
||||
}
|
||||
|
||||
/// <summary>
|
||||
/// Compute set of tokens that can follow
|
||||
/// <paramref name="s"/>
|
||||
/// in the ATN in the
|
||||
/// specified
|
||||
/// <paramref name="ctx"/>
|
||||
/// .
|
||||
/// <p/>
|
||||
/// If
|
||||
/// <paramref name="ctx"/>
|
||||
/// is
|
||||
/// <see cref="PredictionContext.EMPTY"/>
|
||||
/// and
|
||||
/// <paramref name="stopState"/>
|
||||
/// or the end of the rule containing
|
||||
/// <paramref name="s"/>
|
||||
/// is reached,
|
||||
/// <see cref="TokenConstants.EPSILON"/>
|
||||
/// is added to the result set. If
|
||||
/// <paramref name="ctx"/>
|
||||
/// is not
|
||||
/// <see cref="PredictionContext.EMPTY"/>
|
||||
/// and
|
||||
/// <paramref name="addEOF"/>
|
||||
/// is
|
||||
/// <see langword="true"/>
|
||||
/// and
|
||||
/// <paramref name="stopState"/>
|
||||
/// or the end of the outermost rule is reached,
|
||||
/// <see cref="TokenConstants.EOF"/>
|
||||
/// is added to the result set.
|
||||
/// </summary>
|
||||
/// <param name="s">the ATN state.</param>
|
||||
/// <param name="stopState">
|
||||
/// the ATN state to stop at. This can be a
|
||||
/// <see cref="BlockEndState"/>
|
||||
/// to detect epsilon paths through a closure.
|
||||
/// </param>
|
||||
/// <param name="ctx">
|
||||
/// The outer context, or
|
||||
/// <see cref="PredictionContext.EMPTY"/>
|
||||
/// if
|
||||
/// the outer context should not be used.
|
||||
/// </param>
|
||||
/// <param name="look">The result lookahead set.</param>
|
||||
/// <param name="lookBusy">
|
||||
/// A set used for preventing epsilon closures in the ATN
|
||||
/// from causing a stack overflow. Outside code should pass
|
||||
/// <c>new HashSet<ATNConfig></c>
|
||||
/// for this argument.
|
||||
/// </param>
|
||||
/// <param name="calledRuleStack">
|
||||
/// A set used for preventing left recursion in the
|
||||
/// ATN from causing a stack overflow. Outside code should pass
|
||||
/// <c>new BitSet()</c>
|
||||
/// for this argument.
|
||||
/// </param>
|
||||
/// <param name="seeThruPreds">
|
||||
///
|
||||
/// <see langword="true"/>
|
||||
/// to true semantic predicates as
|
||||
/// implicitly
|
||||
/// <see langword="true"/>
|
||||
/// and "see through them", otherwise
|
||||
/// <see langword="false"/>
|
||||
/// to treat semantic predicates as opaque and add
|
||||
/// <see cref="HitPred"/>
|
||||
/// to the
|
||||
/// result if one is encountered.
|
||||
/// </param>
|
||||
/// <param name="addEOF">
|
||||
/// Add
|
||||
/// <see cref="TokenConstants.EOF"/>
|
||||
/// to the result if the end of the
|
||||
/// outermost context is reached. This parameter has no effect if
|
||||
/// <paramref name="ctx"/>
|
||||
/// is
|
||||
/// <see cref="PredictionContext.EMPTY"/>
|
||||
/// .
|
||||
/// </param>
|
||||
protected internal virtual void Look(ATNState s, ATNState stopState, PredictionContext ctx, IntervalSet look, HashSet<ATNConfig> lookBusy, BitSet calledRuleStack, bool seeThruPreds, bool addEOF)
|
||||
{
|
||||
// System.out.println("_LOOK("+s.stateNumber+", ctx="+ctx);
|
||||
ATNConfig c = new ATNConfig(s, 0, ctx);
|
||||
if (!lookBusy.Add(c))
|
||||
{
|
||||
return;
|
||||
}
|
||||
if (s == stopState)
|
||||
{
|
||||
if (ctx == null)
|
||||
{
|
||||
look.Add(TokenConstants.EPSILON);
|
||||
return;
|
||||
}
|
||||
else if (ctx.IsEmpty && addEOF) {
|
||||
look.Add(TokenConstants.EOF);
|
||||
return;
|
||||
}
|
||||
}
|
||||
if (s is RuleStopState)
|
||||
{
|
||||
if (ctx == null)
|
||||
{
|
||||
look.Add(TokenConstants.EPSILON);
|
||||
return;
|
||||
}
|
||||
else if (ctx.IsEmpty && addEOF)
|
||||
{
|
||||
look.Add(TokenConstants.EOF);
|
||||
return;
|
||||
}
|
||||
if (ctx != PredictionContext.EMPTY)
|
||||
{
|
||||
for (int i = 0; i < ctx.Size; i++)
|
||||
{
|
||||
ATNState returnState = atn.states[ctx.GetReturnState(i)];
|
||||
bool removed = calledRuleStack.Get(returnState.ruleIndex);
|
||||
try
|
||||
{
|
||||
calledRuleStack.Clear(returnState.ruleIndex);
|
||||
Look(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
|
||||
}
|
||||
finally
|
||||
{
|
||||
if (removed)
|
||||
{
|
||||
calledRuleStack.Set(returnState.ruleIndex);
|
||||
}
|
||||
}
|
||||
}
|
||||
return;
|
||||
}
|
||||
}
|
||||
int n = s.NumberOfTransitions;
|
||||
for (int i_1 = 0; i_1 < n; i_1++)
|
||||
{
|
||||
Transition t = s.Transition(i_1);
|
||||
if (t is RuleTransition)
|
||||
{
|
||||
RuleTransition ruleTransition = (RuleTransition)t;
|
||||
if (calledRuleStack.Get(ruleTransition.ruleIndex))
|
||||
{
|
||||
continue;
|
||||
}
|
||||
PredictionContext newContext = SingletonPredictionContext.Create(ctx, ruleTransition.followState.stateNumber);
|
||||
try
|
||||
{
|
||||
calledRuleStack.Set(ruleTransition.target.ruleIndex);
|
||||
Look(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
|
||||
}
|
||||
finally
|
||||
{
|
||||
calledRuleStack.Clear(ruleTransition.target.ruleIndex);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (t is AbstractPredicateTransition)
|
||||
{
|
||||
if (seeThruPreds)
|
||||
{
|
||||
Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
|
||||
}
|
||||
else
|
||||
{
|
||||
look.Add(HitPred);
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
if (t.IsEpsilon)
|
||||
{
|
||||
Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
|
||||
}
|
||||
else
|
||||
{
|
||||
if (t is WildcardTransition)
|
||||
{
|
||||
look.AddAll(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
|
||||
}
|
||||
else
|
||||
{
|
||||
IntervalSet set = t.Label;
|
||||
if (set != null)
|
||||
{
|
||||
if (t is NotSetTransition)
|
||||
{
|
||||
set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
|
||||
}
|
||||
look.AddAll(set);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@@ -1,9 +1,9 @@
<Project Sdk="Microsoft.NET.Sdk">
<PropertyGroup>
<Company>The ANTLR Organization</Company>
<Version>4.9.1</Version>
<Version>4.9.2</Version>
<NeutralLanguage>en-US</NeutralLanguage>
<TargetFrameworks>netstandard2.0;netstandard2.1</TargetFrameworks>
<TargetFramework>netstandard2.0</TargetFramework>
<NoWarn>$(NoWarn);CS1591;CS1574;CS1580</NoWarn>
<GenerateDocumentationFile>true</GenerateDocumentationFile>
<AssemblyName>Antlr4.Runtime.Standard</AssemblyName>
@@ -32,6 +32,7 @@
<GenerateAssemblyFileVersionAttribute>false</GenerateAssemblyFileVersionAttribute>
<GenerateAssemblyInformationalVersionAttribute>false</GenerateAssemblyInformationalVersionAttribute>
<RootNamespace>Antlr4.Runtime</RootNamespace>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>

@@ -41,6 +41,9 @@ namespace Antlr4.Runtime.Atn
/// <since>4.3</since>
public class AmbiguityInfo : DecisionEventInfo
{
/** The set of alternative numbers for this decision event that lead to a valid parse. */
public BitSet ambigAlts;

/// <summary>
/// Constructs a new instance of the
/// <see cref="AmbiguityInfo"/>
@@ -48,19 +51,30 @@ namespace Antlr4.Runtime.Atn
/// specified detailed ambiguity information.
/// </summary>
/// <param name="decision">The decision number</param>
/// <param name="state">
/// The final simulator state identifying the ambiguous
/// <param name="configs">The final configuration set identifying the ambiguous
/// alternatives for the current input
/// </param>
/// <param name="ambigAlts">The set of alternatives in the decision that lead to a valid parse.
/// The predicted alt is the min(ambigAlts)
/// </param>
/// <param name="input">The input token stream</param>
/// <param name="startIndex">The start index for the current prediction</param>
/// <param name="stopIndex">
/// The index at which the ambiguity was identified during
/// prediction
/// </param>
public AmbiguityInfo(int decision, SimulatorState state, ITokenStream input, int startIndex, int stopIndex)
: base(decision, state, input, startIndex, stopIndex, state.useContext)
/// <param name="fullCtx">{@code true} if the ambiguity was identified during LL
/// prediction; otherwise, {@code false} if the ambiguity was identified
/// during SLL prediction
/// </param>
public AmbiguityInfo(int decision,
ATNConfigSet configs,
BitSet ambigAlts,
ITokenStream input, int startIndex, int stopIndex,
bool fullCtx)
: base(decision, configs, input, startIndex, stopIndex, fullCtx)
{
this.ambigAlts = ambigAlts;
}
}
}

@@ -35,9 +35,8 @@ namespace Antlr4.Runtime.Atn
/// with the specified detailed context sensitivity information.
/// </summary>
/// <param name="decision">The decision number</param>
/// <param name="state">
/// The final simulator state containing the unique
/// alternative identified by full-context prediction
/// <param name="configs">The final configuration set identifying the ambiguous
/// alternatives for the current input
/// </param>
/// <param name="input">The input token stream</param>
/// <param name="startIndex">The start index for the current prediction</param>
@@ -45,8 +44,8 @@ namespace Antlr4.Runtime.Atn
/// The index at which the context sensitivity was
/// identified during full-context prediction
/// </param>
public ContextSensitivityInfo(int decision, SimulatorState state, ITokenStream input, int startIndex, int stopIndex)
: base(decision, state, input, startIndex, stopIndex, true)
public ContextSensitivityInfo(int decision, ATNConfigSet configs, ITokenStream input, int startIndex, int stopIndex)
: base(decision, configs, input, startIndex, stopIndex, true)
{
}
}

@@ -25,15 +25,13 @@ namespace Antlr4.Runtime.Atn
/// <seealso cref="ATN.decisionToState"/>
public readonly int decision;

/// <summary>
/// The simulator state containing additional information relevant to the
/// prediction state when the current event occurred, or
/// <see langword="null"/>
/// if no
/// additional information is relevant or available.
/// </summary>
[Nullable]
public readonly SimulatorState state;
/// <summary>The configuration set containing additional information relevant to the
/// prediction state when the current event occurred, or {@code null} if no
/// additional information is relevant or available.</summary>
/// <remarks>The configuration set containing additional information relevant to the
/// prediction state when the current event occurred, or {@code null} if no
/// additional information is relevant or available.</remarks>
public readonly ATNConfigSet configs;

/// <summary>The input token stream which is being parsed.</summary>
/// <remarks>The input token stream which is being parsed.</remarks>
@@ -63,14 +61,17 @@ namespace Antlr4.Runtime.Atn
/// </summary>
public readonly bool fullCtx;

public DecisionEventInfo(int decision, SimulatorState state, ITokenStream input, int startIndex, int stopIndex, bool fullCtx)
public DecisionEventInfo(int decision,
ATNConfigSet configs,
ITokenStream input, int startIndex, int stopIndex,
bool fullCtx)
{
this.decision = decision;
this.fullCtx = fullCtx;
this.stopIndex = stopIndex;
this.input = input;
this.startIndex = startIndex;
this.state = state;
this.configs = configs;
}
}
}

@@ -30,17 +30,18 @@ namespace Antlr4.Runtime.Atn
/// specified detailed syntax error information.
/// </summary>
/// <param name="decision">The decision number</param>
/// <param name="state">
/// The final simulator state reached during prediction
/// prior to reaching the
/// <see cref="ATNSimulator.ERROR"/>
/// state
/// <param name="configs">The final configuration set reached during prediction
/// prior to reaching the {@link ATNSimulator#ERROR} state
/// </param>
/// <param name="input">The input token stream</param>
/// <param name="startIndex">The start index for the current prediction</param>
/// <param name="stopIndex">The index at which the syntax error was identified</param>
public ErrorInfo(int decision, SimulatorState state, ITokenStream input, int startIndex, int stopIndex)
: base(decision, state, input, startIndex, stopIndex, state.useContext)
/// <param name="fullCtx">{@code true} if the syntax error was identified during LL
/// prediction; otherwise, {@code false} if the syntax error was identified
/// during SLL prediction
/// </param>
public ErrorInfo(int decision, ATNConfigSet configs, ITokenStream input, int startIndex, int stopIndex, bool fullCtx)
: base(decision, configs, input, startIndex, stopIndex, fullCtx)
{
}
}

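Note (not part of the diff): the common thread in the DecisionEventInfo, AmbiguityInfo, ContextSensitivityInfo and ErrorInfo hunks above is a constructor-shape change — the event records now carry the final ATNConfigSet plus an explicit fullCtx flag instead of a SimulatorState whose useContext field implied it. A minimal stand-in sketch of that shape, using invented placeholder types purely for illustration (the real classes live in Antlr4.Runtime.Atn):

```csharp
// Placeholder types, invented for this sketch only.
class ConfigSetStandIn { }
class TokenStreamStandIn { }

class DecisionEventInfoSketch
{
    public readonly int Decision;
    public readonly ConfigSetStandIn Configs;   // previously: a simulator-state object
    public readonly TokenStreamStandIn Input;
    public readonly int StartIndex;
    public readonly int StopIndex;
    public readonly bool FullCtx;               // previously: derived from state.useContext

    public DecisionEventInfoSketch(int decision,
                                   ConfigSetStandIn configs,
                                   TokenStreamStandIn input, int startIndex, int stopIndex,
                                   bool fullCtx)
    {
        Decision = decision;
        Configs = configs;
        Input = input;
        StartIndex = startIndex;
        StopIndex = stopIndex;
        FullCtx = fullCtx;
    }
}
```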
@@ -0,0 +1,252 @@
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
 * Use of this file is governed by the BSD 3-clause license that
 * can be found in the LICENSE.txt file in the project root.
 */
using System.Collections.Generic;
using Antlr4.Runtime.Misc;
using Antlr4.Runtime.Sharpen;

namespace Antlr4.Runtime.Atn
{
public class LL1Analyzer
{
/** Special value added to the lookahead sets to indicate that we hit
 * a predicate during analysis if {@code seeThruPreds==false}.
 */
public const int HitPred = TokenConstants.InvalidType;

[NotNull]
public readonly ATN atn;

public LL1Analyzer(ATN atn)
{
this.atn = atn;
}

/**
 * Calculates the SLL(1) expected lookahead set for each outgoing transition
 * of an {@link ATNState}. The returned array has one element for each
 * outgoing transition in {@code s}. If the closure from transition
 * <em>i</em> leads to a semantic predicate before matching a symbol, the
 * element at index <em>i</em> of the result will be {@code null}.
 *
 * @param s the ATN state
 * @return the expected symbols for each outgoing transition of {@code s}.
 */
[return: Nullable]
public virtual IntervalSet[] GetDecisionLookahead(ATNState s)
{
// System.out.println("LOOK("+s.stateNumber+")");
if (s == null)
{
return null;
}
IntervalSet[] look = new IntervalSet[s.NumberOfTransitions];
for (int alt = 0; alt < s.NumberOfTransitions; alt++)
{
look[alt] = new IntervalSet();
HashSet<ATNConfig> lookBusy = new HashSet<ATNConfig>();
bool seeThruPreds = false;
// fail to get lookahead upon pred
Look_(s.Transition(alt).target, null, PredictionContext.EMPTY, look[alt], lookBusy, new BitSet(), seeThruPreds, false);
// Wipe out lookahead for this alternative if we found nothing
// or we had a predicate when we !seeThruPreds
if (look[alt].Count == 0 || look[alt].Contains(HitPred))
{
look[alt] = null;
}
}
return look;
}

/**
 * Compute set of tokens that can follow {@code s} in the ATN in the
 * specified {@code ctx}.
 *
 * <p>If {@code ctx} is {@code null} and the end of the rule containing
 * {@code s} is reached, {@link Token#EPSILON} is added to the result set.
 * If {@code ctx} is not {@code null} and the end of the outermost rule is
 * reached, {@link Token#EOF} is added to the result set.</p>
 *
 * @param s the ATN state
 * @param ctx the complete parser context, or {@code null} if the context
 * should be ignored
 *
 * @return The set of tokens that can follow {@code s} in the ATN in the
 * specified {@code ctx}.
 */
[return: NotNull]
public virtual IntervalSet Look(ATNState s, RuleContext ctx)
{
return Look(s, null, ctx);
}

/**
 * Compute set of tokens that can follow {@code s} in the ATN in the
 * specified {@code ctx}.
 *
 * <p>If {@code ctx} is {@code null} and the end of the rule containing
 * {@code s} is reached, {@link Token#EPSILON} is added to the result set.
 * If {@code ctx} is not {@code null} and the end of the outermost rule is
 * reached, {@link Token#EOF} is added to the result set.</p>
 *
 * @param s the ATN state
 * @param stopState the ATN state to stop at. This can be a
 * {@link BlockEndState} to detect epsilon paths through a closure.
 * @param ctx the complete parser context, or {@code null} if the context
 * should be ignored
 *
 * @return The set of tokens that can follow {@code s} in the ATN in the
 * specified {@code ctx}.
 */
[return: NotNull]
public virtual IntervalSet Look(ATNState s, ATNState stopState, RuleContext ctx)
{
IntervalSet r = new IntervalSet();
bool seeThruPreds = true;
PredictionContext lookContext = ctx != null ? PredictionContext.FromRuleContext(s.atn, ctx) : null;
Look_(s, stopState, lookContext, r, new HashSet<ATNConfig>(), new BitSet(), seeThruPreds, true);
return r;
}

/**
 * Compute set of tokens that can follow {@code s} in the ATN in the
 * specified {@code ctx}.
 *
 * <p>If {@code ctx} is {@code null} and {@code stopState} or the end of the
 * rule containing {@code s} is reached, {@link Token#EPSILON} is added to
 * the result set. If {@code ctx} is not {@code null} and {@code addEOF} is
 * {@code true} and {@code stopState} or the end of the outermost rule is
 * reached, {@link Token#EOF} is added to the result set.</p>
 *
 * @param s the ATN state.
 * @param stopState the ATN state to stop at. This can be a
 * {@link BlockEndState} to detect epsilon paths through a closure.
 * @param ctx The outer context, or {@code null} if the outer context should
 * not be used.
 * @param look The result lookahead set.
 * @param lookBusy A set used for preventing epsilon closures in the ATN
 * from causing a stack overflow. Outside code should pass
 * {@code new HashSet<ATNConfig>} for this argument.
 * @param calledRuleStack A set used for preventing left recursion in the
 * ATN from causing a stack overflow. Outside code should pass
 * {@code new BitSet()} for this argument.
 * @param seeThruPreds {@code true} to true semantic predicates as
 * implicitly {@code true} and "see through them", otherwise {@code false}
 * to treat semantic predicates as opaque and add {@link #HIT_PRED} to the
 * result if one is encountered.
 * @param addEOF Add {@link Token#EOF} to the result if the end of the
 * outermost context is reached. This parameter has no effect if {@code ctx}
 * is {@code null}.
 */
protected internal virtual void Look_(ATNState s, ATNState stopState, PredictionContext ctx, IntervalSet look, HashSet<ATNConfig> lookBusy, BitSet calledRuleStack, bool seeThruPreds, bool addEOF)
{
ATNConfig c = new ATNConfig(s, 0, ctx);
if (!lookBusy.Add(c))
{
return;
}
if (s == stopState)
{
if (ctx == null)
{
look.Add(TokenConstants.EPSILON);
return;
}
else if (ctx.IsEmpty && addEOF)
{
look.Add(TokenConstants.EOF);
return;
}
}
if (s is RuleStopState)
{
if (ctx == null)
{
look.Add(TokenConstants.EPSILON);
return;
}
else if (ctx.IsEmpty && addEOF)
{
look.Add(TokenConstants.EOF);
return;
}
if (ctx != PredictionContext.EMPTY)
{
bool removed = calledRuleStack.Get(s.ruleIndex);
try
{
calledRuleStack.Clear(s.ruleIndex);
for (int i = 0; i < ctx.Size; i++)
{
ATNState returnState = atn.states[ctx.GetReturnState(i)];
Look_(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
}
finally
{
if (removed)
{
calledRuleStack.Set(s.ruleIndex);
}
}
return;
}
}
int n = s.NumberOfTransitions;
for (int i_1 = 0; i_1 < n; i_1++)
{
Transition t = s.Transition(i_1);
if (t.GetType() == typeof(RuleTransition))
{
RuleTransition ruleTransition = (RuleTransition)t;
if (calledRuleStack.Get(ruleTransition.ruleIndex))
{
continue;
}
PredictionContext newContext = SingletonPredictionContext.Create(ctx, ruleTransition.followState.stateNumber);
try
{
calledRuleStack.Set(ruleTransition.target.ruleIndex);
Look_(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
finally
{
calledRuleStack.Clear(ruleTransition.target.ruleIndex);
}
}
else if (t is AbstractPredicateTransition)
{
if (seeThruPreds)
{
Look_(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
else
{
look.Add(HitPred);
}
}
else if (t.IsEpsilon)
{
Look_(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
else if (t.GetType() == typeof(WildcardTransition))
{
look.AddAll(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
}
else
{
IntervalSet set = t.Label;
if (set != null)
{
if (t is NotSetTransition)
{
set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
}
look.AddAll(set);
}
}
}
}
}
}

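Note (not part of the diff): the LL1Analyzer added above computes lookahead by chasing epsilon transitions while a lookBusy set guards against cycles. The standalone toy below (all names invented for illustration; it does not use the ANTLR runtime) shows only that closure-with-busy-set idea; the real Look_ additionally threads a PredictionContext, a calledRuleStack, seeThruPreds, and addEOF.

```csharp
using System;
using System.Collections.Generic;

// Toy graph: each state has outgoing edges that are either token edges or epsilon edges.
class Edge
{
    public int Target;       // target state
    public int Token = -1;   // -1 means epsilon
}

class ToyLookahead
{
    readonly Dictionary<int, List<Edge>> transitions; // state -> outgoing edges

    public ToyLookahead(Dictionary<int, List<Edge>> transitions)
    {
        this.transitions = transitions;
    }

    // Collect every token reachable from `state` through epsilon edges only,
    // mirroring the lookBusy pattern: the busy set stops infinite recursion on cycles.
    public ISet<int> Look(int state)
    {
        var look = new HashSet<int>();
        var busy = new HashSet<int>();
        Walk(state, look, busy);
        return look;
    }

    void Walk(int state, ISet<int> look, ISet<int> busy)
    {
        if (!busy.Add(state))
        {
            return; // already expanded; avoids a stack overflow on epsilon cycles
        }
        if (!transitions.TryGetValue(state, out var edges))
        {
            return;
        }
        foreach (var e in edges)
        {
            if (e.Token >= 0)
            {
                look.Add(e.Token);          // token edge contributes to the lookahead set
            }
            else
            {
                Walk(e.Target, look, busy); // epsilon edge: keep expanding the closure
            }
        }
    }

    static void Main()
    {
        var g = new Dictionary<int, List<Edge>>
        {
            [0] = new List<Edge> { new Edge { Target = 1 }, new Edge { Target = 0 } }, // epsilon cycle
            [1] = new List<Edge> { new Edge { Target = 2, Token = 7 }, new Edge { Target = 3, Token = 9 } },
        };
        // Prints the tokens reachable from state 0: 7 and 9.
        Console.WriteLine(string.Join(",", new ToyLookahead(g).Look(0)));
    }
}
```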
Some files were not shown because too many files have changed in this diff.