Cross-repository merge request with refresh #21

Closed
jasder wants to merge 30 commits from forgetest1/antlr:12231 into fix-3216
106 changed files with 2995 additions and 4954 deletions
Showing only changes of commit ed2631b2d0

66
.circleci/config.yml Normal file
View File

@ -0,0 +1,66 @@
version: 2.1
jobs:
test_tool_and_runtime_java:
docker:
- image: cimg/openjdk:8.0
steps:
- checkout
- run:
name: build tool
command: mvn -B -V -DskipTests=true -Dmaven.javadoc.skip=true install
- run:
name: test runtime
command: |
cd runtime-testsuite
mvn -q -Dparallel=methods -DthreadCount=4 -Dtest=java.* test
cd ..
- run:
name: test tool
command: |
cd tool-testsuite
mvn -q -Dparallel=methods -DthreadCount=4 test
cd ..
test_runtime:
parameters:
test-group:
description: The section
type: string
default: ALL
target:
description: The target
type: string
default: java
docker:
- image: cimg/openjdk:8.0
environment:
TARGET: << parameters.target >>
GROUP: << parameters.test-group >>
steps:
- checkout
- run:
name: Install << parameters.target >> pre-requisites
command: |
f=".circleci/scripts/install-linux-<< parameters.target >>.sh"; ! [ -x "$f" ] || "$f"
- run:
name: Build ANTLR4 tool
command: mvn -B -V -DskipTests=true -Dmaven.javadoc.skip=true install
- run:
name: Test << parameters.target >> runtime
command: |
.circleci/scripts/run-tests-<< parameters.target >>.sh
workflows:
build:
jobs:
- test_tool_and_runtime_java
- test_runtime:
matrix:
parameters:
target: [ dart, go, python2, python3, javascript ]
- test_runtime:
matrix:
parameters:
# target: [ cpp, dotnet, swift ]
target: [ cpp, dotnet ]
test-group: [ LEXER, PARSER, RECURSION ]

View File

@ -0,0 +1,35 @@
#!/bin/bash
set -euo pipefail
echo "installing cpp SDK..."
sudo apt-get update -y
sudo apt-get install -y clang
sudo apt-get install -y cmake
sudo apt-get install -y pkg-config
sudo apt-get install -y uuid-dev
echo "done installing cpp SDK"
clang++ --version
cmake --version
echo "building cpp runtime..."
pushd "runtime/Cpp/"
echo $PWD
rc=0
if [ $rc == 0 ]; then
cmake . -DCMAKE_BUILD_TYPE=release
rc=$?
fi
if [ $rc == 0 ]; then
make -j 8
rc=$?
fi
popd
echo "done building cpp runtime"

View File

@ -0,0 +1,14 @@
#!/bin/bash
set -euo pipefail
echo "installing dart SDK..."
sudo apt-get update
sudo apt-get install apt-transport-https
sudo sh -c 'wget -qO- https://dl-ssl.google.com/linux/linux_signing_key.pub | apt-key add -'
sudo sh -c 'wget -qO- https://storage.googleapis.com/download.dartlang.org/linux/debian/dart_stable.list > /etc/apt/sources.list.d/dart_stable.list'
sudo apt-get update
sudo apt-get install dart=2.8.4-1
export PATH="$PATH:/usr/lib/dart/bin"
echo "done installing dart SDK"
sudo apt-get install -f

View File

@ -0,0 +1,19 @@
#!/bin/bash
set -euo pipefail
echo "installing .Net SDK..."
wget https://packages.microsoft.com/config/ubuntu/16.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
sudo dpkg -i packages-microsoft-prod.deb
sudo apt-get update; \
sudo apt-get install -y apt-transport-https && \
sudo apt-get update && \
sudo apt-get install -y dotnet-sdk-3.1
export PATH=$PATH:~/.dotnet
echo "done installing .Net SDK"
# we need to build the runtime before the test run, since we pass "--no-dependencies"
# when calling the dotnet CLI for restore and build, in order to speed things up
echo "building runtime..."
dotnet build -c Release -f netstandard2.0 runtime/CSharp/Antlr4.csproj
echo "done building runtime"

View File

@ -0,0 +1,9 @@
#!/bin/bash
set -euo pipefail
echo "installing go SDK..."
sudo apt update
sudo apt install golang-go
go version
echo "done installing go SDK"

View File

@ -0,0 +1,17 @@
#!/bin/bash
set -euo pipefail
# use v14 and check
echo "installing nodejs..."
curl -sL https://deb.nodesource.com/setup_14.x | sudo -E bash -
sudo apt-get install -y nodejs
echo node version: $(node --version)
echo "done installing nodejs..."
echo "packaging javascript runtime..."
pushd runtime/JavaScript
sudo npm install
sudo npm link
popd
echo "done packaging javascript runtime"

View File

@ -0,0 +1,24 @@
#!/bin/bash
echo "before patching"
ls -all /lib/x86_64-linux-gnu/ | grep libcurl
# This would fix missing CURL_OPENSSL_3
# use a dedicated temp dir in the user space
mkdir ~/libcurl3
cd ~/libcurl3
# fetch latest libcurl3
wget http://archive.ubuntu.com/ubuntu/pool/main/c/curl/libcurl3_7.47.0-1ubuntu2_amd64.deb
# extract data.tar.xz
ar x libcurl3* data.tar.xz
# extract all from data.tar.xz
tar xf data.tar.xz
# copy libcurl.so.3 where required
sudo cp -L ~/libcurl3/usr/lib/x86_64-linux-gnu/libcurl.so.4.4.0 /lib/x86_64-linux-gnu/libcurl.so.4.4.0
sudo ln -sf libcurl.so.4.4.0 /lib/x86_64-linux-gnu/libcurl.so.4
cd ..
# drop dedicated temp dir
sudo rm -rf ~/libcurl3
echo "after patching"
ls -all /lib/x86_64-linux-gnu/ | grep libcurl

View File

@ -0,0 +1,11 @@
#!/bin/bash
set -euo pipefail
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
sudo apt-get update -qq
php -v
git clone https://github.com/antlr/antlr-php-runtime.git
mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V

View File

@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
echo "installing python 2..."
sudo apt-get update -y
sudo apt-get install python2
echo "done installing python 2"

View File

@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
echo "installing python 3..."
sudo apt-get update -y
sudo apt-get install python3
echo "done installing python 3"

View File

@ -0,0 +1,36 @@
#!/bin/bash
set -euo pipefail
echo "installing swift SDK..."
.circleci/scripts/install-linux-libcurl3.sh
# see https://tecadmin.net/install-swift-ubuntu-1604-xenial/
sudo apt-get update -y
sudo apt-get install clang libicu-dev
sudo apt-get install libpython2.7 libpython2.7-dev
export SWIFT_VERSION=swift-5.3.2
echo "installing gpg key..."
wget -q -O - https://swift.org/keys/all-keys.asc | sudo gpg --import -
echo "downloading SDK gpg key..."
SWIFT_SDK=https://swift.org/builds/$SWIFT_VERSION-release/ubuntu1604/$SWIFT_VERSION-RELEASE/$SWIFT_VERSION-RELEASE-ubuntu16.04.tar.gz
echo $SWIFT_SDK
wget -q $SWIFT_SDK
sudo tar xzf $SWIFT_VERSION-RELEASE-ubuntu16.04.tar.gz
mv $SWIFT_VERSION-RELEASE-ubuntu16.04 $PWD/swift
export SWIFT_HOME=$PWD/swift/$SWIFT_VERSION-RELEASE-ubuntu16.04/usr/bin/
export PATH=$PWD/swift/usr/bin:$PATH
# This would fix a known linker issue, see https://bugs.swift.org/browse/SR-2299
sudo ln -sf ld.gold /usr/bin/ld
# This would fix missing libtinfo.so.5
sudo apt install libncurses5
echo "done installing swift SDK..."
# check swift
swift --version
swift build --version

View File

@ -0,0 +1,17 @@
#!/bin/bash
set -euo pipefail
pushd runtime-testsuite
echo "running maven tests..."
if [ $GROUP == "LEXER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=cpp.* test
elif [ $GROUP == "PARSER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=cpp.* test
elif [ $GROUP == "RECURSION" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=cpp.* test
else
mvn -q -Dtest=cpp.* test
fi
popd

View File

@ -0,0 +1,11 @@
#!/bin/bash
set -euo pipefail
dart --version
pushd runtime-testsuite
echo "running maven tests..."
# mvn -q -Dparallel=classes -DthreadCount=4 -Dtest=dart.* test
mvn -q -Dtest=dart.* test
popd

View File

@ -0,0 +1,16 @@
#!/bin/bash
set -euo pipefail
pushd runtime-testsuite/
echo "running maven tests..."
if [ $GROUP == "LEXER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=csharp.* test
elif [ $GROUP == "PARSER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=csharp.* test
elif [ $GROUP == "RECURSION" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=csharp.* test
else
mvn -q -Dtest=csharp.* test
fi
popd

View File

@ -0,0 +1,10 @@
#!/bin/bash
set -euo pipefail
go version
pushd runtime-testsuite
echo "running maven tests..."
mvn -q -Dparallel=methods -DthreadCount=4 -Dtest=go.* test
popd

View File

@ -0,0 +1,8 @@
#!/bin/bash
set -euo pipefail
pushd runtime-testsuite
echo "running maven tests..."
mvn -q -Dtest=javascript.* test
popd

View File

@ -0,0 +1,9 @@
#!/bin/bash
set -euo pipefail
php_path=$(which php)
composer install -d ../runtime/PHP
mvn -q -DPHP_PATH="${php_path}" -Dparallel=methods -DthreadCount=4 -Dtest=php.* test

View File

@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
python2 --version
pushd runtime/Python2/tests
echo "running native tests..."
python2 run.py
rc=$?
if [ $rc != 0 ]; then
echo "failed running native tests"
fi
popd
if [ $rc == 0 ]; then
pushd runtime-testsuite
echo "running maven tests..."
mvn -q -Dtest=python2.* test
rc=$?
popd
fi
# return $rc

View File

@ -0,0 +1,24 @@
#!/bin/bash
set -euo pipefail
python3 --version
pushd runtime/Python3/tests
echo "running native tests..."
python3 run.py
rc=$?
if [ $rc != 0 ]; then
echo "failed running native tests"
fi
popd
if [ $rc == 0 ]; then
pushd runtime-testsuite
echo "running maven tests..."
mvn -q -Dtest=python3.* test
rc=$?
popd
fi
# return $rc

View File

@ -0,0 +1,27 @@
#!/bin/bash
set -euo pipefail
pushd runtime/Swift
echo "running native tests..."
./boot.py --test
rc=$?
if [ $rc != 0 ]; then
echo "failed running native tests"
fi
popd
if [ $rc == 0 ]; then
pushd runtime-testsuite
echo "running maven tests..."
if [ $GROUP == "LEXER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=swift.* test
elif [ $GROUP == "PARSER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=swift.* test
elif [ $GROUP == "RECURSION" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=swift.* test
else
mvn -q -Dtest=swift.* test
fi
popd
fi

1112
.github/scripts/install-dotnet-on-osx.sh vendored Executable file

File diff suppressed because it is too large

View File

@ -0,0 +1,30 @@
Notes on getting GitHub Actions runners to work on a factory-fresh Mac running Big Sur
XCode (you need XCode to build the Swift runtime):
- install XCode from the Mac App Store
- Launch it; this will force installation of components
- Go to Preferences -> Locations and select XCode as Command Line Tools
Brew (you need Brew to install maven):
- get the script from https://brew.sh
- once installed, run the following:
echo 'eval $(/opt/homebrew/bin/brew shellenv)' >> /Users/{user-account}/.zprofile
eval $(/opt/homebrew/bin/brew shellenv)
(you need to repeat these last steps for each user account)
Maven (supposedly installed by the github workflow, but it's convenient to have a global install for troubleshooting):
- brew install maven
JDK (we need a specific JDK):
- download openjdk8 from Oracle (later versions break the build because some Java packages have been removed)
- install it -> this will mess up your JAVA_HOME completely, pointing to /Library/Internet...
- fix the JAVA_HOME mess as follows:
sudo rm -fr /Library/Internet\ Plug-Ins/JavaAppletPlugin.plugin
sudo rm -fr /Library/PreferencePanes/JavaControlPanel.prefpane
C++:
- brew install cmake
C#:
- .github/scripts/install-dotnet-on-osx.sh
(you need to repeat this step for each user account)
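For reference, the per-account steps above can be collapsed into a single script. This is only a sketch of the notes above, not a file from this PR; all commands and paths are taken from the steps listed.
```bash
#!/bin/bash
# Sketch only: consolidates the manual per-account setup steps described above.
set -euo pipefail

# Hook Homebrew into the current user's shell (repeat for each account).
echo 'eval $(/opt/homebrew/bin/brew shellenv)' >> "$HOME/.zprofile"
eval "$(/opt/homebrew/bin/brew shellenv)"

# Global installs used for troubleshooting and for the C++ target.
brew install maven cmake

# Clean up the JAVA_HOME mess left behind by the JDK 8 installer.
sudo rm -fr "/Library/Internet Plug-Ins/JavaAppletPlugin.plugin"
sudo rm -fr /Library/PreferencePanes/JavaControlPanel.prefpane

# .NET SDK for the C# target (repeat for each account).
.github/scripts/install-dotnet-on-osx.sh
```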

15
.github/scripts/run-tests-cpp.sh vendored Executable file
View File

@ -0,0 +1,15 @@
#!/bin/bash
set -euo pipefail
cd runtime-testsuite/
if [ $GROUP == "LEXER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=cpp.* test
elif [ $GROUP == "PARSER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=cpp.* test
elif [ $GROUP == "RECURSION" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=cpp.* test
else
mvn -q -Dtest=cpp.* test
fi

23
.github/scripts/run-tests-dotnet.sh vendored Executable file
View File

@ -0,0 +1,23 @@
#!/bin/bash
set -euo pipefail
export PATH=$PATH:~/.dotnet
# we need to build the runtime before the test run, since we pass "--no-dependencies"
# when calling the dotnet CLI for restore and build, in order to speed things up
dotnet build -c Release -f netstandard2.0 runtime/CSharp/Antlr4.csproj
# run tests
cd runtime-testsuite/
if [ $GROUP == "LEXER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest=csharp.* test
elif [ $GROUP == "PARSER" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest=csharp.* test
elif [ $GROUP == "RECURSION" ]; then
mvn -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest=csharp.* test
else
mvn -q -Dtest=csharp.* test
fi

50
.github/scripts/run-tests-swift.sh vendored Executable file
View File

@ -0,0 +1,50 @@
#!/bin/bash
set -euo pipefail
# Linux-specific setup; it has to live here because environment
# variables don't carry across scripts
if [ $RUNNER_OS == "Linux" ]; then
export SWIFT_VERSION=swift-5.0.1
export SWIFT_HOME=$(pwd)/swift/$SWIFT_VERSION-RELEASE-ubuntu16.04/usr/bin/
export PATH=$SWIFT_HOME:$PATH
# download swift
mkdir swift
curl https://swift.org/builds/$SWIFT_VERSION-release/ubuntu1604/$SWIFT_VERSION-RELEASE/$SWIFT_VERSION-RELEASE-ubuntu16.04.tar.gz -s | tar xz -C swift &> /dev/null
fi
if [ -z "${JAVA_HOME}" ]
then
export JAVA_HOME="$(java -XshowSettings:properties -version 2>&1 |
grep 'java\.home' | awk '{ print $3 }')"
fi
echo "export JAVA_HOME=$JAVA_HOME"
# check swift
swift --version
swift build --version
# run swift tests
pushd runtime/Swift
./boot.py --test
rc=$?
popd
if [ $rc == 0 ]; then
# run java tests
cd runtime-testsuite/
if [ $GROUP == "LEXER" ]; then
mvn -e -q -Dgroups="org.antlr.v4.test.runtime.category.LexerTests" -Dtest="swift.*" test
elif [ $GROUP == "PARSER" ]; then
mvn -e -q -Dgroups="org.antlr.v4.test.runtime.category.ParserTests" -Dtest="swift.*" test
elif [ $GROUP == "RECURSION" ]; then
mvn -e -q -Dgroups="org.antlr.v4.test.runtime.category.LeftRecursionTests" -Dtest="swift.*" test
else
mvn -e -q -Dtest=swift.* test
fi
rc=$?
cat target/surefire-reports/*.dumpstream || true
fi
exit $rc

34
.github/workflows/macosx.yml vendored Normal file
View File

@ -0,0 +1,34 @@
name: MacOSX
on:
push:
branches: [ master ]
pull_request:
branches: [ master ]
jobs:
build:
runs-on: [self-hosted, macOS, x64]
strategy:
fail-fast: false
matrix:
# TARGET: [swift, cpp, dotnet] -- disabling dotnet, which is unstable on M1
TARGET: [swift, cpp]
GROUP: [LEXER, PARSER, RECURSION]
steps:
- uses: actions/checkout@v2
- name: Set up JDK 1.8
uses: actions/setup-java@v1
with:
java-version: 1.8
- name: Set up Maven
uses: stCarolas/setup-maven@v4
with:
maven-version: 3.5.4
- name: Build tool with Maven
run: mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V
- name: Test with Maven
run: arch -x86_64 .github/scripts/run-tests-${{ matrix.TARGET }}.sh
env:
TARGET: ${{ matrix.TARGET }}
GROUP: ${{ matrix.GROUP }}

1
.gitignore vendored
View File

@ -100,3 +100,4 @@ javac-services.0.log.lck
# Don't ignore python tests
!runtime/Python3/test/
Antlr4.sln

View File

@ -12,105 +12,12 @@ cache:
- $HOME/Library/Caches/Homebrew
stages:
- smoke-test
- main-test
# - smoke-test
# - main-test
- extended-test
matrix:
include:
- os: linux
dist: focal
compiler: clang
jdk: openjdk11
env:
- TARGET=cpp
- CXX=g++-10
- GROUP=LEXER
stage: main-test
addons:
apt:
sources:
- sourceline: 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main'
packages:
- g++-10
- uuid-dev
- clang-10
- os: linux
dist: focal
compiler: clang
jdk: openjdk11
env:
- TARGET=cpp
- CXX=g++-10
- GROUP=PARSER
stage: main-test
addons:
apt:
sources:
- sourceline: 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main'
packages:
- g++-10
- uuid-dev
- clang-10
- os: linux
dist: focal
compiler: clang
jdk: openjdk11
env:
- TARGET=cpp
- CXX=g++-10
- GROUP=RECURSION
stage: main-test
addons:
apt:
sources:
- sourceline: 'deb http://apt.llvm.org/focal/ llvm-toolchain-focal-10 main'
packages:
- g++-10
- uuid-dev
- clang-10
- os: osx
compiler: clang
osx_image: xcode10.2
env:
- TARGET=cpp
- GROUP=LEXER
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode10.2
env:
- TARGET=cpp
- GROUP=PARSER
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode10.2
env:
- TARGET=cpp
- GROUP=RECURSION
stage: extended-test
- os: osx
compiler: clang
osx_image: xcode10.2
env:
- TARGET=swift
- GROUP=LEXER
stage: main-test
- os: osx
compiler: clang
osx_image: xcode10.2
env:
- TARGET=swift
- GROUP=PARSER
stage: main-test
- os: osx
compiler: clang
osx_image: xcode10.2
env:
- TARGET=swift
- GROUP=RECURSION
stage: main-test
- os: linux
dist: xenial
compiler: clang
@ -118,95 +25,6 @@ matrix:
- TARGET=swift
- GROUP=ALL
stage: extended-test
- os: osx
osx_image: xcode10.2
env:
- TARGET=dotnet
- GROUP=LEXER
stage: extended-test
- os: osx
osx_image: xcode10.2
env:
- TARGET=dotnet
- GROUP=PARSER
stage: extended-test
- os: osx
osx_image: xcode10.2
env:
- TARGET=dotnet
- GROUP=RECURSION
stage: extended-test
- os: linux
dist: trusty
jdk: openjdk7
env: TARGET=java
stage: extended-test
- os: linux
jdk: openjdk8
env: TARGET=java
stage: smoke-test
- os: linux
jdk: openjdk8
env:
- TARGET=dotnet
- GROUP=MAIN
stage: main-test
- os: linux
jdk: openjdk8
env: TARGET=dart
stage: main-test
- os: linux
language: php
php:
- 7.2
jdk: openjdk8
env: TARGET=php
stage: main-test
- os: linux
jdk: openjdk8
env:
- TARGET=dotnet
- GROUP=LEXER
stage: extended-test
- os: linux
jdk: openjdk8
env:
- TARGET=dotnet
- GROUP=PARSER
stage: extended-test
- os: linux
jdk: openjdk8
env:
- TARGET=dotnet
- GROUP=RECURSION
stage: extended-test
- os: linux
jdk: openjdk8
env: TARGET=python2
stage: main-test
- os: linux
jdk: openjdk8
env: TARGET=python3
addons:
apt:
sources:
- deadsnakes # source required so it finds the package definition below
packages:
- python3.7
stage: main-test
- os: linux
dist: trusty
jdk: openjdk8
env: TARGET=javascript
stage: main-test
before_install:
- nvm install 14 # otherwise it runs by default on node 8
- f="./.travis/before-install-linux-javascript.sh"; ! [ -x "$f" ] || "$f"
- os: linux
dist: trusty
jdk: openjdk8
env: TARGET=go
stage: main-test
before_install:
- f="./.travis/before-install-$TRAVIS_OS_NAME-$TARGET.sh"; ! [ -x "$f" ] || "$f"

View File

@ -1,6 +1,14 @@
# ANTLR v4
[![Build Travis-CI Status](https://travis-ci.org/antlr/antlr4.svg?branch=master)](https://travis-ci.org/antlr/antlr4) [![Build AppVeyor Status](https://ci.appveyor.com/api/projects/status/5acpbx1pg7bhgh8v/branch/master?svg=true)](https://ci.appveyor.com/project/parrt/antlr4) [![Java 7+](https://img.shields.io/badge/java-7+-4c7e9f.svg)](http://java.oracle.com) [![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/antlr/antlr4/master/LICENSE.txt)
[![Java 7+](https://img.shields.io/badge/java-7+-4c7e9f.svg)](http://java.oracle.com)
[![License](https://img.shields.io/badge/license-BSD-blue.svg)](https://raw.githubusercontent.com/antlr/antlr4/master/LICENSE.txt)
**Build status**
[![Github CI Build Status (MacOSX)](https://img.shields.io/github/workflow/status/antlr/antlr4/MacOSX?label=MacOSX)](https://github.com/antlr/antlr4/actions)
[![AppVeyor CI Build Status (Windows)](https://img.shields.io/appveyor/build/parrt/antlr4?label=Windows)](https://ci.appveyor.com/project/parrt/antlr4)
[![Circle CI Build Status (Linux)](https://img.shields.io/circleci/build/gh/antlr/antlr4/master?label=Linux)](https://app.circleci.com/pipelines/github/antlr/antlr4)
[![Travis-CI Build Status (Swift-Linux)](https://img.shields.io/travis/antlr/antlr4.svg?label=Linux-Swift&branch=master)](https://travis-ci.com/github/antlr/antlr4)
**ANTLR** (ANother Tool for Language Recognition) is a powerful parser generator for reading, processing, executing, or translating structured text or binary files. It's widely used to build languages, tools, and frameworks. From a grammar, ANTLR generates a parser that can build parse trees and also generates a listener interface (or visitor) that makes it easy to respond to the recognition of phrases of interest.
@ -13,8 +21,8 @@
* [Terence Parr](http://www.cs.usfca.edu/~parrt/), parrt@cs.usfca.edu
ANTLR project lead and supreme dictator for life
[University of San Francisco](http://www.usfca.edu/)
* [Sam Harwell](http://tunnelvisionlabs.com/) (Tool co-author, Java and C# target)
* Eric Vergnaud (Javascript, Python2, Python3 targets and significant work on C# target)
* [Sam Harwell](http://tunnelvisionlabs.com/) (Tool co-author, Java and original C# target)
* [Eric Vergnaud](https://github.com/ericvergnaud) (Javascript, Python2, Python3 targets and maintenance of C# target)
* [Peter Boyer](https://github.com/pboyer) (Go target)
* [Mike Lischke](http://www.soft-gems.net/) (C++ completed target)
* Dan McLaughlin (C++ initial target)

View File

@ -285,3 +285,4 @@ YYYY/MM/DD, github id, Full name, email
2020/12/01, maxence-lefebvre, Maxence Lefebvre, maxence-lefebvre@users.noreply.github.com
2020/12/03, electrum, David Phillips, david@acz.org
2020/12/03, bigerl, Alexander Bigerl, bigerl@mail.upb.de
2021/01/25, l215884529, Qiheng Liu, 13607681+l215884529@users.noreply.github.com

View File

@ -461,6 +461,8 @@ popd
### Dart
Install Dart SDK from https://dart.dev/get-dart
Push to pub.dev
```bash

View File

@ -11,9 +11,7 @@ import org.antlr.v4.runtime.misc.Pair;
import org.antlr.v4.runtime.misc.Utils;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DefaultToolListener;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.*;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
@ -44,6 +42,7 @@ import static org.junit.Assume.assumeFalse;
* @since 4.6.
*/
public abstract class BaseRuntimeTest {
public final static String[] Targets = {
"Cpp",
"CSharp",
@ -56,18 +55,43 @@ public abstract class BaseRuntimeTest {
"Swift"
};
static {
// Add heartbeat thread to gen minimal output for travis, appveyor to
// avoid timeout.
@BeforeClass
public static void startHeartbeatToAvoidTimeout() {
if (isTravisCI() || isAppVeyorCI())
startHeartbeat();
}
@AfterClass
public static void stopHeartbeat() {
heartbeat = false;
}
private static boolean isAppVeyorCI() {
// see https://www.appveyor.com/docs/environment-variables/
String s = System.getenv("APPVEYOR");
return s!=null && "true".equals(s.toLowerCase());
}
private static boolean isTravisCI() {
// see https://docs.travis-ci.com/user/environment-variables/#default-environment-variables
String s = System.getenv("TRAVIS");
return s!=null && "true".equals(s.toLowerCase());
}
static boolean heartbeat = false;
private static void startHeartbeat() {
// Add a heartbeat thread that generates minimal output so Travis/AppVeyor don't time out.
Thread t = new Thread("heartbeat") {
@Override
public void run() {
while (true) {
heartbeat = true;
while (heartbeat) {
System.out.print('.');
try {
//noinspection BusyWait
Thread.sleep(5000);
}
catch (Exception e) {
} catch (Exception e) {
e.printStackTrace();
}
}
@ -87,11 +111,6 @@ public abstract class BaseRuntimeTest {
this.delegate = delegate;
}
public static void mkdir(String dir) {
File f = new File(dir);
f.mkdirs();
}
@Before
public void setUp() throws Exception {
// From http://junit.sourceforge.net/javadoc/org/junit/Assume.html
@ -118,22 +137,24 @@ public abstract class BaseRuntimeTest {
@Test
public void testOne() throws Exception {
// System.out.println(descriptor.getTestName());
// System.out.println(delegate.getTmpDir());
if (descriptor.ignore(descriptor.getTarget()) ) {
System.out.println("Ignore " + descriptor);
return;
}
delegate.beforeTest(descriptor);
if (descriptor.getTestType().contains("Parser") ) {
testParser(descriptor);
}
else {
testLexer(descriptor);
}
delegate.afterTest(descriptor);
}
public void testParser(RuntimeTestDescriptor descriptor) throws Exception {
mkdir(delegate.getTmpDir());
RuntimeTestUtils.mkdir(delegate.getTempParserDirPath());
Pair<String, String> pair = descriptor.getGrammar();
@ -150,7 +171,7 @@ public abstract class BaseRuntimeTest {
g.registerRenderer(String.class, new StringRenderer());
g.importTemplates(targetTemplates);
ST grammarST = new ST(g, spair.b);
writeFile(delegate.getTmpDir(), spair.a+".g4", grammarST.render());
writeFile(delegate.getTempParserDirPath(), spair.a+".g4", grammarST.render());
}
}
@ -175,7 +196,7 @@ public abstract class BaseRuntimeTest {
}
public void testLexer(RuntimeTestDescriptor descriptor) throws Exception {
mkdir(delegate.getTmpDir());
RuntimeTestUtils.mkdir(delegate.getTempParserDirPath());
Pair<String, String> pair = descriptor.getGrammar();
@ -192,7 +213,7 @@ public abstract class BaseRuntimeTest {
g.registerRenderer(String.class, new StringRenderer());
g.importTemplates(targetTemplates);
ST grammarST = new ST(g, spair.b);
writeFile(delegate.getTmpDir(), spair.a+".g4", grammarST.render());
writeFile(delegate.getTempParserDirPath(), spair.a+".g4", grammarST.render());
}
}
@ -216,7 +237,7 @@ public abstract class BaseRuntimeTest {
boolean defaultListener,
String... extraOptions)
{
mkdir(workdir);
RuntimeTestUtils.mkdir(workdir);
writeFile(workdir, grammarFileName, grammarStr);
return antlrOnString(workdir, targetName, grammarFileName, defaultListener, extraOptions);
}

View File

@ -0,0 +1,215 @@
package org.antlr.v4.test.runtime;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import java.io.File;
import java.util.Locale;
import java.util.logging.Logger;
import static org.junit.Assert.assertEquals;
public abstract class BaseRuntimeTestSupport implements RuntimeTestSupport {
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
protected static final Logger logger = Logger.getLogger(BaseRuntimeTestSupport.class.getName());
public static final String NEW_LINE = System.getProperty("line.separator");
public static final String PATH_SEP = System.getProperty("path.separator");
private File tempTestDir = null;
/** If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
private String parseErrors;
/** Errors found while running antlr */
private StringBuilder antlrToolErrors;
@org.junit.Rule
public final TestRule testWatcher = new TestWatcher() {
@Override
protected void succeeded(Description description) {
testSucceeded(description);
}
};
protected void testSucceeded(Description description) {
// remove tmpdir if no error.
eraseTempDir();
}
@Override
public File getTempParserDir() {
return getTempTestDir();
}
@Override
public String getTempParserDirPath() {
return getTempParserDir() == null ? null : getTempParserDir().getAbsolutePath();
}
@Override
public final File getTempTestDir() {
return tempTestDir;
}
@Override
public final String getTempDirPath() {
return tempTestDir ==null ? null : tempTestDir.getAbsolutePath();
}
public void setParseErrors(String errors) {
this.parseErrors = errors;
}
public String getParseErrors() {
return parseErrors;
}
public String getANTLRToolErrors() {
if ( antlrToolErrors.length()==0 ) {
return null;
}
return antlrToolErrors.toString();
}
protected abstract String getPropertyPrefix();
@Override
public void testSetUp() throws Exception {
createTempDir();
antlrToolErrors = new StringBuilder();
}
private void createTempDir() {
// new output dir for each test
String propName = getPropertyPrefix() + "-test-dir";
String prop = System.getProperty(propName);
if(prop!=null && prop.length()>0) {
tempTestDir = new File(prop);
}
else {
String dirName = getClass().getSimpleName() + "-" + Thread.currentThread().getName() + "-" + System.currentTimeMillis();
tempTestDir = new File(System.getProperty("java.io.tmpdir"), dirName);
}
}
@Override
public void testTearDown() throws Exception {
}
@Override
public void beforeTest(RuntimeTestDescriptor descriptor) {
}
@Override
public void afterTest(RuntimeTestDescriptor descriptor) {
}
public void eraseTempDir() {
if(shouldEraseTempDir()) {
eraseDirectory(getTempTestDir());
}
}
protected boolean shouldEraseTempDir() {
if(tempTestDir == null)
return false;
String propName = getPropertyPrefix() + "-erase-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0)
return Boolean.getBoolean(prop);
else
return true;
}
public static void eraseDirectory(File dir) {
if ( dir.exists() ) {
eraseFilesInDir(dir);
dir.delete();
}
}
public static void eraseFilesInDir(File dir) {
String[] files = dir.list();
for(int i = 0; files!=null && i < files.length; i++) {
new File(dir,files[i]).delete();
}
}
private static String detectedOS;
public static String getOS() {
if (detectedOS == null) {
String os = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
if ((os.indexOf("mac") >= 0) || (os.indexOf("darwin") >= 0)) {
detectedOS = "mac";
}
else if (os.indexOf("win") >= 0) {
detectedOS = "windows";
}
else if (os.indexOf("nux") >= 0) {
detectedOS = "linux";
}
else {
detectedOS = "unknown";
}
}
return detectedOS;
}
public static boolean isWindows() {
return getOS().equalsIgnoreCase("windows");
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if ( g.atn==null ) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f = g.isLexer() ? new LexerATNFactory((LexerGrammar) g) : new ParserATNFactory(g);
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if ( useSerializer ) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if ( g.ast!=null && !g.ast.hasErrors ) {
// System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
}

View File

@ -0,0 +1,91 @@
package org.antlr.v4.test.runtime;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
public class MockIntTokenStream implements TokenStream {
public IntegerList types;
int p=0;
public MockIntTokenStream(IntegerList types) { this.types = types; }
@Override
public void consume() { p++; }
@Override
public int LA(int i) { return LT(i).getType(); }
@Override
public int mark() {
return index();
}
@Override
public int index() { return p; }
@Override
public void release(int marker) {
seek(marker);
}
@Override
public void seek(int index) {
p = index;
}
@Override
public int size() {
return types.size();
}
@Override
public String getSourceName() {
return UNKNOWN_SOURCE_NAME;
}
@Override
public Token LT(int i) {
CommonToken t;
int rawIndex = p + i - 1;
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
else t = new CommonToken(types.get(rawIndex));
t.setTokenIndex(rawIndex);
return t;
}
@Override
public Token get(int i) {
return new org.antlr.v4.runtime.CommonToken(types.get(i));
}
@Override
public TokenSource getTokenSource() {
return null;
}
@Override
public String getText() {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Token start, Token stop) {
throw new UnsupportedOperationException("can't give strings");
}
}

View File

@ -6,6 +6,8 @@
package org.antlr.v4.test.runtime;
import java.io.File;
/** This interface describes functionality needed to execute a runtime test.
* Unfortunately the Base*Test.java files are big junk drawers. This is
* an attempt to make it more obvious what new target implementers have to
@ -14,13 +16,22 @@ package org.antlr.v4.test.runtime;
* @since 4.6
*/
public interface RuntimeTestSupport {
void testSetUp() throws Exception;
void testTearDown() throws Exception;
// dir containing grammar input and output
File getTempParserDir();
String getTempParserDirPath();
// dir containing test input and output
File getTempTestDir();
String getTempDirPath();
void eraseTempDir();
String getTmpDir();
void testSetUp() throws Exception;
void testTearDown() throws Exception;
void beforeTest(RuntimeTestDescriptor descriptor);
void afterTest(RuntimeTestDescriptor descriptor);
String getStdout();
String getParseErrors();
String getANTLRToolErrors();
@ -39,4 +50,5 @@ public interface RuntimeTestSupport {
String startRuleName,
String input,
boolean showDiagnosticErrors);
}
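To make the contract above concrete, here is a hypothetical skeleton of what a new target's support class could look like, assuming it extends the BaseRuntimeTestSupport class introduced in this PR; the class name, package suffix, and the "antlr-mytarget" prefix are placeholders, not part of the change.
```java
package org.antlr.v4.test.runtime.mytarget;

import org.antlr.v4.test.runtime.BaseRuntimeTestSupport;
import org.antlr.v4.test.runtime.RuntimeTestSupport;

// Hypothetical skeleton; BaseRuntimeTestSupport already supplies the temp-dir,
// parse-error and tool-error handling, so a concrete target only adds the
// pieces that generate, compile and run the test programs.
public abstract class BaseMyTargetTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {

	@Override
	protected String getPropertyPrefix() {
		// Selects the "<prefix>-test-dir" and "<prefix>-erase-test-dir"
		// system properties consulted by BaseRuntimeTestSupport.
		return "antlr-mytarget";
	}

	@Override
	public String getStdout() {
		// A concrete target would return the captured output of the
		// generated test program here.
		return null;
	}
}
```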

View File

@ -0,0 +1,89 @@
package org.antlr.v4.test.runtime;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.tool.LexerGrammar;
import java.io.*;
import java.util.*;
public abstract class RuntimeTestUtils {
/** Sort a list */
public static <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/** Return map sorted by key */
public static <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
public static List<String> getTokenTypes(LexerGrammar lg,
ATN atn,
CharStream input) {
LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[]{new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
List<String> tokenTypes = new ArrayList<String>();
int ttype;
boolean hitEOF = false;
do {
if ( hitEOF ) {
tokenTypes.add("EOF");
break;
}
int t = input.LA(1);
ttype = interp.match(input, Lexer.DEFAULT_MODE);
if ( ttype==Token.EOF ) {
tokenTypes.add("EOF");
}
else {
tokenTypes.add(lg.typeToTokenList.get(ttype));
}
if ( t== IntStream.EOF ) {
hitEOF = true;
}
} while ( ttype!=Token.EOF );
return tokenTypes;
}
public static IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
ANTLRInputStream in = new ANTLRInputStream(input);
IntegerList tokenTypes = new IntegerList();
int ttype;
do {
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
tokenTypes.add(ttype);
} while ( ttype!= Token.EOF );
return tokenTypes;
}
public static void copyFile(File source, File dest) throws IOException {
InputStream is = new FileInputStream(source);
OutputStream os = new FileOutputStream(dest);
byte[] buf = new byte[4 << 10];
int l;
while ((l = is.read(buf)) > -1) {
os.write(buf, 0, l);
}
is.close();
os.close();
}
public static void mkdir(String dir) {
File f = new File(dir);
f.mkdirs();
}
}

View File

@ -10,10 +10,15 @@ public abstract class TestContext {
return "true".equals(String.valueOf(System.getenv("APPVEYOR")).toLowerCase());
}
public static boolean isCircleCI() {
return "true".equals(String.valueOf(System.getenv("CIRCLECI")).toLowerCase());
}
public static boolean isSupportedTarget(String target) {
if(isAppVeyorCI())
return !target.matches("Swift|Node");
else
return true;
}
}

View File

@ -5,64 +5,19 @@
*/
package org.antlr.v4.test.runtime.cpp;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.DecisionState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.test.runtime.*;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Locale;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
@ -72,248 +27,12 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class BaseCppTest implements RuntimeTestSupport {
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
// private static final Logger LOGGER = Logger.getLogger(BaseTest.class.getName());
public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");
public class BaseCppTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
public String tmpdir = null;
/** If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/** Errors found while running antlr */
protected StringBuilder antlrToolErrors;
private String getPropertyPrefix() {
protected String getPropertyPrefix() {
return "antlr-" + getLanguage().toLowerCase();
}
@Override
public void testSetUp() throws Exception {
// new output dir for each test
String propName = getPropertyPrefix() + "-test-dir";
String prop = System.getProperty(propName);
if(prop!=null && prop.length()>0) {
tmpdir = prop;
}
else {
tmpdir = new File(System.getProperty("java.io.tmpdir"),
getClass().getSimpleName()+"-"+Thread.currentThread().getName()+"-"+System.currentTimeMillis()).getAbsolutePath();
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if ( antlrToolErrors.length()==0 ) {
return null;
}
return antlrToolErrors.toString();
}
protected org.antlr.v4.Tool newTool(String[] args) {
Tool tool = new Tool(args);
return tool;
}
protected Tool newTool() {
org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir});
return tool;
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if ( g.atn==null ) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if ( g.isLexer() ) {
f = new LexerATNFactory((LexerGrammar)g);
}
else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if ( g.ast!=null && !g.ast.hasErrors ) {
System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
public DFA createDFA(Grammar g, DecisionState s) {
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
// DFA dfa = conv.createDFA();
// conv.issueAmbiguityWarnings();
// System.out.print("DFA="+dfa);
// return dfa;
return null;
}
// public void minimizeDFA(DFA dfa) {
// DFAMinimizer dmin = new DFAMinimizer(dfa);
// dfa.minimized = dmin.minimize();
// }
IntegerList getTypesFromString(Grammar g, String expecting) {
IntegerList expectingTokenTypes = new IntegerList();
if ( expecting!=null && !expecting.trim().isEmpty() ) {
for (String tname : expecting.replace(" ", "").split(",")) {
int ttype = g.getTokenType(tname);
expectingTokenTypes.add(ttype);
}
}
return expectingTokenTypes;
}
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
ANTLRInputStream in = new ANTLRInputStream(input);
IntegerList tokenTypes = new IntegerList();
int ttype;
do {
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
tokenTypes.add(ttype);
} while ( ttype!= Token.EOF );
return tokenTypes;
}
public List<String> getTokenTypes(LexerGrammar lg,
ATN atn,
CharStream input)
{
LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
List<String> tokenTypes = new ArrayList<String>();
int ttype;
boolean hitEOF = false;
do {
if ( hitEOF ) {
tokenTypes.add("EOF");
break;
}
int t = input.LA(1);
ttype = interp.match(input, Lexer.DEFAULT_MODE);
if ( ttype == Token.EOF ) {
tokenTypes.add("EOF");
}
else {
tokenTypes.add(lg.typeToTokenList.get(ttype));
}
if ( t== IntStream.EOF ) {
hitEOF = true;
}
} while ( ttype!=Token.EOF );
return tokenTypes;
}
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
throws Exception
{
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
if ( s==null ) {
System.err.println("no such rule: "+ruleName);
return null;
}
ATNState t = s.transition(0).target;
if ( !(t instanceof DecisionState) ) {
System.out.println(ruleName+" has no decision");
return null;
}
DecisionState blk = (DecisionState)t;
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
throws Exception
{
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
DecisionState blk = atn.decisionToState.get(decision);
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
throws Exception
{
DFA dfa = createDFA(g, blk);
String result = null;
if ( dfa!=null ) result = dfa.toString();
assertEquals(expecting, result);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
throws Exception
{
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
throws Exception
{
ErrorQueue equeue = new ErrorQueue();
LexerGrammar g = new LexerGrammar(gtext, equeue);
g.atn = createATN(g, false);
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
// DFA dfa = conv.createDFA(modeName);
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
//
// String result = null;
// if ( dfa!=null ) result = dfa.toString();
// assertEquals(expecting, result);
//
// return equeue.all;
return null;
}
protected String getLanguage() {
return "Cpp";
}
@ -338,44 +57,13 @@ public class BaseCppTest implements RuntimeTestSupport {
null,
lexerName,"-no-listener");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
String output = execModule("Test.cpp");
return output;
}
public ParseTree execStartRule(String startRuleName, Parser parser)
throws IllegalAccessException, InvocationTargetException,
NoSuchMethodException
{
Method startRule = null;
Object[] args = null;
try {
startRule = parser.getClass().getMethod(startRuleName);
}
catch (NoSuchMethodException nsme) {
// try with int _p arg for recursive func
startRule = parser.getClass().getMethod(startRuleName, int.class);
args = new Integer[] {0};
}
ParseTree result = (ParseTree)startRule.invoke(parser, args);
// System.out.println("parse tree = "+result.toStringTree(parser));
return result;
}
// protected String execParser(String grammarFileName,
// String grammarStr,
// String parserName,
// String lexerName,
// String listenerName,
// String visitorName,
// String startRuleName,
// String input,
// boolean debug) {
// return execParser(grammarFileName, grammarStr, parserName, lexerName,
// listenerName, visitorName, startRuleName, input, debug);
// }
//
@Override
public String execParser(String grammarFileName,
String grammarStr,
@ -393,7 +81,7 @@ public class BaseCppTest implements RuntimeTestSupport {
lexerName,
"-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
rawBuildRecognizerTestFile(parserName,
lexerName,
listenerName,
@ -423,7 +111,7 @@ public class BaseCppTest implements RuntimeTestSupport {
String... extraOptions)
{
ErrorQueue equeue =
antlrOnString(getTmpDir(), "Cpp", grammarFileName, grammarStr, defaultListener, extraOptions);
antlrOnString(getTempDirPath(), "Cpp", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}
@ -457,7 +145,7 @@ public class BaseCppTest implements RuntimeTestSupport {
boolean debug,
boolean trace)
{
this.stderrDuringParse = null;
setParseErrors(null);
if ( parserName==null ) {
writeLexerTestFile(lexerName, false);
}
@ -476,26 +164,6 @@ public class BaseCppTest implements RuntimeTestSupport {
}
private static String detectedOS;
public static String getOS() {
if (detectedOS == null) {
String os = System.getProperty("os.name", "generic").toLowerCase(Locale.ENGLISH);
if ((os.indexOf("mac") >= 0) || (os.indexOf("darwin") >= 0)) {
detectedOS = "mac";
}
else if (os.indexOf("win") >= 0) {
detectedOS = "windows";
}
else if (os.indexOf("nux") >= 0) {
detectedOS = "linux";
}
else {
detectedOS = "unknown";
}
}
return detectedOS;
}
public List<String> allCppFiles(String path) {
ArrayList<String> files = new ArrayList<String>();
File folder = new File(path);
@ -510,7 +178,7 @@ public class BaseCppTest implements RuntimeTestSupport {
}
private String runProcess(ProcessBuilder builder, String description, boolean showStderr) throws Exception {
// System.out.println("BUILDER: "+builder.command());
// System.out.println("BUILDER: " + builder.command() + " @ " + builder.directory().toString());
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
@ -521,16 +189,16 @@ public class BaseCppTest implements RuntimeTestSupport {
stderrVacuum.join();
String output = stdoutVacuum.toString();
if ( stderrVacuum.toString().length()>0 ) {
this.stderrDuringParse = stderrVacuum.toString();
if ( showStderr ) System.err.println(this.stderrDuringParse);
setParseErrors(stderrVacuum.toString());
if ( showStderr ) System.err.println(getParseErrors());
}
if (errcode != 0) {
String err = "execution of '"+description+"' failed with error code: "+errcode;
if ( this.stderrDuringParse!=null ) {
this.stderrDuringParse += err;
if ( getParseErrors()!=null ) {
setParseErrors(getParseErrors() + err);
}
else {
this.stderrDuringParse = err;
setParseErrors(err);
}
}
@ -597,15 +265,15 @@ public class BaseCppTest implements RuntimeTestSupport {
public String execModule(String fileName) {
String runtimePath = locateRuntime();
String includePath = runtimePath + "/runtime/src";
String binPath = new File(new File(tmpdir), "a.out").getAbsolutePath();
String inputPath = new File(new File(tmpdir), "input").getAbsolutePath();
String binPath = new File(getTempTestDir(), "a.out").getAbsolutePath();
String inputPath = new File(getTempTestDir(), "input").getAbsolutePath();
// Build runtime using cmake once.
synchronized (runtimeBuiltOnce) {
if ( !runtimeBuiltOnce ) {
try {
String command[] = {"clang++", "--version"};
String output = runCommand(command, tmpdir, "printing compiler version", false);
String output = runCommand(command, getTempDirPath(), "printing compiler version", false);
System.out.println("Compiler version is: "+output);
}
catch (Exception e) {
@ -625,7 +293,7 @@ public class BaseCppTest implements RuntimeTestSupport {
String libExtension = (getOS().equals("mac")) ? "dylib" : "so";
try {
String command[] = { "ln", "-s", runtimePath + "/dist/libantlr4-runtime." + libExtension };
if (runCommand(command, tmpdir, "sym linking C++ runtime", true) == null)
if (runCommand(command, getTempDirPath(), "sym linking C++ runtime", true) == null)
return null;
}
catch (Exception e) {
@ -636,8 +304,8 @@ public class BaseCppTest implements RuntimeTestSupport {
try {
List<String> command2 = new ArrayList<String>(Arrays.asList("clang++", "-std=c++11", "-I", includePath, "-L.", "-lantlr4-runtime", "-o", "a.out"));
command2.addAll(allCppFiles(tmpdir));
if (runCommand(command2.toArray(new String[0]), tmpdir, "building test binary", true) == null) {
command2.addAll(allCppFiles(getTempDirPath()));
if (runCommand(command2.toArray(new String[0]), getTempDirPath(), "building test binary", true) == null) {
return null;
}
}
@ -648,10 +316,10 @@ public class BaseCppTest implements RuntimeTestSupport {
}
// Now run the newly minted binary. Reset the error output, as we could have got compiler warnings which are not relevant here.
this.stderrDuringParse = null;
setParseErrors(null);
try {
ProcessBuilder builder = new ProcessBuilder(binPath, inputPath);
builder.directory(new File(tmpdir));
builder.directory(getTempTestDir());
Map<String, String> env = builder.environment();
env.put("LD_PRELOAD", runtimePath + "/dist/libantlr4-runtime." + libExtension);
String output = runProcess(builder, "running test binary", false);
@ -688,156 +356,11 @@ public class BaseCppTest implements RuntimeTestSupport {
p = Paths.get(runtimeURL.toURI()).toFile().toString();
}
catch (URISyntaxException use) {
p = "Can't find runtime";
p = "Can't find runtime at " + runtimeURL.toString();
}
return p;
}
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
for (ANTLRMessage m : msgs) {
if ( m.getClass() == c ) filtered.add(m);
}
return filtered;
}
void checkRuleATN(Grammar g, String ruleName, String expecting) {
ParserATNFactory f = new ParserATNFactory(g);
ATN atn = f.createATN();
DOTGenerator dot = new DOTGenerator(g);
System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
Rule r = g.getRule(ruleName);
ATNState startState = atn.ruleToStartState[r.index];
ATNPrinter serializer = new ATNPrinter(g, startState);
String result = serializer.asString();
//System.out.print(result);
assertEquals(expecting, result);
}
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
int lp = templates.indexOf('(');
String name = templates.substring(0, lp);
STGroup group = new STGroupString(templates);
ST st = group.getInstanceOf(name);
st.add(actionName, action);
String grammar = st.render();
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(grammar, equeue);
if ( g.ast!=null && !g.ast.hasErrors ) {
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
ATNFactory factory = new ParserATNFactory(g);
if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g);
g.atn = factory.createATN();
CodeGenerator gen = new CodeGenerator(g);
ST outputFileST = gen.generateParser();
String output = outputFileST.render();
//System.out.println(output);
String b = "#" + actionName + "#";
int start = output.indexOf(b);
String e = "#end-" + actionName + "#";
int end = output.indexOf(e);
String snippet = output.substring(start+b.length(),end);
assertEquals(expected, snippet);
}
if ( equeue.size()>0 ) {
System.err.println(equeue.toString());
}
}
protected void checkGrammarSemanticsError(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception
{
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if ( equeue.size()!=1 ) {
System.err.println(equeue);
}
}
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception
{
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.warnings.size(); i++) {
ANTLRMessage m = equeue.warnings.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if ( equeue.size()!=1 ) {
System.err.println(equeue);
}
}
protected void checkError(ErrorQueue equeue,
ANTLRMessage expectedMessage)
throws Exception
{
//System.out.println("errors="+equeue);
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty());
assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1);
assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg);
/*
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
*/
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
}
public static class FilteringTokenStream extends CommonTokenStream {
public FilteringTokenStream(TokenSource src) { super(src); }
Set<Integer> hide = new HashSet<Integer>();
@Override
protected boolean sync(int i) {
if (!super.sync(i)) {
return false;
}
Token t = get(i);
if ( hide.contains(t.getType()) ) {
((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
}
return true;
}
public void setTokenTypeChannel(int ttype, int channel) {
hide.add(ttype);
}
}
protected void mkdir(String dir) {
File f = new File(dir);
f.mkdirs();
}
protected void writeParserTestFile(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug, boolean trace) {
@ -898,7 +421,7 @@ public class BaseCppTest implements RuntimeTestSupport {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.cpp", outputFileST.render());
writeFile(getTempDirPath(), "Test.cpp", outputFileST.render());
}
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
@ -923,208 +446,8 @@ public class BaseCppTest implements RuntimeTestSupport {
+ " return 0;\n"
+ "}\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.cpp", outputFileST.render());
writeFile(getTempDirPath(), "Test.cpp", outputFileST.render());
}
public void writeRecognizer(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug, boolean trace) {
if ( parserName==null ) {
writeLexerTestFile(lexerName, debug);
}
else {
writeParserTestFile(parserName,
lexerName,
listenerName,
visitorName,
parserStartRuleName,
debug,
trace);
}
}
protected void eraseFiles(final String filesEndingWith) {
File tmpdirF = new File(tmpdir);
String[] files = tmpdirF.list();
for(int i = 0; files!=null && i < files.length; i++) {
if ( files[i].endsWith(filesEndingWith) ) {
new File(tmpdir+"/"+files[i]).delete();
}
}
}
protected void eraseFiles(File dir) {
String[] files = dir.list();
for(int i = 0; files!=null && i < files.length; i++) {
new File(dir,files[i]).delete();
}
}
@Override
public void eraseTempDir() {
boolean doErase = true;
String propName = getPropertyPrefix() + "-erase-test-dir";
String prop = System.getProperty(propName);
if(prop!=null && prop.length()>0)
doErase = Boolean.parseBoolean(prop);
if(doErase) {
File tmpdirF = new File(tmpdir);
if ( tmpdirF.exists() ) {
eraseFiles(tmpdirF);
tmpdirF.delete();
}
}
}
public String getFirstLineOfException() {
if ( this.stderrDuringParse ==null ) {
return null;
}
String[] lines = this.stderrDuringParse.split("\n");
String prefix="Exception in thread \"main\" ";
return lines[0].substring(prefix.length(),lines[0].length());
}
/**
* When looking at a result set that consists of a Map/HashTable
* we cannot rely on the output order, as the hashing algorithm or other aspects
* of the implementation may be different on different JDKs or platforms. Hence
* we copy the Map into a TreeMap, which sorts the entries by key, and stringify that, which is a
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
* the keys are strings.
*
* @param m The Map that contains keys we wish to return in sorted order
* @return A string that represents all the keys in sorted order.
*/
public <K, V> String sortMapToString(Map<K, V> m) {
// Pass in crap, and get nothing back
//
if (m == null) {
return null;
}
System.out.println("Map toString looks like: " + m.toString());
// Sort the keys in the Map
//
TreeMap<K, V> nset = new TreeMap<K, V>(m);
System.out.println("Tree map looks like: " + nset.toString());
return nset.toString();
}
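// For example (illustrative values): given a map {"b"=2, "a"=1} this prints the raw
// and TreeMap forms and returns "{a=1, b=2}" regardless of HashMap iteration order.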
public List<String> realElements(List<String> elements) {
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}
public void assertNotNullOrEmpty(String message, String text) {
assertNotNull(message, text);
assertFalse(message, text.isEmpty());
}
public void assertNotNullOrEmpty(String text) {
assertNotNull(text);
assertFalse(text.isEmpty());
}
public static class IntTokenStream implements TokenStream {
IntegerList types;
int p=0;
public IntTokenStream(IntegerList types) { this.types = types; }
@Override
public void consume() { p++; }
@Override
public int LA(int i) { return LT(i).getType(); }
@Override
public int mark() {
return index();
}
@Override
public int index() { return p; }
@Override
public void release(int marker) {
seek(marker);
}
@Override
public void seek(int index) {
p = index;
}
@Override
public int size() {
return types.size();
}
@Override
public String getSourceName() {
return null;
}
@Override
public Token LT(int i) {
CommonToken t;
int rawIndex = p + i - 1;
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
else t = new CommonToken(types.get(rawIndex));
t.setTokenIndex(rawIndex);
return t;
}
@Override
public Token get(int i) {
return new org.antlr.v4.runtime.CommonToken(types.get(i));
}
@Override
public TokenSource getTokenSource() {
return null;
}
@Override
public String getText() {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Token start, Token stop) {
throw new UnsupportedOperationException("can't give strings");
}
}
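// Illustrative sketch (hypothetical token types): a parser can be driven from raw
// token types without a lexer, e.g.
//   IntegerList types = new IntegerList();
//   types.add(5); types.add(3); types.add(Token.EOF);
//   IntTokenStream input = new IntTokenStream(types);
//   // input.LA(1) == 5, input.LT(1).getTokenIndex() == 0, input.size() == 3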
/** Sort a list */
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/** Return map sorted by key */
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}

View File

@ -5,39 +5,14 @@
*/
package org.antlr.v4.test.runtime.csharp;
import org.antlr.v4.Tool;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.misc.Utils;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.test.runtime.TestOutputReading;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.junit.runner.Description;
import org.antlr.v4.test.runtime.*;
import org.stringtemplate.v4.ST;
import org.w3c.dom.Document;
import org.w3c.dom.Element;
import javax.xml.parsers.DocumentBuilderFactory;
import javax.xml.transform.OutputKeys;
import javax.xml.transform.Transformer;
import javax.xml.transform.TransformerFactory;
import javax.xml.transform.dom.DOMSource;
import javax.xml.transform.stream.StreamResult;
import javax.xml.xpath.XPathConstants;
import javax.xml.xpath.XPathExpression;
import javax.xml.xpath.XPathFactory;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.net.URL;
import java.nio.file.Path;
@ -56,114 +31,11 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
public class BaseCSharpTest implements RuntimeTestSupport {
public static final String newline = System.getProperty("line.separator");
/**
* When the {@code antlr-preserve-csharp-test-dir} runtime property is set to
* {@code true}, the temporary directories created by the test run will not
* be removed at the end of the test run, even for tests that completed
* successfully.
*
* <p>
* The default behavior (used in all other cases) is removing the temporary
* directories for all tests which completed successfully, and preserving
* the directories for tests which failed.</p>
*/
public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr-preserve-csharp-test-dir"));
/**
* The base test directory is the directory where generated files get placed
* during unit test execution.
*
* <p>
* The default value for this property is the {@code java.io.tmpdir} system
* property, and can be overridden by setting the
* {@code antlr-csharp-test-dir} property to a custom location. Note that the
* {@code antlr-csharp-test-dir} property directly affects the
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
*/
public static final String BASE_TEST_DIR;
/**
* When {@code true}, a temporary directory will be created for each test
* executed during the test run.
*
* <p>
* This value is {@code true} when the {@code antlr-csharp-test-dir} system
* property is not set, and otherwise {@code false}.</p>
*/
public static final boolean CREATE_PER_TEST_DIRECTORIES;
static {
String baseTestDir = System.getProperty("antlr-csharp-test-dir");
boolean perTestDirectories = false;
if (baseTestDir == null || baseTestDir.isEmpty()) {
baseTestDir = System.getProperty("java.io.tmpdir");
perTestDirectories = true;
}
if (!new File(baseTestDir).isDirectory()) {
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
}
BASE_TEST_DIR = baseTestDir;
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
}
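// Illustrative: with no property set, every test gets its own directory under
// java.io.tmpdir; starting the JVM with e.g. -Dantlr-csharp-test-dir=/tmp/antlr-csharp
// (hypothetical path, must already exist) pins all tests to that one directory
// and disables per-test directories.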
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/**
* Errors found while running antlr
*/
protected StringBuilder antlrToolErrors;
public class BaseCSharpTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
@Override
public void testSetUp() throws Exception {
if (CREATE_PER_TEST_DIRECTORIES) {
// new output dir for each test
String testDirectory = getClass().getSimpleName() + "-" + Thread.currentThread().getName() + "-" + System.currentTimeMillis();
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
} else {
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) {
eraseDirectory(new File(tmpdir));
}
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if (antlrToolErrors.length() == 0) {
return null;
}
return antlrToolErrors.toString();
protected String getPropertyPrefix() {
return "antlr4-csharp";
}
protected String execLexer(String grammarFileName,
@ -184,12 +56,12 @@ public class BaseCSharpTest implements RuntimeTestSupport {
null,
lexerName);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
addSourceFiles("Test.cs");
if (!compile()) {
System.err.println("Failed to compile!");
return stderrDuringParse;
return getParseErrors();
}
String output = execTest();
if (output != null && output.length() == 0) {
@ -201,8 +73,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
Set<String> sourceFiles = new HashSet<>();
private void addSourceFiles(String... files) {
for (String file : files)
this.sourceFiles.add(file);
Collections.addAll(sourceFiles, files);
}
@Override
@ -221,7 +92,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
lexerName,
"-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
return rawExecRecognizer(parserName,
lexerName,
startRuleName,
@ -248,7 +119,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
String lexerName,
boolean defaultListener,
String... extraOptions) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "CSharp", grammarFileName, grammarStr, defaultListener, extraOptions);
ErrorQueue equeue = antlrOnString(getTempDirPath(), "CSharp", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}
@ -278,7 +149,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
String lexerName,
String parserStartRuleName,
boolean debug) {
this.stderrDuringParse = null;
setParseErrors(null);
if (parserName == null) {
writeLexerTestFile(lexerName, false);
} else {
@ -313,14 +184,14 @@ public class BaseCSharpTest implements RuntimeTestSupport {
}
private String locateExec() {
return new File(tmpdir, "bin/Release/netcoreapp3.1/Test.dll").getAbsolutePath();
return new File(getTempTestDir(), "bin/Release/netcoreapp3.1/Test.dll").getAbsolutePath();
}
public boolean buildProject() {
try {
// save auxiliary files
String pack = BaseCSharpTest.class.getPackage().getName().replace(".", "/") + "/";
saveResourceAsFile(pack + "Antlr4.Test.csproj", new File(tmpdir, "Antlr4.Test.csproj"));
saveResourceAsFile(pack + "Antlr4.Test.csproj", new File(getTempTestDir(), "Antlr4.Test.csproj"));
// find runtime package
final ClassLoader loader = Thread.currentThread().getContextClassLoader();
@ -339,7 +210,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
"reference",
runtimeProjPath
};
boolean success = runProcess(args, tmpdir);
boolean success = runProcess(args, getTempDirPath());
assertTrue(success);
// build test
@ -350,7 +221,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
"-c",
"Release"
};
success = runProcess(args, tmpdir);
success = runProcess(args, getTempDirPath());
assertTrue(success);
} catch (Exception e) {
e.printStackTrace(System.err);
@ -378,11 +249,11 @@ public class BaseCSharpTest implements RuntimeTestSupport {
int exitValue = process.exitValue();
boolean success = (exitValue == 0);
if (!success) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
System.err.println("runProcess command: " + Utils.join(args, " "));
System.err.println("runProcess exitValue: " + exitValue);
System.err.println("runProcess stdoutVacuum: " + stdoutVacuum.toString());
System.err.println("runProcess stderrVacuum: " + stderrDuringParse);
System.err.println("runProcess stderrVacuum: " + getParseErrors());
}
if (exitValue == 132) {
// Retry after SIGILL. We are seeing this intermittently on
@ -417,7 +288,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
public String execTest() {
String exec = locateExec();
try {
File tmpdirFile = new File(tmpdir);
File tmpdirFile = new File(getTempDirPath());
Path output = tmpdirFile.toPath().resolve("output");
Path errorOutput = tmpdirFile.toPath().resolve("error-output");
String[] args = getExecTestArgs(exec, output, errorOutput);
@ -432,7 +303,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
stdoutVacuum.join();
stderrVacuum.join();
String writtenOutput = TestOutputReading.read(output);
this.stderrDuringParse = TestOutputReading.read(errorOutput);
setParseErrors(TestOutputReading.read(errorOutput));
int exitValue = process.exitValue();
String stdoutString = stdoutVacuum.toString().trim();
String stderrString = stderrVacuum.toString().trim();
@ -456,7 +327,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
private String[] getExecTestArgs(String exec, Path output, Path errorOutput) {
return new String[]{
"dotnet", exec, new File(tmpdir, "input").getAbsolutePath(),
"dotnet", exec, new File(getTempTestDir(), "input").getAbsolutePath(),
output.toAbsolutePath().toString(),
errorOutput.toAbsolutePath().toString()
};
@ -516,7 +387,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.cs", outputFileST.render());
writeFile(getTempDirPath(), "Test.cs", outputFileST.render());
}
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
@ -545,32 +416,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
);
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.cs", outputFileST.render());
}
protected void eraseDirectory(File dir) {
File[] files = dir.listFiles();
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
eraseDirectory(file);
} else {
file.delete();
}
}
}
dir.delete();
}
@Override
public void eraseTempDir() {
if (!PRESERVE_TEST_DIR) {
File tmpdirF = new File(tmpdir);
if (tmpdirF.exists()) {
eraseDirectory(tmpdirF);
tmpdirF.delete();
}
}
writeFile(getTempDirPath(), "Test.cs", outputFileST.render());
}
/**
@ -578,8 +424,7 @@ public class BaseCSharpTest implements RuntimeTestSupport {
*/
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
List<K> keys = new ArrayList<K>(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));

View File

@ -6,39 +6,14 @@
package org.antlr.v4.test.runtime.dart;
import org.antlr.v4.Tool;
import org.antlr.v4.analysis.AnalysisPipeline;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.misc.Utils;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.misc.Pair;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.test.runtime.*;
import org.antlr.v4.test.runtime.descriptors.LexerExecDescriptors;
import org.antlr.v4.test.runtime.descriptors.PerformanceDescriptors;
import org.antlr.v4.tool.*;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;
import java.io.*;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import static junit.framework.TestCase.*;
@ -47,333 +22,19 @@ import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertArrayEquals;
public class BaseDartTest implements RuntimeTestSupport {
public class BaseDartTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
private static final List<String> AOT_COMPILE_TESTS = Arrays.asList(
new PerformanceDescriptors.DropLoopEntryBranchInLRRule_4().input,
new LexerExecDescriptors.LargeLexer().input
);
public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");
/**
* When the {@code antlr.preserve-test-dir} runtime property is set to
* {@code true}, the temporary directories created by the test run will not
* be removed at the end of the test run, even for tests that completed
* successfully.
* <p>
* The default behavior (used in all other cases) is removing the temporary
* directories for all tests which completed successfully, and preserving
* the directories for tests which failed.</p>
*/
public static final boolean PRESERVE_TEST_DIR = Boolean.parseBoolean(System.getProperty("antlr.preserve-test-dir", "false"));
/**
* The base test directory is the directory where generated files get placed
* during unit test execution.
* <p>
* The default value for this property is the {@code java.io.tmpdir} system
* property, and can be overridden by setting the
* {@code antlr.dart-test-dir} property to a custom location. Note that the
* {@code antlr.dart-test-dir} property directly affects the
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
*/
public static final String BASE_TEST_DIR;
/**
* When {@code true}, a temporary directory will be created for each test
* executed during the test run.
* <p>
* This value is {@code true} when the {@code antlr.dart-test-dir} system
* property is not set, and otherwise {@code false}.</p>
*/
public static final boolean CREATE_PER_TEST_DIRECTORIES;
static {
String baseTestDir = System.getProperty("antlr.dart-test-dir");
boolean perTestDirectories = false;
if (baseTestDir == null || baseTestDir.isEmpty()) {
baseTestDir = System.getProperty("java.io.tmpdir");
perTestDirectories = true;
}
if (!new File(baseTestDir).isDirectory()) {
throw new UnsupportedOperationException("The specified base test directory does not exist: " + baseTestDir);
}
BASE_TEST_DIR = baseTestDir;
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
}
/**
* Build up the full classpath we need, including the surefire path (if present)
*/
public static final String CLASSPATH = System.getProperty("java.class.path");
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/**
* Errors found while running antlr
*/
protected StringBuilder antlrToolErrors;
private static String cacheDartPackages;
private String getPropertyPrefix() {
public String getPropertyPrefix() {
return "antlr-dart";
}
@Override
public void testSetUp() throws Exception {
if (CREATE_PER_TEST_DIRECTORIES) {
// new output dir for each test
String threadName = Thread.currentThread().getName();
String testDirectory = getClass().getSimpleName() + "-" + threadName + "-" + System.nanoTime();
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
} else {
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
if (!PRESERVE_TEST_DIR && new File(tmpdir).exists()) {
eraseFiles();
}
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if (antlrToolErrors.length() == 0) {
return null;
}
return antlrToolErrors.toString();
}
protected Tool newTool(String[] args) {
Tool tool = new Tool(args);
return tool;
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if (g.atn == null) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if (g.isLexer()) {
f = new LexerATNFactory((LexerGrammar) g);
} else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if (g.ast != null && !g.ast.hasErrors) {
// System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if (g.getImportedGrammars() != null) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
public DFA createDFA(Grammar g, DecisionState s) {
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
// DFA dfa = conv.createDFA();
// conv.issueAmbiguityWarnings();
// System.out.print("DFA="+dfa);
// return dfa;
return null;
}
// public void minimizeDFA(DFA dfa) {
// DFAMinimizer dmin = new DFAMinimizer(dfa);
// dfa.minimized = dmin.minimize();
// }
IntegerList getTypesFromString(Grammar g, String expecting) {
IntegerList expectingTokenTypes = new IntegerList();
if (expecting != null && !expecting.trim().isEmpty()) {
for (String tname : expecting.replace(" ", "").split(",")) {
int ttype = g.getTokenType(tname);
expectingTokenTypes.add(ttype);
}
}
return expectingTokenTypes;
}
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
ANTLRInputStream in = new ANTLRInputStream(input);
IntegerList tokenTypes = new IntegerList();
int ttype;
do {
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
tokenTypes.add(ttype);
} while (ttype != Token.EOF);
return tokenTypes;
}
public List<String> getTokenTypes(LexerGrammar lg,
ATN atn,
CharStream input) {
LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[]{new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
List<String> tokenTypes = new ArrayList<String>();
int ttype;
boolean hitEOF = false;
do {
if (hitEOF) {
tokenTypes.add("EOF");
break;
}
int t = input.LA(1);
ttype = interp.match(input, Lexer.DEFAULT_MODE);
if (ttype == Token.EOF) {
tokenTypes.add("EOF");
} else {
tokenTypes.add(lg.typeToTokenList.get(ttype));
}
if (t == IntStream.EOF) {
hitEOF = true;
}
} while (ttype != Token.EOF);
return tokenTypes;
}
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
throws Exception {
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
if (s == null) {
System.err.println("no such rule: " + ruleName);
return null;
}
ATNState t = s.transition(0).target;
if (!(t instanceof DecisionState)) {
System.out.println(ruleName + " has no decision");
return null;
}
DecisionState blk = (DecisionState) t;
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
throws Exception {
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
DecisionState blk = atn.decisionToState.get(decision);
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
throws Exception {
DFA dfa = createDFA(g, blk);
String result = null;
if (dfa != null) result = dfa.toString();
assertEquals(expecting, result);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
throws Exception {
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
throws Exception {
ErrorQueue equeue = new ErrorQueue();
LexerGrammar g = new LexerGrammar(gtext, equeue);
g.atn = createATN(g, false);
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
// DFA dfa = conv.createDFA(modeName);
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
//
// String result = null;
// if ( dfa!=null ) result = dfa.toString();
// assertEquals(expecting, result);
//
// return equeue.all;
return null;
}
protected String load(String fileName, String encoding)
throws IOException {
if (fileName == null) {
return null;
}
String fullFileName = getClass().getPackage().getName().replace('.', '/') + '/' + fileName;
int size = 65000;
InputStreamReader isr;
InputStream fis = getClass().getClassLoader().getResourceAsStream(fullFileName);
if (encoding != null) {
isr = new InputStreamReader(fis, encoding);
} else {
isr = new InputStreamReader(fis);
}
try {
char[] data = new char[size];
int n = isr.read(data);
return new String(data, 0, n);
} finally {
isr.close();
}
}
protected String execLexer(String grammarFileName,
String grammarStr,
String lexerName,
String input) {
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
}
@Override
public String execLexer(String grammarFileName,
String grammarStr,
@ -385,71 +46,12 @@ public class BaseDartTest implements RuntimeTestSupport {
null,
lexerName);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
String output = execClass("Test", AOT_COMPILE_TESTS.contains(input));
return output;
}
public ParseTree execParser(String startRuleName, String input,
String parserName, String lexerName)
throws Exception {
Pair<Parser, Lexer> pl = getParserAndLexer(input, parserName, lexerName);
Parser parser = pl.a;
return execStartRule(startRuleName, parser);
}
public ParseTree execStartRule(String startRuleName, Parser parser)
throws IllegalAccessException, InvocationTargetException,
NoSuchMethodException {
Method startRule = null;
Object[] args = null;
try {
startRule = parser.getClass().getMethod(startRuleName);
} catch (NoSuchMethodException nsme) {
// try with int _p arg for recursive func
startRule = parser.getClass().getMethod(startRuleName, int.class);
args = new Integer[]{0};
}
ParseTree result = (ParseTree) startRule.invoke(parser, args);
// System.out.println("parse tree = "+result.toStringTree(parser));
return result;
}
public Pair<Parser, Lexer> getParserAndLexer(String input,
String parserName, String lexerName)
throws Exception {
final Class<? extends Lexer> lexerClass = loadLexerClassFromTempDir(lexerName);
final Class<? extends Parser> parserClass = loadParserClassFromTempDir(parserName);
ANTLRInputStream in = new ANTLRInputStream(new StringReader(input));
Class<? extends Lexer> c = lexerClass.asSubclass(Lexer.class);
Constructor<? extends Lexer> ctor = c.getConstructor(CharStream.class);
Lexer lexer = ctor.newInstance(in);
Class<? extends Parser> pc = parserClass.asSubclass(Parser.class);
Constructor<? extends Parser> pctor = pc.getConstructor(TokenStream.class);
CommonTokenStream tokens = new CommonTokenStream(lexer);
Parser parser = pctor.newInstance(tokens);
return new Pair<Parser, Lexer>(parser, lexer);
}
public Class<?> loadClassFromTempDir(String name) throws Exception {
ClassLoader loader =
new URLClassLoader(new URL[]{new File(tmpdir).toURI().toURL()},
ClassLoader.getSystemClassLoader());
return loader.loadClass(name);
}
public Class<? extends Lexer> loadLexerClassFromTempDir(String name) throws Exception {
return loadClassFromTempDir(name).asSubclass(Lexer.class);
}
public Class<? extends Parser> loadParserClassFromTempDir(String name) throws Exception {
return loadClassFromTempDir(name).asSubclass(Parser.class);
}
@Override
public String execParser(String grammarFileName,
String grammarStr,
@ -480,7 +82,7 @@ public class BaseDartTest implements RuntimeTestSupport {
lexerName,
"-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
return rawExecRecognizer(parserName,
lexerName,
startRuleName,
@ -510,7 +112,7 @@ public class BaseDartTest implements RuntimeTestSupport {
boolean defaultListener,
String... extraOptions) {
ErrorQueue equeue =
BaseRuntimeTest.antlrOnString(getTmpDir(), "Dart", grammarFileName, grammarStr, defaultListener, extraOptions);
BaseRuntimeTest.antlrOnString(getTempDirPath(), "Dart", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}
@ -534,17 +136,29 @@ public class BaseDartTest implements RuntimeTestSupport {
}
String runtime = locateRuntime();
writeFile(tmpdir, "pubspec.yaml",
writeFile(getTempDirPath(), "pubspec.yaml",
"name: \"test\"\n" +
"dependencies:\n" +
" antlr4:\n" +
" path: " + runtime + "\n");
if (cacheDartPackages == null) {
try {
Process process = Runtime.getRuntime().exec(new String[]{locatePub(), "get"}, null, new File(tmpdir));
final Process process = Runtime.getRuntime().exec(new String[]{locatePub(), "get"}, null, getTempTestDir());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stderrVacuum.start();
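// Watchdog: destroy the external "pub get" process if it has not finished within
// 30 seconds, so a hung download cannot stall the test run.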
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
try {
process.destroy();
} catch(Exception e) {
e.printStackTrace(System.err);
}
}
}, 30_000);
process.waitFor();
timer.cancel();
stderrVacuum.join();
String stderrDuringPubGet = stderrVacuum.toString();
if (!stderrDuringPubGet.isEmpty()) {
@ -554,9 +168,9 @@ public class BaseDartTest implements RuntimeTestSupport {
e.printStackTrace();
return false;
}
cacheDartPackages = readFile(tmpdir, ".packages");
cacheDartPackages = readFile(getTempDirPath(), ".packages");
} else {
writeFile(tmpdir, ".packages", cacheDartPackages);
writeFile(getTempDirPath(), ".packages", cacheDartPackages);
}
return true; // allIsWell: no compile
}
@ -567,7 +181,7 @@ public class BaseDartTest implements RuntimeTestSupport {
boolean debug,
boolean profile,
boolean aotCompile) {
this.stderrDuringParse = null;
setParseErrors(null);
if (parserName == null) {
writeLexerTestFile(lexerName, false);
} else {
@ -590,11 +204,23 @@ public class BaseDartTest implements RuntimeTestSupport {
};
String cmdLine = Utils.join(args, " ");
System.err.println("Compile: " + cmdLine);
Process process =
Runtime.getRuntime().exec(args, null, new File(tmpdir));
final Process process =
Runtime.getRuntime().exec(args, null, getTempTestDir());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stderrVacuum.start();
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
try {
process.destroy();
} catch(Exception e) {
e.printStackTrace(System.err);
}
}
}, 30_000);
int result = process.waitFor();
timer.cancel();
if (result != 0) {
stderrVacuum.join();
System.err.print("Error compiling dart file: " + stderrVacuum.toString());
@ -604,23 +230,35 @@ public class BaseDartTest implements RuntimeTestSupport {
String[] args;
if (compile) {
args = new String[]{
new File(tmpdir, className).getAbsolutePath(), new File(tmpdir, "input").getAbsolutePath()
new File(getTempTestDir(), className).getAbsolutePath(), new File(getTempTestDir(), "input").getAbsolutePath()
};
} else {
args = new String[]{
locateDart(),
className + ".dart", new File(tmpdir, "input").getAbsolutePath()
className + ".dart", new File(getTempTestDir(), "input").getAbsolutePath()
};
}
//String cmdLine = Utils.join(args, " ");
//System.err.println("execParser: " + cmdLine);
Process process =
Runtime.getRuntime().exec(args, null, new File(tmpdir));
final Process process =
Runtime.getRuntime().exec(args, null, getTempTestDir());
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stdoutVacuum.start();
stderrVacuum.start();
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
try {
process.destroy();
} catch(Exception e) {
e.printStackTrace(System.err);
}
}
}, 30_000);
process.waitFor();
timer.cancel();
stdoutVacuum.join();
stderrVacuum.join();
String output = stdoutVacuum.toString();
@ -628,7 +266,7 @@ public class BaseDartTest implements RuntimeTestSupport {
output = null;
}
if (stderrVacuum.toString().length() > 0) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
} catch (Exception e) {
@ -731,187 +369,6 @@ public class BaseDartTest implements RuntimeTestSupport {
return runtimeSrc.getPath();
}
private boolean isWindows() {
return System.getProperty("os.name").toLowerCase().contains("windows");
}
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
// }
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
// if ( a==null ) assertNull(expectedAmbigAlts);
// else {
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
// }
// assertEquals(expectedAmbigInput, a.input);
// }
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
// throws Exception
// {
// unreachable(msgs, 0, expectedUnreachableAlts);
// }
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
// if ( u==null ) assertNull(expectedUnreachableAlts);
// else {
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
// }
// }
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
for (ANTLRMessage m : msgs) {
if (m.getClass() == c) filtered.add(m);
}
return filtered;
}
public void checkRuleATN(Grammar g, String ruleName, String expecting) {
// DOTGenerator dot = new DOTGenerator(g);
// System.out.println(dot.getDOT(g.atn.ruleToStartState[g.getRule(ruleName).index]));
Rule r = g.getRule(ruleName);
ATNState startState = g.getATN().ruleToStartState[r.index];
ATNPrinter serializer = new ATNPrinter(g, startState);
String result = serializer.asString();
//System.out.print(result);
assertEquals(expecting, result);
}
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
int lp = templates.indexOf('(');
String name = templates.substring(0, lp);
STGroup group = new STGroupString(templates);
ST st = group.getInstanceOf(name);
st.add(actionName, action);
String grammar = st.render();
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(grammar, equeue);
if (g.ast != null && !g.ast.hasErrors) {
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
ATNFactory factory = new ParserATNFactory(g);
if (g.isLexer()) factory = new LexerATNFactory((LexerGrammar) g);
g.atn = factory.createATN();
AnalysisPipeline anal = new AnalysisPipeline(g);
anal.process();
CodeGenerator gen = new CodeGenerator(g);
ST outputFileST = gen.generateParser(false);
String output = outputFileST.render();
//System.out.println(output);
String b = "#" + actionName + "#";
int start = output.indexOf(b);
String e = "#end-" + actionName + "#";
int end = output.indexOf(e);
String snippet = output.substring(start + b.length(), end);
assertEquals(expected, snippet);
}
if (equeue.size() > 0) {
// System.err.println(equeue.toString());
}
}
protected void checkGrammarSemanticsError(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception {
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType() == expectedMessage.getErrorType()) {
foundMsg = m;
}
}
assertNotNull("no error; " + expectedMessage.getErrorType() + " expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if (equeue.size() != 1) {
System.err.println(equeue);
}
}
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception {
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.warnings.size(); i++) {
ANTLRMessage m = equeue.warnings.get(i);
if (m.getErrorType() == expectedMessage.getErrorType()) {
foundMsg = m;
}
}
assertNotNull("no error; " + expectedMessage.getErrorType() + " expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if (equeue.size() != 1) {
System.err.println(equeue);
}
}
protected void checkError(ErrorQueue equeue,
ANTLRMessage expectedMessage)
throws Exception {
//System.out.println("errors="+equeue);
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType() == expectedMessage.getErrorType()) {
foundMsg = m;
}
}
assertTrue("no error; " + expectedMessage.getErrorType() + " expected", !equeue.errors.isEmpty());
assertTrue("too many errors; " + equeue.errors, equeue.errors.size() <= 1);
assertNotNull("couldn't find expected error: " + expectedMessage.getErrorType(), foundMsg);
/*
* assertTrue("error is not a GrammarSemanticsMessage", foundMsg
* instanceof GrammarSemanticsMessage);
*/
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
}
public static class FilteringTokenStream extends CommonTokenStream {
public FilteringTokenStream(TokenSource src) {
super(src);
}
Set<Integer> hide = new HashSet<Integer>();
@Override
protected boolean sync(int i) {
if (!super.sync(i)) {
return false;
}
Token t = get(i);
if (hide.contains(t.getType())) {
((WritableToken) t).setChannel(Token.HIDDEN_CHANNEL);
}
return true;
}
public void setTokenTypeChannel(int ttype, int channel) {
hide.add(ttype);
}
}
protected void writeTestFile(String parserName,
String lexerName,
String parserStartRuleName,
@ -971,7 +428,7 @@ public class BaseDartTest implements RuntimeTestSupport {
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.dart", outputFileST.render());
writeFile(getTempDirPath(), "Test.dart", outputFileST.render());
}
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
@ -995,191 +452,7 @@ public class BaseDartTest implements RuntimeTestSupport {
);
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.dart", outputFileST.render());
writeFile(getTempDirPath(), "Test.dart", outputFileST.render());
}
protected void eraseFiles() {
if (tmpdir == null) {
return;
}
File tmpdirF = new File(tmpdir);
String[] files = tmpdirF.list();
for (int i = 0; files != null && i < files.length; i++) {
new File(tmpdir + "/" + files[i]).delete();
}
}
@Override
public void eraseTempDir() {
File tmpdirF = new File(tmpdir);
if (tmpdirF.exists()) {
eraseFiles();
tmpdirF.delete();
}
}
public String getFirstLineOfException() {
if (this.stderrDuringParse == null) {
return null;
}
String[] lines = this.stderrDuringParse.split("\n");
String prefix = "Exception in thread \"main\" ";
return lines[0].substring(prefix.length(), lines[0].length());
}
/**
* When looking at a result set that consists of a Map/HashTable
* we cannot rely on the output order, as the hashing algorithm or other aspects
* of the implementation may be different on different JDKs or platforms. Hence
* we copy the Map into a TreeMap, which sorts the entries by key, and stringify that, which is a
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
* the keys are strings.
*
* @param m The Map that contains keys we wish to return in sorted order
* @return A string that represents all the keys in sorted order.
*/
public <K, V> String sortMapToString(Map<K, V> m) {
// Pass in crap, and get nothing back
//
if (m == null) {
return null;
}
System.out.println("Map toString looks like: " + m.toString());
// Sort the keys in the Map
//
TreeMap<K, V> nset = new TreeMap<K, V>(m);
System.out.println("Tree map looks like: " + nset.toString());
return nset.toString();
}
public List<String> realElements(List<String> elements) {
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}
public void assertNotNullOrEmpty(String message, String text) {
assertNotNull(message, text);
assertFalse(message, text.isEmpty());
}
public void assertNotNullOrEmpty(String text) {
assertNotNull(text);
assertFalse(text.isEmpty());
}
public static class IntTokenStream implements TokenStream {
public IntegerList types;
int p = 0;
public IntTokenStream(IntegerList types) {
this.types = types;
}
@Override
public void consume() {
p++;
}
@Override
public int LA(int i) {
return LT(i).getType();
}
@Override
public int mark() {
return index();
}
@Override
public int index() {
return p;
}
@Override
public void release(int marker) {
seek(marker);
}
@Override
public void seek(int index) {
p = index;
}
@Override
public int size() {
return types.size();
}
@Override
public String getSourceName() {
return UNKNOWN_SOURCE_NAME;
}
@Override
public Token LT(int i) {
CommonToken t;
int rawIndex = p + i - 1;
if (rawIndex >= types.size()) t = new CommonToken(Token.EOF);
else t = new CommonToken(types.get(rawIndex));
t.setTokenIndex(rawIndex);
return t;
}
@Override
public Token get(int i) {
return new CommonToken(types.get(i));
}
@Override
public TokenSource getTokenSource() {
return null;
}
@Override
public String getText() {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Token start, Token stop) {
throw new UnsupportedOperationException("can't give strings");
}
}
/**
* Sort a list
*/
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/**
* Return map sorted by key
*/
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}

View File

@ -5,92 +5,36 @@
*/
package org.antlr.v4.test.runtime.go;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.test.runtime.*;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;
import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertNotNull;
import static junit.framework.TestCase.assertTrue;
import static junit.framework.TestCase.*;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertArrayEquals;
public class BaseGoTest implements RuntimeTestSupport {
public File overall_tmpdir = null;
public File tmpdir = null; // this is where the parser package is stored, typically inside the tmpdir
public class BaseGoTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
private static File tmpGopath = null;
private static final String GO_RUNTIME_IMPORT_PATH = "github.com/antlr/antlr4/runtime/Go/antlr"; // TODO: Change this before merging with upstream
/**
* If error during parser execution, store stderr here; can't return stdout
* and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
private File parserTempDir; // "parser" with tempDir
/** Errors found while running antlr */
protected StringBuilder antlrToolErrors;
@Override
protected String getPropertyPrefix() {
return "antlr4-go";
}
/**
* Copies all files from go runtime to a temporary folder that is inside a valid GOPATH project structure.
*/
public static void groupSetUp() throws Exception {
tmpGopath = new File(System.getProperty("java.io.tmpdir"), "antlr-goruntime-tmpgopath-"
+ Long.toHexString(System.currentTimeMillis()));
tmpGopath = new File(System.getProperty("java.io.tmpdir"), "antlr-goruntime-tmpgopath-" + Long.toHexString(System.currentTimeMillis()));
ArrayList<String> pathsegments = new ArrayList<String>();
pathsegments.add("src");
@ -110,39 +54,12 @@ public class BaseGoTest implements RuntimeTestSupport {
}
for (File runtimeFile : runtimeFiles) {
File dest = new File(tmpPackageDir, runtimeFile.getName());
copyFile(runtimeFile, dest);
RuntimeTestUtils.copyFile(runtimeFile, dest);
}
cacheGoRuntime(tmpPackageDir);
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir.getPath();
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if ( antlrToolErrors.length()==0 ) {
return null;
}
return antlrToolErrors.toString();
}
public static void groupTearDown() throws Exception {
eraseDirectory(tmpGopath);
}
@ -163,139 +80,22 @@ public class BaseGoTest implements RuntimeTestSupport {
}
}
private static void copyFile(File source, File dest) throws IOException {
InputStream is = new FileInputStream(source);
OutputStream os = new FileOutputStream(dest);
byte[] buf = new byte[4 << 10];
int l;
while ((l = is.read(buf)) > -1) {
os.write(buf, 0, l);
}
is.close();
os.close();
}
public void testSetUp() throws Exception {
// new output dir for each test
String prop = System.getProperty("antlr-go-test-dir");
if (prop != null && prop.length() > 0) {
overall_tmpdir = new File(prop);
}
else {
String threadName = Thread.currentThread().getName();
overall_tmpdir = new File(System.getProperty("java.io.tmpdir"),
getClass().getSimpleName()+"-"+threadName+"-"+System.currentTimeMillis());
eraseParserTempDir();
super.testSetUp();
parserTempDir = new File(getTempTestDir(), "parser");
}
if ( overall_tmpdir.exists())
this.eraseDirectory(overall_tmpdir);
tmpdir = new File(overall_tmpdir, "parser");
if ( tmpdir.exists()) {
this.eraseDirectory(tmpdir);
}
antlrToolErrors = new StringBuilder();
@Override
public File getTempParserDir() {
return parserTempDir;
}
protected org.antlr.v4.Tool newTool(String[] args) {
return new Tool(args);
private void eraseParserTempDir() {
if(parserTempDir != null) {
eraseDirectory(parserTempDir);
parserTempDir = null;
}
protected Tool newTool() {
return new Tool(new String[]{"-o", tmpdir.getPath()});
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if (g.atn == null) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if (g.isLexer()) {
f = new LexerATNFactory((LexerGrammar) g);
}
else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if (g.ast != null && !g.ast.hasErrors) {
System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if (g.getImportedGrammars() != null) { // process imported grammars
// (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
IntegerList getTypesFromString(Grammar g, String expecting) {
IntegerList expectingTokenTypes = new IntegerList();
if (expecting != null && !expecting.trim().isEmpty()) {
for (String tname : expecting.replace(" ", "").split(",")) {
int ttype = g.getTokenType(tname);
expectingTokenTypes.add(ttype);
}
}
return expectingTokenTypes;
}
public IntegerList getTokenTypesViaATN(String input,
LexerATNSimulator lexerATN) {
ANTLRInputStream in = new ANTLRInputStream(input);
IntegerList tokenTypes = new IntegerList();
int ttype;
do {
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
tokenTypes.add(ttype);
} while (ttype != Token.EOF);
return tokenTypes;
}
public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
LexerATNSimulator interp = new LexerATNSimulator(atn,
new DFA[] { new DFA(
atn.modeToStartState.get(Lexer.DEFAULT_MODE)) }, null);
List<String> tokenTypes = new ArrayList<String>();
int ttype;
boolean hitEOF = false;
do {
if (hitEOF) {
tokenTypes.add("EOF");
break;
}
int t = input.LA(1);
ttype = interp.match(input, Lexer.DEFAULT_MODE);
if (ttype == Token.EOF) {
tokenTypes.add("EOF");
}
else {
tokenTypes.add(lg.typeToTokenList.get(ttype));
}
if (t == IntStream.EOF) {
hitEOF = true;
}
} while (ttype != Token.EOF);
return tokenTypes;
}
protected String execLexer(String grammarFileName, String grammarStr,
@ -309,25 +109,10 @@ public class BaseGoTest implements RuntimeTestSupport {
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
grammarStr, null, lexerName, "-no-listener");
assertTrue(success);
writeFile(overall_tmpdir.toString(), "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
String output = execModule("Test.go");
return output;
return execModule("Test.go");
}
//
// public String execParser(String grammarFileName, String grammarStr,
// String parserName, String lexerName, String listenerName,
// String visitorName, String startRuleName, String input,
// boolean debug)
// {
// boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
// grammarStr, parserName, lexerName, "-visitor");
// assertTrue(success);
// writeFile(overall_tmpdir, "input", input);
// rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
// visitorName, startRuleName, debug);
// return execRecognizer();
// }
@Override
public String execParser(String grammarFileName, String grammarStr,
@ -338,7 +123,7 @@ public class BaseGoTest implements RuntimeTestSupport {
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
grammarStr, parserName, lexerName, "-visitor");
assertTrue(success);
writeFile(overall_tmpdir.toString(), "input", input);
writeFile(getTempDirPath(), "input", input);
rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
visitorName, startRuleName, showDiagnosticErrors);
return execRecognizer();
@ -356,7 +141,7 @@ public class BaseGoTest implements RuntimeTestSupport {
protected boolean rawGenerateAndBuildRecognizer(String grammarFileName,
String grammarStr, String parserName, String lexerName,
boolean defaultListener, String... extraOptions) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "Go", grammarFileName, grammarStr,
ErrorQueue equeue = antlrOnString(getTempParserDirPath(), "Go", grammarFileName, grammarStr,
defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
@ -367,7 +152,7 @@ public class BaseGoTest implements RuntimeTestSupport {
protected void rawBuildRecognizerTestFile(String parserName,
String lexerName, String listenerName, String visitorName,
String parserStartRuleName, boolean debug) {
this.stderrDuringParse = null;
setParseErrors(null);
if (parserName == null) {
writeLexerTestFile(lexerName, false);
}
@ -383,12 +168,12 @@ public class BaseGoTest implements RuntimeTestSupport {
public String execModule(String fileName) {
String goExecutable = locateGo();
String modulePath = new File(overall_tmpdir, fileName).getAbsolutePath();
String inputPath = new File(overall_tmpdir, "input").getAbsolutePath();
String modulePath = new File(getTempTestDir(), fileName).getAbsolutePath();
String inputPath = new File(getTempTestDir(), "input").getAbsolutePath();
try {
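// Run "go run Test.go <input>" in the temp test dir, with GOPATH pointing at the
// temporary tree that groupSetUp() populated with the Go runtime sources.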
ProcessBuilder builder = new ProcessBuilder(goExecutable, "run", modulePath, inputPath);
builder.environment().put("GOPATH", tmpGopath.getPath());
builder.directory(overall_tmpdir);
builder.directory(getTempTestDir());
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
@ -402,7 +187,7 @@ public class BaseGoTest implements RuntimeTestSupport {
output = null;
}
if (stderrVacuum.toString().length() > 0) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
}
@ -469,203 +254,6 @@ public class BaseGoTest implements RuntimeTestSupport {
return runtimeDir;
}
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String
// expectedAmbigInput)
// throws Exception
// {
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
// }
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String
// expectedAmbigInput)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
// if ( a==null ) assertNull(expectedAmbigAlts);
// else {
// assertEquals(a.conflictingAlts.toString(),
// Arrays.toString(expectedAmbigAlts));
// }
// assertEquals(expectedAmbigInput, a.input);
// }
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
// throws Exception
// {
// unreachable(msgs, 0, expectedUnreachableAlts);
// }
// void unreachable(List<Message> msgs, int i, int[]
// expectedUnreachableAlts)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs,
// UnreachableAltsMessage.class);
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
// if ( u==null ) assertNull(expectedUnreachableAlts);
// else {
// assertEquals(u.conflictingAlts.toString(),
// Arrays.toString(expectedUnreachableAlts));
// }
// }
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs,
Class<? extends ANTLRMessage> c) {
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
for (ANTLRMessage m : msgs) {
if (m.getClass() == c)
filtered.add(m);
}
return filtered;
}
void checkRuleATN(Grammar g, String ruleName, String expecting) {
ParserATNFactory f = new ParserATNFactory(g);
ATN atn = f.createATN();
DOTGenerator dot = new DOTGenerator(g);
System.out
.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
Rule r = g.getRule(ruleName);
ATNState startState = atn.ruleToStartState[r.index];
ATNPrinter serializer = new ATNPrinter(g, startState);
String result = serializer.asString();
// System.out.print(result);
assertEquals(expecting, result);
}
public void testActions(String templates, String actionName, String action,
String expected) throws org.antlr.runtime.RecognitionException {
int lp = templates.indexOf('(');
String name = templates.substring(0, lp);
STGroup group = new STGroupString(templates);
ST st = group.getInstanceOf(name);
st.add(actionName, action);
String grammar = st.render();
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(grammar, equeue);
if (g.ast != null && !g.ast.hasErrors) {
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
ATNFactory factory = new ParserATNFactory(g);
if (g.isLexer())
factory = new LexerATNFactory((LexerGrammar) g);
g.atn = factory.createATN();
CodeGenerator gen = new CodeGenerator(g);
ST outputFileST = gen.generateParser();
String output = outputFileST.render();
// System.out.println(output);
String b = "#" + actionName + "#";
int start = output.indexOf(b);
String e = "#end-" + actionName + "#";
int end = output.indexOf(e);
String snippet = output.substring(start + b.length(), end);
assertEquals(expected, snippet);
}
if (equeue.size() > 0) {
System.err.println(equeue.toString());
}
}
protected void checkGrammarSemanticsError(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage) throws Exception {
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType() == expectedMessage.getErrorType()) {
foundMsg = m;
}
}
assertNotNull("no error; " + expectedMessage.getErrorType()
+ " expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()),
Arrays.toString(foundMsg.getArgs()));
if (equeue.size() != 1) {
System.err.println(equeue);
}
}
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage) throws Exception {
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.warnings.size(); i++) {
ANTLRMessage m = equeue.warnings.get(i);
if (m.getErrorType() == expectedMessage.getErrorType()) {
foundMsg = m;
}
}
assertNotNull("no error; " + expectedMessage.getErrorType()
+ " expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()),
Arrays.toString(foundMsg.getArgs()));
if (equeue.size() != 1) {
System.err.println(equeue);
}
}
protected void checkError(ErrorQueue equeue, ANTLRMessage expectedMessage)
throws Exception {
// System.out.println("errors="+equeue);
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType() == expectedMessage.getErrorType()) {
foundMsg = m;
}
}
assertTrue("no error; " + expectedMessage.getErrorType() + " expected",
!equeue.errors.isEmpty());
assertTrue("too many errors; " + equeue.errors,
equeue.errors.size() <= 1);
assertNotNull(
"couldn't find expected error: "
+ expectedMessage.getErrorType(), foundMsg);
/*
* assertTrue("error is not a GrammarSemanticsMessage", foundMsg
* instanceof GrammarSemanticsMessage);
*/
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
}
public static class FilteringTokenStream extends CommonTokenStream {
public FilteringTokenStream(TokenSource src) {
super(src);
}
Set<Integer> hide = new HashSet<Integer>();
@Override
protected boolean sync(int i) {
if (!super.sync(i)) {
return false;
}
Token t = get(i);
if (hide.contains(t.getType())) {
((WritableToken) t).setChannel(Token.HIDDEN_CHANNEL);
}
return true;
}
public void setTokenTypeChannel(int ttype, int channel) {
hide.add(ttype);
}
}
protected void mkdir(File dir) {
dir.mkdirs();
}
protected void writeParserTestFile(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug) {
@ -723,7 +311,7 @@ public class BaseGoTest implements RuntimeTestSupport {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName.substring(0, 1).toUpperCase() + parserStartRuleName.substring(1) );
writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
writeFile(getTempDirPath(), "Test.go", outputFileST.render());
}
@ -755,222 +343,7 @@ public class BaseGoTest implements RuntimeTestSupport {
+ "}\n"
+ "\n");
outputFileST.add("lexerName", lexerName);
writeFile(overall_tmpdir.toString(), "Test.go", outputFileST.render());
writeFile(getTempDirPath(), "Test.go", outputFileST.render());
}
public void writeRecognizer(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug) {
if (parserName == null) {
writeLexerTestFile(lexerName, debug);
}
else {
writeParserTestFile(parserName, lexerName, listenerName,
visitorName, parserStartRuleName, debug);
}
}
protected void eraseFilesEndingWith(final String filesEndingWith) {
File[] files = overall_tmpdir.listFiles(new FileFilter() {
@Override
public boolean accept(File pathname) {
return pathname.getName().endsWith(filesEndingWith);
}
});
for (File file : files) {
file.delete();
}
}
protected static void eraseDirectory(File dir) {
File[] files = dir.listFiles();
if (files != null) {
for (File file : files) {
if (file.isDirectory()) {
eraseDirectory(file);
}
else {
file.delete();
}
}
}
dir.delete();
}
public void eraseTempDir() {
boolean doErase = true;
String propName = "antlr-go-erase-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0)
doErase = Boolean.getBoolean(prop);
if (doErase) {
if ( overall_tmpdir.exists()) {
eraseDirectory(overall_tmpdir);
}
}
}
public String getFirstLineOfException() {
if (this.stderrDuringParse == null) {
return null;
}
String[] lines = this.stderrDuringParse.split("\n");
String prefix = "Exception in thread \"main\" ";
return lines[0].substring(prefix.length(), lines[0].length());
}
/**
* When looking at a result set that consists of a Map/HashTable we cannot
* rely on the output order, as the hashing algorithm or other aspects of
* the implementation may differ across JDKs or platforms. Hence
* we take the Map, convert the keys to a List, sort them and Stringify the
* Map, which is a bit of a hack, but guarantees that we get the same order
* on all systems. We assume that the keys are strings.
*
* @param m
* The Map that contains keys we wish to return in sorted order
* @return A string that represents all the keys in sorted order.
*/
public <K, V> String sortMapToString(Map<K, V> m) {
// Pass in crap, and get nothing back
//
if (m == null) {
return null;
}
System.out.println("Map toString looks like: " + m.toString());
// Sort the keys in the Map
//
TreeMap<K, V> nset = new TreeMap<K, V>(m);
System.out.println("Tree map looks like: " + nset.toString());
return nset.toString();
}
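// Illustrative usage sketch (not part of this change set): because sortMapToString
// routes the entries through a TreeMap, two maps with the same contents render
// identically on every JDK, e.g.
//
//   Map<String, Integer> counts = new HashMap<>();
//   counts.put("b", 2);
//   counts.put("a", 1);
//   assertEquals("{a=1, b=2}", sortMapToString(counts));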
public List<String> realElements(List<String> elements) {
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}
public void assertNotNullOrEmpty(String message, String text) {
assertNotNull(message, text);
assertFalse(message, text.isEmpty());
}
public void assertNotNullOrEmpty(String text) {
assertNotNull(text);
assertFalse(text.isEmpty());
}
public static class IntTokenStream implements TokenStream {
IntegerList types;
int p = 0;
public IntTokenStream(IntegerList types) {
this.types = types;
}
@Override
public void consume() {
p++;
}
@Override
public int LA(int i) {
return LT(i).getType();
}
@Override
public int mark() {
return index();
}
@Override
public int index() {
return p;
}
@Override
public void release(int marker) {
seek(marker);
}
@Override
public void seek(int index) {
p = index;
}
@Override
public int size() {
return types.size();
}
@Override
public String getSourceName() {
return null;
}
@Override
public Token LT(int i) {
CommonToken t;
int rawIndex = p + i - 1;
if (rawIndex >= types.size())
t = new CommonToken(Token.EOF);
else
t = new CommonToken(types.get(rawIndex));
t.setTokenIndex(rawIndex);
return t;
}
@Override
public Token get(int i) {
return new org.antlr.v4.runtime.CommonToken(types.get(i));
}
@Override
public TokenSource getTokenSource() {
return null;
}
@Override
public String getText() {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Token start, Token stop) {
throw new UnsupportedOperationException("can't give strings");
}
}
/** Sort a list */
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/** Return map sorted by key */
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(
Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}


@ -5,7 +5,6 @@
*/
package org.antlr.v4.test.runtime.java;
import org.antlr.v4.Tool;
import org.antlr.v4.analysis.AnalysisPipeline;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
@ -14,32 +13,16 @@ import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.DecisionState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.misc.Pair;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.test.runtime.*;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
@ -68,13 +51,9 @@ import java.net.URL;
import java.net.URLClassLoader;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import static junit.framework.TestCase.assertEquals;
import static junit.framework.TestCase.assertFalse;
@ -83,9 +62,7 @@ import static junit.framework.TestCase.assertTrue;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertArrayEquals;
public class BaseJavaTest implements RuntimeTestSupport {
public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");
public class BaseJavaTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
/**
* When the {@code antlr.testinprocess} runtime property is set to
@ -103,289 +80,14 @@ public class BaseJavaTest implements RuntimeTestSupport {
*/
public static final boolean TEST_IN_SAME_PROCESS = Boolean.parseBoolean(System.getProperty("antlr.testinprocess"));
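// Illustrative only: when -Dantlr.testinprocess=true is passed to the JVM running the
// tests, execClass() below loads the generated Test class through a URLClassLoader and
// invokes its main() inside the current process; otherwise a separate "java" process is
// forked. A hypothetical invocation:
//
//   mvn -Dantlr.testinprocess=true test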
/**
* When the {@code antlr.preserve-test-dir} runtime property is set to
* {@code true}, the temporary directories created by the test run will not
* be removed at the end of the test run, even for tests that completed
* successfully.
* <p>
* <p>
* The default behavior (used in all other cases) is removing the temporary
* directories for all tests which completed successfully, and preserving
* the directories for tests which failed.</p>
*/
public static final boolean PRESERVE_TEST_DIR = true; //Boolean.parseBoolean(System.getProperty("antlr.preserve-test-dir"));
/**
* The base test directory is the directory where generated files get placed
* during unit test execution.
* <p>
* <p>
* The default value for this property is the {@code java.io.tmpdir} system
* property, and can be overridden by setting the
* {@code antlr.java-test-dir} property to a custom location. Note that the
* {@code antlr.java-test-dir} property directly affects the
* {@link #CREATE_PER_TEST_DIRECTORIES} value as well.</p>
*/
public static final String BASE_TEST_DIR;
/**
* When {@code true}, a temporary directory will be created for each test
* executed during the test run.
* <p>
* <p>
* This value is {@code true} when the {@code antlr.java-test-dir} system
* property is set, and otherwise {@code false}.</p>
*/
public static final boolean CREATE_PER_TEST_DIRECTORIES;
static {
String baseTestDir = System.getProperty("antlr.java-test-dir");
boolean perTestDirectories = false;
if ( baseTestDir==null || baseTestDir.isEmpty() ) {
baseTestDir = System.getProperty("java.io.tmpdir");
perTestDirectories = true;
}
if ( !new File(baseTestDir).isDirectory() ) {
throw new UnsupportedOperationException("The specified base test directory does not exist: "+baseTestDir);
}
BASE_TEST_DIR = baseTestDir;
CREATE_PER_TEST_DIRECTORIES = perTestDirectories;
}
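// Illustrative only: with no override, each test writes into a fresh directory under
// java.io.tmpdir (CREATE_PER_TEST_DIRECTORIES == true). Pointing the suite at a fixed
// location instead, e.g.
//
//   mvn -Dantlr.java-test-dir=/var/tmp/antlr-tests test     (hypothetical path)
//
// reuses that single directory and turns the per-test subdirectories off.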
/**
* Build up the full classpath we need, including the surefire path (if present)
*/
public static final String CLASSPATH = System.getProperty("java.class.path");
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/**
* Errors found while running antlr
*/
protected StringBuilder antlrToolErrors;
@Override
public void testSetUp() throws Exception {
// STGroup.verbose = true;
// System.err.println("testSetUp "+Thread.currentThread().getName());
if ( CREATE_PER_TEST_DIRECTORIES ) {
// new output dir for each test
String threadName = Thread.currentThread().getName();
String testDirectory = getClass().getSimpleName()+"-"+threadName+"-"+System.nanoTime();
tmpdir = new File(BASE_TEST_DIR, testDirectory).getAbsolutePath();
}
else {
tmpdir = new File(BASE_TEST_DIR).getAbsolutePath();
if ( !PRESERVE_TEST_DIR && new File(tmpdir).exists() ) {
eraseFiles();
}
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if ( antlrToolErrors.length()==0 ) {
return null;
}
return antlrToolErrors.toString();
}
protected org.antlr.v4.Tool newTool(String[] args) {
Tool tool = new Tool(args);
return tool;
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if ( g.atn==null ) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if ( g.isLexer() ) {
f = new LexerATNFactory((LexerGrammar) g);
}
else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if ( useSerializer ) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if ( g.ast!=null && !g.ast.hasErrors ) {
// System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
public DFA createDFA(Grammar g, DecisionState s) {
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
// DFA dfa = conv.createDFA();
// conv.issueAmbiguityWarnings();
// System.out.print("DFA="+dfa);
// return dfa;
return null;
}
// public void minimizeDFA(DFA dfa) {
// DFAMinimizer dmin = new DFAMinimizer(dfa);
// dfa.minimized = dmin.minimize();
// }
IntegerList getTypesFromString(Grammar g, String expecting) {
IntegerList expectingTokenTypes = new IntegerList();
if ( expecting!=null && !expecting.trim().isEmpty() ) {
for (String tname : expecting.replace(" ", "").split(",")) {
int ttype = g.getTokenType(tname);
expectingTokenTypes.add(ttype);
}
}
return expectingTokenTypes;
}
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
ANTLRInputStream in = new ANTLRInputStream(input);
IntegerList tokenTypes = new IntegerList();
int ttype;
do {
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
tokenTypes.add(ttype);
} while ( ttype!=Token.EOF );
return tokenTypes;
}
public List<String> getTokenTypes(LexerGrammar lg,
ATN atn,
CharStream input) {
LexerATNSimulator interp = new LexerATNSimulator(atn, new DFA[]{new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
List<String> tokenTypes = new ArrayList<String>();
int ttype;
boolean hitEOF = false;
do {
if ( hitEOF ) {
tokenTypes.add("EOF");
break;
}
int t = input.LA(1);
ttype = interp.match(input, Lexer.DEFAULT_MODE);
if ( ttype==Token.EOF ) {
tokenTypes.add("EOF");
}
else {
tokenTypes.add(lg.typeToTokenList.get(ttype));
}
if ( t==IntStream.EOF ) {
hitEOF = true;
}
} while ( ttype!=Token.EOF );
return tokenTypes;
}
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
throws Exception {
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
if ( s==null ) {
System.err.println("no such rule: "+ruleName);
return null;
}
ATNState t = s.transition(0).target;
if ( !(t instanceof DecisionState) ) {
System.out.println(ruleName+" has no decision");
return null;
}
DecisionState blk = (DecisionState) t;
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
throws Exception {
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
DecisionState blk = atn.decisionToState.get(decision);
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
throws Exception {
DFA dfa = createDFA(g, blk);
String result = null;
if ( dfa!=null ) result = dfa.toString();
assertEquals(expecting, result);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
throws Exception {
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
throws Exception {
ErrorQueue equeue = new ErrorQueue();
LexerGrammar g = new LexerGrammar(gtext, equeue);
g.atn = createATN(g, false);
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
// DFA dfa = conv.createDFA(modeName);
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
//
// String result = null;
// if ( dfa!=null ) result = dfa.toString();
// assertEquals(expecting, result);
//
// return equeue.all;
return null;
protected String getPropertyPrefix() {
return "antrl4-java";
}
protected String load(String fileName, String encoding)
@ -420,7 +122,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
protected boolean compile(String... fileNames) {
List<File> files = new ArrayList<File>();
for (String fileName : fileNames) {
File f = new File(tmpdir, fileName);
File f = new File(getTempTestDir(), fileName);
files.add(f);
}
@ -435,7 +137,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
fileManager.getJavaFileObjectsFromFiles(files);
Iterable<String> compileOptions =
Arrays.asList("-g", "-source", "1.6", "-target", "1.6", "-implicit:class", "-Xlint:-options", "-d", tmpdir, "-cp", tmpdir+pathSep+CLASSPATH);
Arrays.asList("-g", "-source", "1.6", "-target", "1.6", "-implicit:class", "-Xlint:-options", "-d", getTempDirPath(), "-cp", getTempDirPath() + PATH_SEP + CLASSPATH);
JavaCompiler.CompilationTask task =
compiler.getTask(null, fileManager, null, compileOptions, null,
@ -469,11 +171,10 @@ public class BaseJavaTest implements RuntimeTestSupport {
null,
lexerName);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
compile("Test.java");
String output = execClass("Test");
return output;
return execClass("Test");
}
public ParseTree execParser(String startRuleName, String input,
@ -523,7 +224,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
public Class<?> loadClassFromTempDir(String name) throws Exception {
ClassLoader loader =
new URLClassLoader(new URL[]{new File(tmpdir).toURI().toURL()},
new URLClassLoader(new URL[]{getTempTestDir().toURI().toURL()},
ClassLoader.getSystemClassLoader());
return loader.loadClass(name);
}
@ -571,7 +272,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
lexerName,
"-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
return rawExecRecognizer(parserName,
lexerName,
startRuleName,
@ -598,7 +299,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
String... extraOptions)
{
ErrorQueue equeue =
BaseRuntimeTest.antlrOnString(getTmpDir(), "Java", grammarFileName, grammarStr, defaultListener, extraOptions);
BaseRuntimeTest.antlrOnString(getTempDirPath(), "Java", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}
@ -620,8 +321,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
files.add(grammarName+"BaseVisitor.java");
}
}
boolean allIsWell = compile(files.toArray(new String[files.size()]));
return allIsWell;
return compile(files.toArray(new String[0]));
}
protected String rawExecRecognizer(String parserName,
@ -630,7 +330,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
boolean debug,
boolean profile)
{
this.stderrDuringParse = null;
setParseErrors(null);
if ( parserName==null ) {
writeLexerTestFile(lexerName, false);
}
@ -653,7 +353,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
public String execClass(String className) {
if (TEST_IN_SAME_PROCESS) {
try {
ClassLoader loader = new URLClassLoader(new URL[] { new File(tmpdir).toURI().toURL() }, ClassLoader.getSystemClassLoader());
ClassLoader loader = new URLClassLoader(new URL[] { getTempTestDir().toURI().toURL() }, ClassLoader.getSystemClassLoader());
final Class<?> mainClass = (Class<?>)loader.loadClass(className);
final Method mainMethod = mainClass.getDeclaredMethod("main", String[].class);
PipedInputStream stdoutIn = new PipedInputStream();
@ -671,7 +371,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
System.setErr(new PrintStream(stderrOut));
stdoutVacuum.start();
stderrVacuum.start();
mainMethod.invoke(null, (Object)new String[] { new File(tmpdir, "input").getAbsolutePath() });
mainMethod.invoke(null, (Object)new String[] { new File(getTempTestDir(), "input").getAbsolutePath() });
}
finally {
System.setErr(originalErr);
@ -690,7 +390,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
output = null;
}
if ( stderrVacuum.toString().length()>0 ) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
}
@ -701,14 +401,14 @@ public class BaseJavaTest implements RuntimeTestSupport {
try {
String[] args = new String[] {
"java", "-classpath", tmpdir+pathSep+CLASSPATH,
"java", "-classpath", getTempDirPath() + PATH_SEP + CLASSPATH,
"-Dfile.encoding=UTF-8",
className, new File(tmpdir, "input").getAbsolutePath()
className, new File(getTempTestDir(), "input").getAbsolutePath()
};
// String cmdLine = Utils.join(args, " ");
// System.err.println("execParser: "+cmdLine);
Process process =
Runtime.getRuntime().exec(args, null, new File(tmpdir));
Runtime.getRuntime().exec(args, null, getTempTestDir());
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stdoutVacuum.start();
@ -721,7 +421,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
output = null;
}
if ( stderrVacuum.toString().length()>0 ) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
}
@ -732,49 +432,6 @@ public class BaseJavaTest implements RuntimeTestSupport {
return null;
}
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
// }
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
// if ( a==null ) assertNull(expectedAmbigAlts);
// else {
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
// }
// assertEquals(expectedAmbigInput, a.input);
// }
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
// throws Exception
// {
// unreachable(msgs, 0, expectedUnreachableAlts);
// }
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
// if ( u==null ) assertNull(expectedUnreachableAlts);
// else {
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
// }
// }
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
for (ANTLRMessage m : msgs) {
if ( m.getClass() == c ) filtered.add(m);
}
return filtered;
}
public void checkRuleATN(Grammar g, String ruleName, String expecting) {
// DOTGenerator dot = new DOTGenerator(g);
// System.out.println(dot.getDOT(g.atn.ruleToStartState[g.getRule(ruleName).index]));
@ -824,25 +481,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
}
}
protected void checkGrammarSemanticsError(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception
{
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if ( equeue.size()!=1 ) {
System.err.println(equeue);
}
}
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
@ -864,49 +503,6 @@ public class BaseJavaTest implements RuntimeTestSupport {
}
}
protected void checkError(ErrorQueue equeue,
ANTLRMessage expectedMessage)
throws Exception
{
//System.out.println("errors="+equeue);
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty());
assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1);
assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg);
/*
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
*/
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
}
public static class FilteringTokenStream extends CommonTokenStream {
public FilteringTokenStream(TokenSource src) { super(src); }
Set<Integer> hide = new HashSet<Integer>();
@Override
protected boolean sync(int i) {
if (!super.sync(i)) {
return false;
}
Token t = get(i);
if ( hide.contains(t.getType()) ) {
((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
}
return true;
}
public void setTokenTypeChannel(int ttype, int channel) {
hide.add(ttype);
}
}
protected void writeTestFile(String parserName,
String lexerName,
String parserStartRuleName,
@ -969,7 +565,7 @@ public class BaseJavaTest implements RuntimeTestSupport {
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.java", outputFileST.render());
writeFile(getTempDirPath(), "Test.java", outputFileST.render());
}
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
@ -990,210 +586,13 @@ public class BaseJavaTest implements RuntimeTestSupport {
);
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.java", outputFileST.render());
writeFile(getTempDirPath(), "Test.java", outputFileST.render());
}
public void writeRecognizerAndCompile(String parserName, String lexerName,
String parserStartRuleName,
boolean debug,
boolean profile) {
if ( parserName==null ) {
writeLexerTestFile(lexerName, debug);
}
else {
writeTestFile(parserName,
lexerName,
parserStartRuleName,
debug,
profile);
}
compile("Test.java");
}
protected void eraseFiles(final String filesEndingWith) {
File tmpdirF = new File(tmpdir);
String[] files = tmpdirF.list();
for(int i = 0; files!=null && i < files.length; i++) {
if ( files[i].endsWith(filesEndingWith) ) {
new File(tmpdir+"/"+files[i]).delete();
}
}
}
protected void eraseFiles() {
if (tmpdir == null) {
return;
}
File tmpdirF = new File(tmpdir);
String[] files = tmpdirF.list();
for(int i = 0; files!=null && i < files.length; i++) {
new File(tmpdir+"/"+files[i]).delete();
}
}
public void eraseTempDir() {
File tmpdirF = new File(tmpdir);
if ( tmpdirF.exists() ) {
eraseFiles();
tmpdirF.delete();
}
}
public String getFirstLineOfException() {
if ( this.stderrDuringParse ==null ) {
return null;
}
String[] lines = this.stderrDuringParse.split("\n");
String prefix="Exception in thread \"main\" ";
return lines[0].substring(prefix.length(),lines[0].length());
}
/**
* When looking at a result set that consists of a Map/HashTable
* we cannot rely on the output order, as the hashing algorithm or other aspects
* of the implementation may differ across JDKs or platforms. Hence
* we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
* the keys are strings.
*
* @param m The Map that contains keys we wish to return in sorted order
* @return A string that represents all the keys in sorted order.
*/
public <K, V> String sortMapToString(Map<K, V> m) {
// Pass in crap, and get nothing back
//
if (m == null) {
return null;
}
System.out.println("Map toString looks like: " + m.toString());
// Sort the keys in the Map
//
TreeMap<K, V> nset = new TreeMap<K, V>(m);
System.out.println("Tree map looks like: " + nset.toString());
return nset.toString();
}
public List<String> realElements(List<String> elements) {
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}
public void assertNotNullOrEmpty(String message, String text) {
assertNotNull(message, text);
assertFalse(message, text.isEmpty());
}
public void assertNotNullOrEmpty(String text) {
assertNotNull(text);
assertFalse(text.isEmpty());
}
public static class IntTokenStream implements TokenStream {
public IntegerList types;
int p=0;
public IntTokenStream(IntegerList types) { this.types = types; }
@Override
public void consume() { p++; }
@Override
public int LA(int i) { return LT(i).getType(); }
@Override
public int mark() {
return index();
}
@Override
public int index() { return p; }
@Override
public void release(int marker) {
seek(marker);
}
@Override
public void seek(int index) {
p = index;
}
@Override
public int size() {
return types.size();
}
@Override
public String getSourceName() {
return UNKNOWN_SOURCE_NAME;
}
@Override
public Token LT(int i) {
CommonToken t;
int rawIndex = p + i - 1;
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
else t = new CommonToken(types.get(rawIndex));
t.setTokenIndex(rawIndex);
return t;
}
@Override
public Token get(int i) {
return new org.antlr.v4.runtime.CommonToken(types.get(i));
}
@Override
public TokenSource getTokenSource() {
return null;
}
@Override
public String getText() {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Token start, Token stop) {
throw new UnsupportedOperationException("can't give strings");
}
}
/** Sort a list */
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/** Return map sorted by key */
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}


@ -5,171 +5,23 @@
*/
package org.antlr.v4.test.runtime.javascript;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.test.runtime.TestContext;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.antlr.v4.test.runtime.*;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;
import java.io.File;
import java.io.IOException;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.TimeUnit;
import java.util.*;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.*;
public class BaseNodeTest implements RuntimeTestSupport {
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
// private static final Logger LOGGER =
// Logger.getLogger(BaseTest.class.getName());
public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return stdout
* and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/** Errors found while running antlr */
protected StringBuilder antlrToolErrors;
public class BaseNodeTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
@Override
public void testSetUp() throws Exception {
// new output dir for each test
String prop = System.getProperty("antlr-javascript-test-dir");
if (prop != null && prop.length() > 0) {
tmpdir = prop;
}
else {
tmpdir = new File(System.getProperty("java.io.tmpdir"), getClass()
.getSimpleName()+"-"+Thread.currentThread().getName()+"-"+System.currentTimeMillis())
.getAbsolutePath();
}
File dir = new File(tmpdir);
if (dir.exists())
this.eraseFiles(dir);
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if ( antlrToolErrors.length()==0 ) {
return null;
}
return antlrToolErrors.toString();
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if (g.atn == null) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if (g.isLexer()) {
f = new LexerATNFactory((LexerGrammar) g);
}
else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if (g.ast != null && !g.ast.hasErrors) {
System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if (g.getImportedGrammars() != null) { // process imported grammars
// (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
protected String getPropertyPrefix() {
return "antlr4-javascript";
}
protected String execLexer(String grammarFileName, String grammarStr,
@ -183,9 +35,9 @@ public class BaseNodeTest implements RuntimeTestSupport {
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
grammarStr, null, lexerName, "-no-listener");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
writeFile(tmpdir, "package.json", "{\"type\": \"module\"}");
writeFile(getTempDirPath(), "package.json", "{\"type\": \"module\"}");
String output = execModule("Test.js");
if ( output!=null && output.length()==0 ) {
output = null;
@ -202,10 +54,10 @@ public class BaseNodeTest implements RuntimeTestSupport {
boolean success = rawGenerateAndBuildRecognizer(grammarFileName,
grammarStr, parserName, lexerName, "-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
rawBuildRecognizerTestFile(parserName, lexerName, listenerName,
visitorName, startRuleName, showDiagnosticErrors);
writeFile(tmpdir, "package.json", "{\"type\": \"module\"}");
writeFile(getTempDirPath(), "package.json", "{\"type\": \"module\"}");
return execRecognizer();
}
@ -221,7 +73,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
protected boolean rawGenerateAndBuildRecognizer(String grammarFileName,
String grammarStr, String parserName, String lexerName,
boolean defaultListener, String... extraOptions) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "JavaScript", grammarFileName, grammarStr,
ErrorQueue equeue = antlrOnString(getTempDirPath(), "JavaScript", grammarFileName, grammarStr,
defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
@ -252,7 +104,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
protected void rawBuildRecognizerTestFile(String parserName,
String lexerName, String listenerName, String visitorName,
String parserStartRuleName, boolean debug) {
this.stderrDuringParse = null;
setParseErrors(null);
if (parserName == null) {
writeLexerTestFile(lexerName, false);
}
@ -269,20 +121,20 @@ public class BaseNodeTest implements RuntimeTestSupport {
public String execModule(String fileName) {
try {
String npmPath = locateNpm();
if(!TestContext.isTravisCI()) {
if(!TestContext.isTravisCI() && !TestContext.isCircleCI()) {
installRuntime(npmPath);
registerRuntime(npmPath);
}
String modulePath = new File(new File(tmpdir), fileName)
String modulePath = new File(getTempTestDir(), fileName)
.getAbsolutePath();
linkRuntime(npmPath);
String nodejsPath = locateNodeJS();
String inputPath = new File(new File(tmpdir), "input")
String inputPath = new File(getTempTestDir(), "input")
.getAbsolutePath();
ProcessBuilder builder = new ProcessBuilder(nodejsPath, modulePath,
inputPath);
builder.environment().put("NODE_PATH", tmpdir);
builder.directory(new File(tmpdir));
builder.environment().put("NODE_PATH", getTempDirPath());
builder.directory(getTempTestDir());
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(
process.getInputStream());
@ -301,7 +153,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
output = null;
}
if (stderrVacuum.toString().length() > 0) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
} catch (Exception e) {
@ -316,8 +168,8 @@ public class BaseNodeTest implements RuntimeTestSupport {
String runtimePath = locateRuntime();
ProcessBuilder builder = new ProcessBuilder(npmPath, "install");
builder.directory(new File(runtimePath));
builder.redirectError(new File(tmpdir, "error.txt"));
builder.redirectOutput(new File(tmpdir, "output.txt"));
builder.redirectError(new File(getTempTestDir(), "error.txt"));
builder.redirectOutput(new File(getTempTestDir(), "output.txt"));
Process process = builder.start();
// TODO switch to jdk 8
process.waitFor();
@ -332,8 +184,8 @@ public class BaseNodeTest implements RuntimeTestSupport {
String runtimePath = locateRuntime();
ProcessBuilder builder = new ProcessBuilder(npmPath, "link");
builder.directory(new File(runtimePath));
builder.redirectError(new File(tmpdir, "error.txt"));
builder.redirectOutput(new File(tmpdir, "output.txt"));
builder.redirectError(new File(getTempTestDir(), "error.txt"));
builder.redirectOutput(new File(getTempTestDir(), "output.txt"));
Process process = builder.start();
// TODO switch to jdk 8
process.waitFor();
@ -345,10 +197,14 @@ public class BaseNodeTest implements RuntimeTestSupport {
}
private void linkRuntime(String npmPath) throws IOException, InterruptedException {
ProcessBuilder builder = new ProcessBuilder(npmPath, "link", "antlr4");
builder.directory(new File(tmpdir));
builder.redirectError(new File(tmpdir, "error.txt"));
builder.redirectOutput(new File(tmpdir, "output.txt"));
List<String> args = new ArrayList<>();
if(TestContext.isCircleCI())
args.add("sudo");
args.addAll(Arrays.asList(npmPath, "link", "antlr4"));
ProcessBuilder builder = new ProcessBuilder(args.toArray(new String[0]));
builder.directory(getTempTestDir());
builder.redirectError(new File(getTempTestDir(), "error.txt"));
builder.redirectOutput(new File(getTempTestDir(), "output.txt"));
Process process = builder.start();
// TODO switch to jdk 8
process.waitFor();
@ -410,11 +266,6 @@ public class BaseNodeTest implements RuntimeTestSupport {
return runtimeSrc.getPath();
}
private boolean isWindows() {
return System.getProperty("os.name").toLowerCase().contains("windows");
}
protected void writeParserTestFile(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug) {
@ -465,7 +316,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.js", outputFileST.render());
writeFile(getTempDirPath(), "Test.js", outputFileST.render());
}
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
@ -485,49 +336,7 @@ public class BaseNodeTest implements RuntimeTestSupport {
: "") + "}\n" + "\n" + "main(process.argv);\n"
+ "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.js", outputFileST.render());
writeFile(getTempDirPath(), "Test.js", outputFileST.render());
}
protected void eraseFiles(File dir) {
String[] files = dir.list();
for (int i = 0; files != null && i < files.length; i++) {
new File(dir, files[i]).delete();
}
}
@Override
public void eraseTempDir() {
boolean doErase = true;
String propName = "antlr-javascript-erase-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0)
doErase = Boolean.getBoolean(prop);
if (doErase) {
File tmpdirF = new File(tmpdir);
if (tmpdirF.exists()) {
eraseFiles(tmpdirF);
tmpdirF.delete();
}
}
}
/** Sort a list */
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>(data);
Collections.sort(dup);
return dup;
}
/** Return map sorted by key */
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(
Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}


@ -11,24 +11,11 @@ import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.test.runtime.*;
import org.stringtemplate.v4.ST;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
@ -36,124 +23,12 @@ import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class BasePHPTest implements RuntimeTestSupport {
public static final String newline = System.getProperty("line.separator");
public class BasePHPTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/**
* Errors found while running antlr
*/
protected StringBuilder antlrToolErrors;
private String getPropertyPrefix() {
public String getPropertyPrefix() {
return "antlr-php";
}
@Override
public void testSetUp() throws Exception {
// new output dir for each test
String propName = getPropertyPrefix() + "-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0) {
tmpdir = prop;
} else {
String classSimpleName = getClass().getSimpleName();
String threadName = Thread.currentThread().getName();
String childPath = String.format("%s-%s-%s", classSimpleName, threadName, System.currentTimeMillis());
tmpdir = new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if (antlrToolErrors.length() == 0) {
return null;
}
return antlrToolErrors.toString();
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if (g.atn == null) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if (g.isLexer()) {
f = new LexerATNFactory((LexerGrammar) g);
} else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if (g.ast != null && !g.ast.hasErrors) {
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if (g.getImportedGrammars() != null) {
for (Grammar imp: g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
protected String execLexer(
String grammarFileName,
String grammarStr,
String lexerName,
String input
) {
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
}
@Override
public String execLexer(
String grammarFileName,
@ -170,11 +45,9 @@ public class BasePHPTest implements RuntimeTestSupport {
"-no-listener"
);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
String output = execModule("Test.php");
return output;
return execModule("Test.php");
}
public String execParser(
@ -224,7 +97,7 @@ public class BasePHPTest implements RuntimeTestSupport {
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
rawBuildRecognizerTestFile(
parserName,
@ -270,7 +143,7 @@ public class BasePHPTest implements RuntimeTestSupport {
boolean defaultListener,
String... extraOptions
) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "PHP", grammarFileName, grammarStr, defaultListener, extraOptions);
ErrorQueue equeue = antlrOnString(getTempDirPath(), "PHP", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
@ -307,7 +180,7 @@ public class BasePHPTest implements RuntimeTestSupport {
boolean debug,
boolean trace
) {
this.stderrDuringParse = null;
setParseErrors(null);
if (parserName == null) {
writeLexerTestFile(lexerName, false);
} else {
@ -331,15 +204,14 @@ public class BasePHPTest implements RuntimeTestSupport {
String phpPath = locatePhp();
String runtimePath = locateRuntime();
File tmpdirFile = new File(tmpdir);
String modulePath = new File(tmpdirFile, fileName).getAbsolutePath();
String inputPath = new File(tmpdirFile, "input").getAbsolutePath();
Path outputPath = tmpdirFile.toPath().resolve("output").toAbsolutePath();
String modulePath = new File(getTempTestDir(), fileName).getAbsolutePath();
String inputPath = new File(getTempTestDir(), "input").getAbsolutePath();
Path outputPath = getTempTestDir().toPath().resolve("output").toAbsolutePath();
try {
ProcessBuilder builder = new ProcessBuilder(phpPath, modulePath, inputPath, outputPath.toString());
builder.environment().put("RUNTIME", runtimePath);
builder.directory(tmpdirFile);
builder.directory(getTempTestDir());
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
@ -355,7 +227,7 @@ public class BasePHPTest implements RuntimeTestSupport {
}
if (stderrVacuum.toString().length() > 0) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
@ -464,7 +336,7 @@ public class BasePHPTest implements RuntimeTestSupport {
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.php", outputFileST.render());
writeFile(getTempDirPath(), "Test.php", outputFileST.render());
}
protected void writeParserTestFile(
@ -546,54 +418,7 @@ public class BasePHPTest implements RuntimeTestSupport {
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.php", outputFileST.render());
writeFile(getTempDirPath(), "Test.php", outputFileST.render());
}
protected void eraseFiles(File dir) {
String[] files = dir.list();
for (int i = 0; files != null && i < files.length; i++) {
new File(dir, files[i]).delete();
}
}
@Override
public void eraseTempDir() {
boolean doErase = true;
String propName = getPropertyPrefix() + "-erase-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0) {
doErase = Boolean.getBoolean(prop);
}
if (doErase) {
File tmpdirF = new File(tmpdir);
if (tmpdirF.exists()) {
eraseFiles(tmpdirF);
tmpdirF.delete();
}
}
}
/**
* Sort a list
*/
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/**
* Return map sorted by key
*/
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k: keys) {
dup.put(k, data.get(k));
}
return dup;
}
}


@ -5,339 +5,38 @@
*/
package org.antlr.v4.test.runtime.python;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.ATNFactory;
import org.antlr.v4.automata.ATNPrinter;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.runtime.ANTLRInputStream;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.CommonToken;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.IntStream;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.Parser;
import org.antlr.v4.runtime.RuleContext;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenSource;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.WritableToken;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.DecisionState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.runtime.misc.Interval;
import org.antlr.v4.runtime.tree.ParseTree;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.test.runtime.TestOutputReading;
import org.antlr.v4.tool.ANTLRMessage;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.GrammarSemanticsMessage;
import org.antlr.v4.tool.LexerGrammar;
import org.antlr.v4.tool.Rule;
import org.junit.rules.TestRule;
import org.junit.rules.TestWatcher;
import org.antlr.v4.test.runtime.*;
import org.junit.runner.Description;
import org.stringtemplate.v4.ST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;
import java.io.File;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.net.URL;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.*;
public abstract class BasePythonTest implements RuntimeTestSupport {
// -J-Dorg.antlr.v4.test.BaseTest.level=FINE
// private static final Logger LOGGER = Logger.getLogger(BaseTest.class.getName());
public static final String newline = System.getProperty("line.separator");
public static final String pathSep = System.getProperty("path.separator");
public String tmpdir = null;
/** If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/** Errors found while running antlr */
protected StringBuilder antlrToolErrors;
@org.junit.Rule
public final TestRule testWatcher = new TestWatcher() {
public abstract class BasePythonTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
@Override
protected void succeeded(Description description) {
// remove tmpdir if no error.
protected void testSucceeded(Description description) {
eraseTempPyCache();
eraseTempDir();
}
};
private String getPropertyPrefix() {
@Override
protected String getPropertyPrefix() {
return "antlr-" + getLanguage().toLowerCase();
}
@Override
public void testSetUp() throws Exception {
// new output dir for each test
String propName = getPropertyPrefix() + "-test-dir";
String prop = System.getProperty(propName);
if(prop!=null && prop.length()>0) {
tmpdir = prop;
}
else {
tmpdir = new File(System.getProperty("java.io.tmpdir"), getClass().getSimpleName()+
"-"+Thread.currentThread().getName()+"-"+System.currentTimeMillis()).getAbsolutePath();
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if ( antlrToolErrors.length()==0 ) {
return null;
}
return antlrToolErrors.toString();
}
protected org.antlr.v4.Tool newTool(String[] args) {
Tool tool = new Tool(args);
return tool;
}
protected Tool newTool() {
org.antlr.v4.Tool tool = new Tool(new String[] {"-o", tmpdir});
return tool;
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if ( g.atn==null ) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if ( g.isLexer() ) {
f = new LexerATNFactory((LexerGrammar)g);
}
else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if ( g.ast!=null && !g.ast.hasErrors ) {
System.out.println(g.ast.toStringTree());
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if ( g.getImportedGrammars()!=null ) { // process imported grammars (if any)
for (Grammar imp : g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
public DFA createDFA(Grammar g, DecisionState s) {
// PredictionDFAFactory conv = new PredictionDFAFactory(g, s);
// DFA dfa = conv.createDFA();
// conv.issueAmbiguityWarnings();
// System.out.print("DFA="+dfa);
// return dfa;
return null;
}
// public void minimizeDFA(DFA dfa) {
// DFAMinimizer dmin = new DFAMinimizer(dfa);
// dfa.minimized = dmin.minimize();
// }
IntegerList getTypesFromString(Grammar g, String expecting) {
IntegerList expectingTokenTypes = new IntegerList();
if ( expecting!=null && !expecting.trim().isEmpty() ) {
for (String tname : expecting.replace(" ", "").split(",")) {
int ttype = g.getTokenType(tname);
expectingTokenTypes.add(ttype);
}
}
return expectingTokenTypes;
}
public IntegerList getTokenTypesViaATN(String input, LexerATNSimulator lexerATN) {
ANTLRInputStream in = new ANTLRInputStream(input);
IntegerList tokenTypes = new IntegerList();
int ttype;
do {
ttype = lexerATN.match(in, Lexer.DEFAULT_MODE);
tokenTypes.add(ttype);
} while ( ttype!= Token.EOF );
return tokenTypes;
}
public List<String> getTokenTypes(LexerGrammar lg,
ATN atn,
CharStream input)
{
LexerATNSimulator interp = new LexerATNSimulator(atn,new DFA[] { new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE)) },null);
List<String> tokenTypes = new ArrayList<String>();
int ttype;
boolean hitEOF = false;
do {
if ( hitEOF ) {
tokenTypes.add("EOF");
break;
}
int t = input.LA(1);
ttype = interp.match(input, Lexer.DEFAULT_MODE);
if ( ttype == Token.EOF ) {
tokenTypes.add("EOF");
}
else {
tokenTypes.add(lg.typeToTokenList.get(ttype));
}
if ( t== IntStream.EOF ) {
hitEOF = true;
}
} while ( ttype!=Token.EOF );
return tokenTypes;
}
List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
throws Exception
{
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
if ( s==null ) {
System.err.println("no such rule: "+ruleName);
return null;
}
ATNState t = s.transition(0).target;
if ( !(t instanceof DecisionState) ) {
System.out.println(ruleName+" has no decision");
return null;
}
DecisionState blk = (DecisionState)t;
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
List<ANTLRMessage> checkRuleDFA(String gtext, int decision, String expecting)
throws Exception
{
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(gtext, equeue);
ATN atn = createATN(g, false);
DecisionState blk = atn.decisionToState.get(decision);
checkRuleDFA(g, blk, expecting);
return equeue.all;
}
void checkRuleDFA(Grammar g, DecisionState blk, String expecting)
throws Exception
{
DFA dfa = createDFA(g, blk);
String result = null;
if ( dfa!=null ) result = dfa.toString();
assertEquals(expecting, result);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String expecting)
throws Exception
{
return checkLexerDFA(gtext, LexerGrammar.DEFAULT_MODE_NAME, expecting);
}
List<ANTLRMessage> checkLexerDFA(String gtext, String modeName, String expecting)
throws Exception
{
ErrorQueue equeue = new ErrorQueue();
LexerGrammar g = new LexerGrammar(gtext, equeue);
g.atn = createATN(g, false);
// LexerATNToDFAConverter conv = new LexerATNToDFAConverter(g);
// DFA dfa = conv.createDFA(modeName);
// g.setLookaheadDFA(0, dfa); // only one decision to worry about
//
// String result = null;
// if ( dfa!=null ) result = dfa.toString();
// assertEquals(expecting, result);
//
// return equeue.all;
return null;
}
protected abstract String getLanguage();
protected String execLexer(String grammarFileName,
String grammarStr,
String lexerName,
String input)
{
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
}
@Override
public String execLexer(String grammarFileName,
String grammarStr,
@ -350,31 +49,12 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
null,
lexerName,"-no-listener");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
String output = execModule("Test.py");
return output;
}
public ParseTree execStartRule(String startRuleName, Parser parser)
throws IllegalAccessException, InvocationTargetException,
NoSuchMethodException
{
Method startRule = null;
Object[] args = null;
try {
startRule = parser.getClass().getMethod(startRuleName);
}
catch (NoSuchMethodException nsme) {
// try with int _p arg for recursive func
startRule = parser.getClass().getMethod(startRuleName, int.class);
args = new Integer[] {0};
}
ParseTree result = (ParseTree)startRule.invoke(parser, args);
// System.out.println("parse tree = "+result.toStringTree(parser));
return result;
return execModule("Test.py");
}
@Override
public String execParser(String grammarFileName,
String grammarStr,
String parserName,
@ -405,7 +85,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
lexerName,
"-visitor");
assertTrue(success);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
rawBuildRecognizerTestFile(parserName,
lexerName,
listenerName,
@ -434,8 +114,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
boolean defaultListener,
String... extraOptions)
{
ErrorQueue equeue =
antlrOnString(getTmpDir(), getLanguage(), grammarFileName, grammarStr, defaultListener, extraOptions);
ErrorQueue equeue = antlrOnString(getTempDirPath(), getLanguage(), grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}
@ -465,7 +144,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
boolean debug,
boolean trace)
{
this.stderrDuringParse = null;
setParseErrors(null);
if ( parserName==null ) {
writeLexerTestFile(lexerName, false);
}
@ -486,7 +165,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
public String execModule(String fileName) {
String pythonPath = locatePython();
String runtimePath = locateRuntime();
File tmpdirFile = new File(tmpdir);
File tmpdirFile = new File(getTempDirPath());
String modulePath = new File(tmpdirFile, fileName).getAbsolutePath();
String inputPath = new File(tmpdirFile, "input").getAbsolutePath();
Path outputPath = tmpdirFile.toPath().resolve("output").toAbsolutePath();
@ -502,7 +181,7 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
stderrVacuum.join();
String output = TestOutputReading.read(outputPath);
if ( stderrVacuum.toString().length()>0 ) {
this.stderrDuringParse = stderrVacuum.toString();
setParseErrors(stderrVacuum.toString());
}
return output;
}
@ -555,190 +234,6 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
return runtimeSrc.getPath();
}
private boolean isWindows() {
return System.getProperty("os.name").toLowerCase().contains("windows");
}
// void ambig(List<Message> msgs, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// ambig(msgs, 0, expectedAmbigAlts, expectedAmbigInput);
// }
// void ambig(List<Message> msgs, int i, int[] expectedAmbigAlts, String expectedAmbigInput)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, AmbiguityMessage.class);
// AmbiguityMessage a = (AmbiguityMessage)amsgs.get(i);
// if ( a==null ) assertNull(expectedAmbigAlts);
// else {
// assertEquals(a.conflictingAlts.toString(), Arrays.toString(expectedAmbigAlts));
// }
// assertEquals(expectedAmbigInput, a.input);
// }
// void unreachable(List<Message> msgs, int[] expectedUnreachableAlts)
// throws Exception
// {
// unreachable(msgs, 0, expectedUnreachableAlts);
// }
// void unreachable(List<Message> msgs, int i, int[] expectedUnreachableAlts)
// throws Exception
// {
// List<Message> amsgs = getMessagesOfType(msgs, UnreachableAltsMessage.class);
// UnreachableAltsMessage u = (UnreachableAltsMessage)amsgs.get(i);
// if ( u==null ) assertNull(expectedUnreachableAlts);
// else {
// assertEquals(u.conflictingAlts.toString(), Arrays.toString(expectedUnreachableAlts));
// }
// }
List<ANTLRMessage> getMessagesOfType(List<ANTLRMessage> msgs, Class<? extends ANTLRMessage> c) {
List<ANTLRMessage> filtered = new ArrayList<ANTLRMessage>();
for (ANTLRMessage m : msgs) {
if ( m.getClass() == c ) filtered.add(m);
}
return filtered;
}
void checkRuleATN(Grammar g, String ruleName, String expecting) {
ParserATNFactory f = new ParserATNFactory(g);
ATN atn = f.createATN();
DOTGenerator dot = new DOTGenerator(g);
System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));
Rule r = g.getRule(ruleName);
ATNState startState = atn.ruleToStartState[r.index];
ATNPrinter serializer = new ATNPrinter(g, startState);
String result = serializer.asString();
//System.out.print(result);
assertEquals(expecting, result);
}
public void testActions(String templates, String actionName, String action, String expected) throws org.antlr.runtime.RecognitionException {
int lp = templates.indexOf('(');
String name = templates.substring(0, lp);
STGroup group = new STGroupString(templates);
ST st = group.getInstanceOf(name);
st.add(actionName, action);
String grammar = st.render();
ErrorQueue equeue = new ErrorQueue();
Grammar g = new Grammar(grammar, equeue);
if ( g.ast!=null && !g.ast.hasErrors ) {
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
ATNFactory factory = new ParserATNFactory(g);
if ( g.isLexer() ) factory = new LexerATNFactory((LexerGrammar)g);
g.atn = factory.createATN();
CodeGenerator gen = new CodeGenerator(g);
ST outputFileST = gen.generateParser();
String output = outputFileST.render();
//System.out.println(output);
String b = "#" + actionName + "#";
int start = output.indexOf(b);
String e = "#end-" + actionName + "#";
int end = output.indexOf(e);
String snippet = output.substring(start+b.length(),end);
assertEquals(expected, snippet);
}
if ( equeue.size()>0 ) {
System.err.println(equeue.toString());
}
}
protected void checkGrammarSemanticsError(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception
{
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if ( equeue.size()!=1 ) {
System.err.println(equeue);
}
}
protected void checkGrammarSemanticsWarning(ErrorQueue equeue,
GrammarSemanticsMessage expectedMessage)
throws Exception
{
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.warnings.size(); i++) {
ANTLRMessage m = equeue.warnings.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertNotNull("no error; "+expectedMessage.getErrorType()+" expected", foundMsg);
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
assertEquals(Arrays.toString(expectedMessage.getArgs()), Arrays.toString(foundMsg.getArgs()));
if ( equeue.size()!=1 ) {
System.err.println(equeue);
}
}
protected void checkError(ErrorQueue equeue,
ANTLRMessage expectedMessage)
throws Exception
{
//System.out.println("errors="+equeue);
ANTLRMessage foundMsg = null;
for (int i = 0; i < equeue.errors.size(); i++) {
ANTLRMessage m = equeue.errors.get(i);
if (m.getErrorType()==expectedMessage.getErrorType() ) {
foundMsg = m;
}
}
assertTrue("no error; "+expectedMessage.getErrorType()+" expected", !equeue.errors.isEmpty());
assertTrue("too many errors; "+equeue.errors, equeue.errors.size()<=1);
assertNotNull("couldn't find expected error: "+expectedMessage.getErrorType(), foundMsg);
/*
assertTrue("error is not a GrammarSemanticsMessage",
foundMsg instanceof GrammarSemanticsMessage);
*/
assertArrayEquals(expectedMessage.getArgs(), foundMsg.getArgs());
}
public static class FilteringTokenStream extends CommonTokenStream {
public FilteringTokenStream(TokenSource src) { super(src); }
Set<Integer> hide = new HashSet<Integer>();
@Override
protected boolean sync(int i) {
if (!super.sync(i)) {
return false;
}
Token t = get(i);
if ( hide.contains(t.getType()) ) {
((WritableToken)t).setChannel(Token.HIDDEN_CHANNEL);
}
return true;
}
public void setTokenTypeChannel(int ttype, int channel) {
hide.add(ttype);
}
}
protected void mkdir(String dir) {
File f = new File(dir);
f.mkdirs();
}
protected abstract void writeParserTestFile(String parserName,
String lexerName,
String listenerName,
@ -751,213 +246,13 @@ public abstract class BasePythonTest implements RuntimeTestSupport {
protected abstract void writeLexerTestFile(String lexerName, boolean showDFA);
public void writeRecognizer(String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug, boolean trace) {
if ( parserName==null ) {
writeLexerTestFile(lexerName, debug);
}
else {
writeParserTestFile(parserName,
lexerName,
listenerName,
visitorName,
parserStartRuleName,
debug,
trace);
}
}
protected void eraseFiles(final String filesEndingWith) {
File tmpdirF = new File(tmpdir);
String[] files = tmpdirF.list();
for(int i = 0; files!=null && i < files.length; i++) {
if ( files[i].endsWith(filesEndingWith) ) {
new File(tmpdir+"/"+files[i]).delete();
}
}
}
protected void eraseFiles(File dir) {
String[] files = dir.list();
for(int i = 0; files!=null && i < files.length; i++) {
new File(dir,files[i]).delete();
}
}
@Override
public void eraseTempDir() {
boolean doErase = true;
String propName = getPropertyPrefix() + "-erase-test-dir";
String prop = System.getProperty(propName);
if(prop!=null && prop.length()>0)
doErase = Boolean.getBoolean(prop);
if(doErase) {
File tmpdirF = new File(tmpdir);
if ( tmpdirF.exists() ) {
eraseFiles(tmpdirF);
tmpdirF.delete();
}
}
}
protected void eraseTempPyCache() {
File tmpdirF = new File(tmpdir+"/__pycache__");
File tmpdirF = new File(getTempTestDir() + "/__pycache__");
if ( tmpdirF.exists() ) {
eraseFiles(tmpdirF);
eraseFilesInDir(tmpdirF);
tmpdirF.delete();
}
}
public String getFirstLineOfException() {
if ( this.stderrDuringParse ==null ) {
return null;
}
String[] lines = this.stderrDuringParse.split("\n");
String prefix="Exception in thread \"main\" ";
return lines[0].substring(prefix.length(),lines[0].length());
}
/**
* When looking at a result set that consists of a Map/HashTable
* we cannot rely on the output order, as the hashing algorithm or other aspects
* of the implementation may be different on different JDKs or platforms. Hence
* we take the Map, convert the keys to a List, sort them and Stringify the Map, which is a
* bit of a hack, but guarantees that we get the same order on all systems. We assume that
* the keys are strings.
*
* @param m The Map that contains keys we wish to return in sorted order
* @return A string that represents all the keys in sorted order.
*/
public <K, V> String sortMapToString(Map<K, V> m) {
// Pass in crap, and get nothing back
//
if (m == null) {
return null;
}
System.out.println("Map toString looks like: " + m.toString());
// Sort the keys in the Map
//
TreeMap<K, V> nset = new TreeMap<K, V>(m);
System.out.println("Tree map looks like: " + nset.toString());
return nset.toString();
}
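For reference, sortMapToString exists because HashMap iteration order differs between JDKs and platforms, so stringifying a map directly would make test output non-deterministic; re-keying through a TreeMap sorts by key first. A small self-contained Java example of the effect (illustration only, not part of this commit):
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
public class SortMapToStringDemo {
    public static void main(String[] args) {
        Map<String, Integer> m = new HashMap<>();
        m.put("b", 2);
        m.put("a", 1);
        m.put("c", 3);
        // m.toString() depends on HashMap's internal order and is not stable across JDKs.
        // A TreeMap copy sorts the keys, so the rendering is always the same:
        System.out.println(new TreeMap<>(m)); // {a=1, b=2, c=3}
    }
}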
public List<String> realElements(List<String> elements) {
return elements.subList(Token.MIN_USER_TOKEN_TYPE, elements.size());
}
public void assertNotNullOrEmpty(String message, String text) {
assertNotNull(message, text);
assertFalse(message, text.isEmpty());
}
public void assertNotNullOrEmpty(String text) {
assertNotNull(text);
assertFalse(text.isEmpty());
}
public static class IntTokenStream implements TokenStream {
IntegerList types;
int p=0;
public IntTokenStream(IntegerList types) { this.types = types; }
@Override
public void consume() { p++; }
@Override
public int LA(int i) { return LT(i).getType(); }
@Override
public int mark() {
return index();
}
@Override
public int index() { return p; }
@Override
public void release(int marker) {
seek(marker);
}
@Override
public void seek(int index) {
p = index;
}
@Override
public int size() {
return types.size();
}
@Override
public String getSourceName() {
return null;
}
@Override
public Token LT(int i) {
CommonToken t;
int rawIndex = p + i - 1;
if ( rawIndex>=types.size() ) t = new CommonToken(Token.EOF);
else t = new CommonToken(types.get(rawIndex));
t.setTokenIndex(rawIndex);
return t;
}
@Override
public Token get(int i) {
return new org.antlr.v4.runtime.CommonToken(types.get(i));
}
@Override
public TokenSource getTokenSource() {
return null;
}
@Override
public String getText() {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Interval interval) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(RuleContext ctx) {
throw new UnsupportedOperationException("can't give strings");
}
@Override
public String getText(Token start, Token stop) {
throw new UnsupportedOperationException("can't give strings");
}
}
/** Sort a list */
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/** Return map sorted by key */
public <K extends Comparable<? super K>,V> LinkedHashMap<K,V> sort(Map<K,V> data) {
LinkedHashMap<K,V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k : keys) {
dup.put(k, data.get(k));
}
return dup;
}
}
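For reference, the removed testSetUp above (and the Swift variant later in this diff) derives a unique per-test output directory from the class name, the thread name and a timestamp, unless an antlr-<language>-test-dir system property overrides it. A hedged Java sketch of that naming scheme as the shared base class presumably centralizes it (the base class itself is not part of this diff):
import java.io.File;
public class TempDirNamingSketch {
    static String computeTempDirPath(Class<?> testClass, String propertyPrefix) {
        String prop = System.getProperty(propertyPrefix + "-test-dir");
        if (prop != null && prop.length() > 0) {
            return prop; // explicit override, e.g. -Dantlr-python3-test-dir=/tmp/antlr
        }
        String childPath = String.format("%s-%s-%s",
                testClass.getSimpleName(),
                Thread.currentThread().getName(),
                System.currentTimeMillis());
        return new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
    }
    public static void main(String[] args) {
        System.out.println(computeTempDirPath(TempDirNamingSketch.class, "antlr-python3"));
    }
}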

View File

@ -47,7 +47,7 @@ public class BasePython2Test extends BasePythonTest {
: "") + "\n" + "if __name__ == '__main__':\n"
+ " main(sys.argv)\n" + "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}
@Override
@ -105,6 +105,6 @@ public class BasePython2Test extends BasePythonTest {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}
}

View File

@ -44,7 +44,7 @@ public class BasePython3Test extends BasePythonTest {
: "") + "\n" + "if __name__ == '__main__':\n"
+ " main(sys.argv)\n" + "\n");
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}
@Override
@ -102,6 +102,6 @@ public class BasePython3Test extends BasePythonTest {
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.py", outputFileST.render());
writeFile(getTempDirPath(), "Test.py", outputFileST.render());
}
}

View File

@ -7,38 +7,35 @@
package org.antlr.v4.test.runtime.swift;
import org.antlr.v4.runtime.misc.Pair;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.test.runtime.*;
import org.stringtemplate.v4.ST;
import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.net.URL;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.*;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.mkdir;
import static org.antlr.v4.test.runtime.RuntimeTestUtils.mkdir;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertTrue;
public class BaseSwiftTest implements RuntimeTestSupport {
public class BaseSwiftTest extends BaseRuntimeTestSupport implements RuntimeTestSupport {
private static final boolean USE_ARCH_ARM64 = false;
private static final boolean VERBOSE = false;
/**
* Path of the ANTLR runtime.
*/
private static String ANTLR_RUNTIME_PATH;
private static final String ANTLR_RUNTIME_PATH;
/**
* Absolute path to swift command.
*/
private static String SWIFT_CMD;
private static final String SWIFT_CMD;
/**
* Environment variable name for swift home.
@ -54,7 +51,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
// build swift runtime
URL swiftRuntime = loader.getResource("Swift");
if (swiftRuntime == null) {
throw new RuntimeException("Swift runtime file not found at:" + swiftRuntime.getPath());
throw new RuntimeException("Swift runtime file not found");
}
ANTLR_RUNTIME_PATH = swiftRuntime.getPath();
try {
@ -78,71 +75,16 @@ public class BaseSwiftTest implements RuntimeTestSupport {
});
}
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
private String stderrDuringParse;
/**
* Errors found while running antlr
*/
private StringBuilder antlrToolErrors;
@Override
protected String getPropertyPrefix() {
return "antrl4-swift";
}
/**
* Source files used in each small swift project.
*/
private Set<String> sourceFiles = new HashSet<>();
private final Set<String> sourceFiles = new HashSet<>();
@Override
public void testSetUp() throws Exception {
// new output dir for each test
String propName = "antlr-swift-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0) {
tmpdir = prop;
}
else {
String classSimpleName = getClass().getSimpleName();
String threadName = Thread.currentThread().getName();
String childPath = String.format("%s-%s-%s", classSimpleName, threadName, System.currentTimeMillis());
tmpdir = new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public void eraseTempDir() {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if (antlrToolErrors.length() == 0) {
return null;
}
return antlrToolErrors.toString();
}
@Override
public String execLexer(String grammarFileName, String grammarStr, String lexerName, String input, boolean showDFA) {
@ -150,12 +92,12 @@ public class BaseSwiftTest implements RuntimeTestSupport {
grammarStr,
null,
lexerName);
writeFile(tmpdir, "input", input);
writeFile(getTempDirPath(), "input", input);
writeLexerTestFile(lexerName, showDFA);
addSourceFiles("main.swift");
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
String projectDir = new File(getTempTestDir(), projectName).getAbsolutePath();
try {
buildProject(projectDir, projectName);
return execTest(projectDir, projectName);
@ -173,7 +115,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
parserName,
lexerName,
"-visitor");
writeFile(getTmpDir(), "input", input);
writeFile(getTempDirPath(), "input", input);
return execParser(parserName,
lexerName,
startRuleName,
@ -184,7 +126,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
try {
Pair<String, String> output = runProcess(projectDir, "./.build/debug/" + projectName, "input");
if (output.b.length() > 0) {
stderrDuringParse = output.b;
setParseErrors(output.b);
}
String stdout = output.a;
return stdout.length() > 0 ? stdout : null;
@ -204,10 +146,10 @@ public class BaseSwiftTest implements RuntimeTestSupport {
mkdir(projectDir);
fastFailRunProcess(projectDir, SWIFT_CMD, "package", "init", "--type", "executable");
for (String sourceFile: sourceFiles) {
String absPath = getTmpDir() + "/" + sourceFile;
fastFailRunProcess(getTmpDir(), "mv", "-f", absPath, projectDir + "/Sources/" + projectName);
String absPath = new File(getTempTestDir(), sourceFile).getAbsolutePath();
fastFailRunProcess(getTempDirPath(), "mv", "-f", absPath, projectDir + "/Sources/" + projectName);
}
fastFailRunProcess(getTmpDir(), "mv", "-f", "input", projectDir);
fastFailRunProcess(getTempDirPath(), "mv", "-f", "input", projectDir);
String dylibPath = ANTLR_RUNTIME_PATH + "/.build/debug/";
// System.err.println(dylibPath);
Pair<String, String> buildResult = runProcess(projectDir, SWIFT_CMD, "build",
@ -221,31 +163,99 @@ public class BaseSwiftTest implements RuntimeTestSupport {
}
}
static Boolean IS_MAC_ARM_64 = null;
private static boolean isMacOSArm64() {
if (IS_MAC_ARM_64 == null) {
IS_MAC_ARM_64 = computeIsMacOSArm64();
System.err.println("IS_MAC_ARM_64 = " + IS_MAC_ARM_64);
}
return IS_MAC_ARM_64;
}
private static boolean computeIsMacOSArm64() {
String os = System.getenv("RUNNER_OS");
if(os==null || !os.equalsIgnoreCase("macos"))
return false;
try {
Process p = Runtime.getRuntime().exec("uname -a");
BufferedReader in = new BufferedReader(new InputStreamReader(p.getInputStream()));
String uname = in.readLine();
return uname.contains("_ARM64_");
} catch (IOException e) {
e.printStackTrace();
return false;
}
}
private static Pair<String,String> runProcess(String execPath, String... args) throws IOException, InterruptedException {
Process process = Runtime.getRuntime().exec(args, null, new File(execPath));
List<String> argsWithArch = new ArrayList<>();
if(USE_ARCH_ARM64 && isMacOSArm64())
argsWithArch.addAll(Arrays.asList("arch", "-arm64"));
argsWithArch.addAll(Arrays.asList(args));
if(VERBOSE)
System.err.println("Executing " + argsWithArch.toString() + " " + execPath);
final Process process = Runtime.getRuntime().exec(argsWithArch.toArray(new String[0]), null, new File(execPath));
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stdoutVacuum.start();
stderrVacuum.start();
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
try {
process.destroy();
} catch(Exception e) {
e.printStackTrace(System.err);
}
}
}, 120_000);
int status = process.waitFor();
timer.cancel();
stdoutVacuum.join();
stderrVacuum.join();
if(VERBOSE)
System.err.println("Done executing " + argsWithArch.toString() + " " + execPath);
if (status != 0) {
System.err.println("Process exited with status " + status);
throw new IOException("Process exited with status " + status + ":\n" + stdoutVacuum.toString() + "\n" + stderrVacuum.toString());
}
return new Pair<>(stdoutVacuum.toString(), stderrVacuum.toString());
}
private static void fastFailRunProcess(String workingDir, String... command) throws IOException, InterruptedException {
ProcessBuilder builder = new ProcessBuilder(command);
List<String> argsWithArch = new ArrayList<>();
if(USE_ARCH_ARM64 && isMacOSArm64())
argsWithArch.addAll(Arrays.asList("arch", "-arm64"));
argsWithArch.addAll(Arrays.asList(command));
if(VERBOSE)
System.err.println("Executing " + argsWithArch.toString() + " " + workingDir);
ProcessBuilder builder = new ProcessBuilder(argsWithArch.toArray(new String[0]));
builder.directory(new File(workingDir));
Process p = builder.start();
int status = p.waitFor();
final Process process = builder.start();
Timer timer = new Timer();
timer.schedule(new TimerTask() {
@Override
public void run() {
try {
process.destroy();
} catch(Exception e) {
e.printStackTrace(System.err);
}
}
}, 120_000);
int status = process.waitFor();
timer.cancel();
if(VERBOSE)
System.err.println("Done executing " + argsWithArch.toString() + " " + workingDir);
if (status != 0) {
System.err.println("Process exited with status " + status);
throw new IOException("Process exited with status " + status);
}
}
@SuppressWarnings("SameParameterValue")
private String execParser(String parserName,
String lexerName,
String parserStartRuleName,
@ -265,7 +275,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
addSourceFiles("main.swift");
String projectName = "testcase-" + System.currentTimeMillis();
String projectDir = getTmpDir() + "/" + projectName;
String projectDir = new File(getTempTestDir(), projectName).getAbsolutePath();
try {
buildProject(projectDir, projectName);
return execTest(projectDir, projectName);
@ -324,13 +334,13 @@ public class BaseSwiftTest implements RuntimeTestSupport {
"parser.setInterpreter(profiler)");
}
else {
outputFileST.add("profile", new ArrayList<Object>());
outputFileST.add("profile", new ArrayList<>());
}
outputFileST.add("createParser", createParserST);
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "main.swift", outputFileST.render());
writeFile(getTempDirPath(), "main.swift", outputFileST.render());
}
private void writeLexerTestFile(String lexerName, boolean showDFA) {
@ -352,7 +362,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
(showDFA ? "print(lex.getInterpreter().getDFA(Lexer.DEFAULT_MODE).toLexerString(), terminator: \"\" )\n" : ""));
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "main.swift", outputFileST.render());
writeFile(getTempDirPath(), "main.swift", outputFileST.render());
}
/**
@ -363,7 +373,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
String parserName,
String lexerName,
String... extraOptions) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "Swift", grammarFileName, grammarStr, false, extraOptions);
ErrorQueue equeue = antlrOnString(getTempDirPath(), "Swift", grammarFileName, grammarStr, false, extraOptions);
assertTrue(equeue.errors.isEmpty());
// System.out.println(getTmpDir());
@ -387,6 +397,7 @@ public class BaseSwiftTest implements RuntimeTestSupport {
files.add(grammarName + "BaseVisitor.swift");
}
}
addSourceFiles(files.toArray(new String[files.size()]));
addSourceFiles(files.toArray(new String[0]));
}
}
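For reference, the reworked runProcess/fastFailRunProcess above wrap the spawned swift process in a java.util.Timer that calls destroy() after 120 seconds, so a hung compile or test cannot stall the CI job indefinitely. A stripped-down Java sketch of that watchdog pattern (the helper name and echo command are illustrative, not from this commit):
import java.io.IOException;
import java.util.Timer;
import java.util.TimerTask;
public class ProcessWatchdogSketch {
    static int runWithTimeout(ProcessBuilder builder, long timeoutMillis)
            throws IOException, InterruptedException {
        final Process process = builder.start();
        Timer timer = new Timer();
        timer.schedule(new TimerTask() {
            @Override
            public void run() {
                process.destroy(); // kill the child if it is still running when the timer fires
            }
        }, timeoutMillis);
        try {
            return process.waitFor(); // normal completion reaches the cancel below
        } finally {
            timer.cancel();
        }
    }
    public static void main(String[] args) throws Exception {
        int status = runWithTimeout(new ProcessBuilder("echo", "hello").inheritIO(), 120_000);
        System.out.println("exit status: " + status);
    }
}
A newer JDK could use process.waitFor(timeout, unit) plus destroyForcibly() instead, but the Timer form mirrors what the commit adds.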

View File

@ -32,6 +32,7 @@
<GenerateAssemblyFileVersionAttribute>false</GenerateAssemblyFileVersionAttribute>
<GenerateAssemblyInformationalVersionAttribute>false</GenerateAssemblyInformationalVersionAttribute>
<RootNamespace>Antlr4.Runtime</RootNamespace>
<GeneratePackageOnBuild>true</GeneratePackageOnBuild>
</PropertyGroup>
<PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Debug|AnyCPU' ">
<DebugSymbols>true</DebugSymbols>

View File

@ -3,7 +3,6 @@
* can be found in the LICENSE.txt file in the project root.
*/
using System.Collections.Generic;
using Antlr4.Runtime.Atn;
using Antlr4.Runtime.Misc;
using Antlr4.Runtime.Sharpen;
@ -11,12 +10,9 @@ namespace Antlr4.Runtime.Atn
{
public class LL1Analyzer
{
/// <summary>
/// Special value added to the lookahead sets to indicate that we hit
/// a predicate during analysis if
/// <c>seeThruPreds==false</c>
/// .
/// </summary>
/** Special value added to the lookahead sets to indicate that we hit
* a predicate during analysis if {@code seeThruPreds==false}.
*/
public const int HitPred = TokenConstants.InvalidType;
[NotNull]
@ -27,25 +23,16 @@ namespace Antlr4.Runtime.Atn
this.atn = atn;
}
/// <summary>
/// Calculates the SLL(1) expected lookahead set for each outgoing transition
/// of an
/// <see cref="ATNState"/>
/// . The returned array has one element for each
/// outgoing transition in
/// <paramref name="s"/>
/// . If the closure from transition
/// <em>i</em> leads to a semantic predicate before matching a symbol, the
/// element at index <em>i</em> of the result will be
/// <see langword="null"/>
/// .
/// </summary>
/// <param name="s">the ATN state</param>
/// <returns>
/// the expected symbols for each outgoing transition of
/// <paramref name="s"/>
/// .
/// </returns>
/**
* Calculates the SLL(1) expected lookahead set for each outgoing transition
* of an {@link ATNState}. The returned array has one element for each
* outgoing transition in {@code s}. If the closure from transition
* <em>i</em> leads to a semantic predicate before matching a symbol, the
* element at index <em>i</em> of the result will be {@code null}.
*
* @param s the ATN state
* @return the expected symbols for each outgoing transition of {@code s}.
*/
[return: Nullable]
public virtual IntervalSet[] GetDecisionLookahead(ATNState s)
{
@ -61,7 +48,7 @@ namespace Antlr4.Runtime.Atn
HashSet<ATNConfig> lookBusy = new HashSet<ATNConfig>();
bool seeThruPreds = false;
// fail to get lookahead upon pred
Look(s.Transition(alt).target, null, PredictionContext.EMPTY, look[alt], lookBusy, new BitSet(), seeThruPreds, false);
Look_(s.Transition(alt).target, null, PredictionContext.EMPTY, look[alt], lookBusy, new BitSet(), seeThruPreds, false);
// Wipe out lookahead for this alternative if we found nothing
// or we had a predicate when we !seeThruPreds
if (look[alt].Count == 0 || look[alt].Contains(HitPred))
@ -72,190 +59,88 @@ namespace Antlr4.Runtime.Atn
return look;
}
/// <summary>
/// Compute set of tokens that can follow
/// <paramref name="s"/>
/// in the ATN in the
/// specified
/// <paramref name="ctx"/>
/// .
/// <p>If
/// <paramref name="ctx"/>
/// is
/// <see langword="null"/>
/// and the end of the rule containing
/// <paramref name="s"/>
/// is reached,
/// <see cref="TokenConstants.EPSILON"/>
/// is added to the result set.
/// If
/// <paramref name="ctx"/>
/// is not
/// <see langword="null"/>
/// and the end of the outermost rule is
/// reached,
/// <see cref="TokenConstants.EOF"/>
/// is added to the result set.</p>
/// </summary>
/// <param name="s">the ATN state</param>
/// <param name="ctx">
/// the complete parser context, or
/// <see langword="null"/>
/// if the context
/// should be ignored
/// </param>
/// <returns>
/// The set of tokens that can follow
/// <paramref name="s"/>
/// in the ATN in the
/// specified
/// <paramref name="ctx"/>
/// .
/// </returns>
/**
* Compute set of tokens that can follow {@code s} in the ATN in the
* specified {@code ctx}.
*
* <p>If {@code ctx} is {@code null} and the end of the rule containing
* {@code s} is reached, {@link Token#EPSILON} is added to the result set.
* If {@code ctx} is not {@code null} and the end of the outermost rule is
* reached, {@link Token#EOF} is added to the result set.</p>
*
* @param s the ATN state
* @param ctx the complete parser context, or {@code null} if the context
* should be ignored
*
* @return The set of tokens that can follow {@code s} in the ATN in the
* specified {@code ctx}.
*/
[return: NotNull]
public virtual IntervalSet Look(ATNState s, RuleContext ctx)
{
return Look(s, null, ctx);
}
/// <summary>
/// Compute set of tokens that can follow
/// <paramref name="s"/>
/// in the ATN in the
/// specified
/// <paramref name="ctx"/>
/// .
/// <p>If
/// <paramref name="ctx"/>
/// is
/// <see langword="null"/>
/// and the end of the rule containing
/// <paramref name="s"/>
/// is reached,
/// <see cref="TokenConstants.EPSILON"/>
/// is added to the result set.
/// If
/// <paramref name="ctx"/>
/// is not
/// <c>PredictionContext#EMPTY_LOCAL</c>
/// and the end of the outermost rule is
/// reached,
/// <see cref="TokenConstants.EOF"/>
/// is added to the result set.</p>
/// </summary>
/// <param name="s">the ATN state</param>
/// <param name="stopState">
/// the ATN state to stop at. This can be a
/// <see cref="BlockEndState"/>
/// to detect epsilon paths through a closure.
/// </param>
/// <param name="ctx">
/// the complete parser context, or
/// <see langword="null"/>
/// if the context
/// should be ignored
/// </param>
/// <returns>
/// The set of tokens that can follow
/// <paramref name="s"/>
/// in the ATN in the
/// specified
/// <paramref name="ctx"/>
/// .
/// </returns>
/**
* Compute set of tokens that can follow {@code s} in the ATN in the
* specified {@code ctx}.
*
* <p>If {@code ctx} is {@code null} and the end of the rule containing
* {@code s} is reached, {@link Token#EPSILON} is added to the result set.
* If {@code ctx} is not {@code null} and the end of the outermost rule is
* reached, {@link Token#EOF} is added to the result set.</p>
*
* @param s the ATN state
* @param stopState the ATN state to stop at. This can be a
* {@link BlockEndState} to detect epsilon paths through a closure.
* @param ctx the complete parser context, or {@code null} if the context
* should be ignored
*
* @return The set of tokens that can follow {@code s} in the ATN in the
* specified {@code ctx}.
*/
[return: NotNull]
public virtual IntervalSet Look(ATNState s, ATNState stopState, RuleContext ctx)
{
IntervalSet r = new IntervalSet();
bool seeThruPreds = true;
PredictionContext lookContext = ctx != null ? PredictionContext.FromRuleContext(s.atn, ctx) : null;
Look(s, stopState, lookContext, r, new HashSet<ATNConfig>(), new BitSet(), seeThruPreds, true);
Look_(s, stopState, lookContext, r, new HashSet<ATNConfig>(), new BitSet(), seeThruPreds, true);
return r;
}
/// <summary>
/// Compute set of tokens that can follow
/// <paramref name="s"/>
/// in the ATN in the
/// specified
/// <paramref name="ctx"/>
/// .
/// <p/>
/// If
/// <paramref name="ctx"/>
/// is
/// <see cref="PredictionContext.EMPTY"/>
/// and
/// <paramref name="stopState"/>
/// or the end of the rule containing
/// <paramref name="s"/>
/// is reached,
/// <see cref="TokenConstants.EPSILON"/>
/// is added to the result set. If
/// <paramref name="ctx"/>
/// is not
/// <see cref="PredictionContext.EMPTY"/>
/// and
/// <paramref name="addEOF"/>
/// is
/// <see langword="true"/>
/// and
/// <paramref name="stopState"/>
/// or the end of the outermost rule is reached,
/// <see cref="TokenConstants.EOF"/>
/// is added to the result set.
/// </summary>
/// <param name="s">the ATN state.</param>
/// <param name="stopState">
/// the ATN state to stop at. This can be a
/// <see cref="BlockEndState"/>
/// to detect epsilon paths through a closure.
/// </param>
/// <param name="ctx">
/// The outer context, or
/// <see cref="PredictionContext.EMPTY"/>
/// if
/// the outer context should not be used.
/// </param>
/// <param name="look">The result lookahead set.</param>
/// <param name="lookBusy">
/// A set used for preventing epsilon closures in the ATN
/// from causing a stack overflow. Outside code should pass
/// <c>new HashSet&lt;ATNConfig&gt;</c>
/// for this argument.
/// </param>
/// <param name="calledRuleStack">
/// A set used for preventing left recursion in the
/// ATN from causing a stack overflow. Outside code should pass
/// <c>new BitSet()</c>
/// for this argument.
/// </param>
/// <param name="seeThruPreds">
///
/// <see langword="true"/>
/// to true semantic predicates as
/// implicitly
/// <see langword="true"/>
/// and "see through them", otherwise
/// <see langword="false"/>
/// to treat semantic predicates as opaque and add
/// <see cref="HitPred"/>
/// to the
/// result if one is encountered.
/// </param>
/// <param name="addEOF">
/// Add
/// <see cref="TokenConstants.EOF"/>
/// to the result if the end of the
/// outermost context is reached. This parameter has no effect if
/// <paramref name="ctx"/>
/// is
/// <see cref="PredictionContext.EMPTY"/>
/// .
/// </param>
protected internal virtual void Look(ATNState s, ATNState stopState, PredictionContext ctx, IntervalSet look, HashSet<ATNConfig> lookBusy, BitSet calledRuleStack, bool seeThruPreds, bool addEOF)
/**
* Compute set of tokens that can follow {@code s} in the ATN in the
* specified {@code ctx}.
*
* <p>If {@code ctx} is {@code null} and {@code stopState} or the end of the
* rule containing {@code s} is reached, {@link Token#EPSILON} is added to
* the result set. If {@code ctx} is not {@code null} and {@code addEOF} is
* {@code true} and {@code stopState} or the end of the outermost rule is
* reached, {@link Token#EOF} is added to the result set.</p>
*
* @param s the ATN state.
* @param stopState the ATN state to stop at. This can be a
* {@link BlockEndState} to detect epsilon paths through a closure.
* @param ctx The outer context, or {@code null} if the outer context should
* not be used.
* @param look The result lookahead set.
* @param lookBusy A set used for preventing epsilon closures in the ATN
* from causing a stack overflow. Outside code should pass
* {@code new HashSet<ATNConfig>} for this argument.
* @param calledRuleStack A set used for preventing left recursion in the
* ATN from causing a stack overflow. Outside code should pass
* {@code new BitSet()} for this argument.
* @param seeThruPreds {@code true} to treat semantic predicates as
* implicitly {@code true} and "see through them", otherwise {@code false}
* to treat semantic predicates as opaque and add {@link #HIT_PRED} to the
* result if one is encountered.
* @param addEOF Add {@link Token#EOF} to the result if the end of the
* outermost context is reached. This parameter has no effect if {@code ctx}
* is {@code null}.
*/
protected internal virtual void Look_(ATNState s, ATNState stopState, PredictionContext ctx, IntervalSet look, HashSet<ATNConfig> lookBusy, BitSet calledRuleStack, bool seeThruPreds, bool addEOF)
{
// System.out.println("_LOOK("+s.stateNumber+", ctx="+ctx);
ATNConfig c = new ATNConfig(s, 0, ctx);
if (!lookBusy.Add(c))
{
@ -268,7 +153,8 @@ namespace Antlr4.Runtime.Atn
look.Add(TokenConstants.EPSILON);
return;
}
else if (ctx.IsEmpty && addEOF) {
else if (ctx.IsEmpty && addEOF)
{
look.Add(TokenConstants.EOF);
return;
}
@ -287,21 +173,21 @@ namespace Antlr4.Runtime.Atn
}
if (ctx != PredictionContext.EMPTY)
{
bool removed = calledRuleStack.Get(s.ruleIndex);
try
{
calledRuleStack.Clear(s.ruleIndex);
for (int i = 0; i < ctx.Size; i++)
{
ATNState returnState = atn.states[ctx.GetReturnState(i)];
bool removed = calledRuleStack.Get(returnState.ruleIndex);
try
{
calledRuleStack.Clear(returnState.ruleIndex);
Look(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
Look_(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
}
finally
{
if (removed)
{
calledRuleStack.Set(returnState.ruleIndex);
}
calledRuleStack.Set(s.ruleIndex);
}
}
return;
@ -311,7 +197,7 @@ namespace Antlr4.Runtime.Atn
for (int i_1 = 0; i_1 < n; i_1++)
{
Transition t = s.Transition(i_1);
if (t is RuleTransition)
if (t.GetType() == typeof(RuleTransition))
{
RuleTransition ruleTransition = (RuleTransition)t;
if (calledRuleStack.Get(ruleTransition.ruleIndex))
@ -322,35 +208,29 @@ namespace Antlr4.Runtime.Atn
try
{
calledRuleStack.Set(ruleTransition.target.ruleIndex);
Look(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
Look_(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
finally
{
calledRuleStack.Clear(ruleTransition.target.ruleIndex);
}
}
else
{
if (t is AbstractPredicateTransition)
else if (t is AbstractPredicateTransition)
{
if (seeThruPreds)
{
Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
Look_(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
else
{
look.Add(HitPred);
}
}
else
else if (t.IsEpsilon)
{
if (t.IsEpsilon)
{
Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
Look_(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
else
{
if (t is WildcardTransition)
else if (t.GetType() == typeof(WildcardTransition))
{
look.AddAll(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
}
@ -369,7 +249,4 @@ namespace Antlr4.Runtime.Atn
}
}
}
}
}
}
}
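For reference, the functional change in Look_ above, and in the C++, Go and JavaScript analyzers further down, is where the calledRuleStack bit is saved and cleared while walking the return states of a non-empty prediction context: the current state's rule bit appears to be cleared once around the whole loop over stack tops and restored afterwards, rather than clearing each return state's rule bit inside the loop. A simplified, self-contained Java sketch of that save/clear/restore shape (all type names here are stand-ins, not the runtime API):
import java.util.BitSet;
public class CalledRuleStackSketch {
    // Minimal stand-in for a prediction context: one return-rule index and parent per stack top.
    static class Ctx {
        int[] returnRuleIndexes;
        Ctx[] parents;
    }
    // Clear the *current* rule's bit once, recurse over every stack top, then restore it.
    static void lookAtReturnStates(int currentRuleIndex, Ctx ctx, BitSet calledRuleStack) {
        boolean removed = calledRuleStack.get(currentRuleIndex);
        try {
            calledRuleStack.clear(currentRuleIndex);
            for (int i = 0; i < ctx.returnRuleIndexes.length; i++) {
                look(ctx.returnRuleIndexes[i], ctx.parents[i], calledRuleStack);
            }
        } finally {
            if (removed) {
                calledRuleStack.set(currentRuleIndex);
            }
        }
    }
    static void look(int ruleIndex, Ctx ctx, BitSet calledRuleStack) {
        // placeholder for the real recursive LOOK computation
    }
    public static void main(String[] args) {
        Ctx ctx = new Ctx();
        ctx.returnRuleIndexes = new int[] {1, 2};
        ctx.parents = new Ctx[] {null, null};
        BitSet stack = new BitSet();
        stack.set(0);
        lookAtReturnStates(0, ctx, stack);
        System.out.println(stack); // {0} -- the caller's bit is restored after the loop
    }
}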

View File

@ -42,6 +42,21 @@ namespace Antlr4.Runtime
protected internal IntervalSet lastErrorStates;
/**
* This field is used to propagate information about the lookahead following
* the previous match. Since prediction prefers completing the current rule
* to error recovery efforts, error reporting may occur later than the
* original point where it was discoverable. The original context is used to
* compute the true expected sets as though the reporting occurred as early
* as possible.
*/
protected ParserRuleContext nextTokensContext;
/**
* @see #nextTokensContext
*/
protected int nextTokensState;
/// <summary>
/// <inheritDoc/>
/// <p>The default implementation simply calls
@ -264,8 +279,22 @@ namespace Antlr4.Runtime
int la = tokens.LA(1);
// try cheaper subset first; might get lucky. seems to shave a wee bit off
var nextTokens = recognizer.Atn.NextTokens(s);
if (nextTokens.Contains(TokenConstants.EPSILON) || nextTokens.Contains(la))
if (nextTokens.Contains(la))
{
nextTokensContext = null;
nextTokensState = ATNState.InvalidStateNumber;
return;
}
if (nextTokens.Contains(TokenConstants.EPSILON))
{
if (nextTokensContext == null)
{
// It's possible the next token won't match; information tracked
// by sync is restricted for performance.
nextTokensContext = recognizer.Context;
nextTokensState = recognizer.State;
}
return;
}
switch (s.StateType)
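For reference, the sync() change above (mirrored in the JavaScript DefaultErrorStrategy later in this diff) splits the old combined check: if the current lookahead token is in the expected set, the remembered nextTokensContext is cleared; if only EPSILON is present, the current context and state are recorded so a later error report can compute the expected tokens as of this earlier, more informative point. A hedged Java-style sketch of that branch structure (types and constants simplified, not the actual runtime API):
import java.util.HashSet;
import java.util.Set;
public class SyncSketch {
    static final int EPSILON = -2;        // stand-in for Token.EPSILON
    static final int INVALID_STATE = -1;  // stand-in for ATNState.INVALID_STATE_NUMBER
    Object nextTokensContext;             // remembered parser context, if any
    int nextTokensState = INVALID_STATE;
    void sync(Set<Integer> nextTokens, int la, Object currentContext, int currentState) {
        if (nextTokens.contains(la)) {
            // lookahead matches: forget any previously remembered context
            nextTokensContext = null;
            nextTokensState = INVALID_STATE;
            return;
        }
        if (nextTokens.contains(EPSILON)) {
            if (nextTokensContext == null) {
                // the next token may still fail to match; sync only tracks limited info,
                // so remember where we were for a later, better error report
                nextTokensContext = currentContext;
                nextTokensState = currentState;
            }
            return;
        }
        // otherwise fall through to the state-type specific recovery (not shown)
    }
    public static void main(String[] args) {
        SyncSketch s = new SyncSketch();
        Set<Integer> expected = new HashSet<>();
        expected.add(EPSILON);
        s.sync(expected, 42, "ctx@ruleA", 7);
        System.out.println(s.nextTokensContext + " / state " + s.nextTokensState); // ctx@ruleA / state 7
    }
}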

View File

@ -3,5 +3,7 @@
* can be found in the LICENSE.txt file in the project root.
*/
using System;
using System.Reflection;
[assembly: CLSCompliant(true)]
[assembly: AssemblyVersion("4.9.1")]

View File

@ -100,18 +100,16 @@ void LL1Analyzer::_LOOK(ATNState *s, ATNState *stopState, Ref<PredictionContext>
}
if (ctx != PredictionContext::EMPTY) {
bool removed = calledRuleStack.test(s->ruleIndex);
calledRuleStack[s->ruleIndex] = false;
auto onExit = finally([removed, &calledRuleStack, s] {
if (removed) {
calledRuleStack.set(s->ruleIndex);
}
});
// run thru all possible stack tops in ctx
for (size_t i = 0; i < ctx->size(); i++) {
ATNState *returnState = _atn.states[ctx->getReturnState(i)];
bool removed = calledRuleStack.test(returnState->ruleIndex);
auto onExit = finally([removed, &calledRuleStack, returnState] {
if (removed) {
calledRuleStack.set(returnState->ruleIndex);
}
});
calledRuleStack[returnState->ruleIndex] = false;
_LOOK(returnState, stopState, ctx->getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
}
return;

View File

@ -112,16 +112,6 @@ func (la *LL1Analyzer) Look(s, stopState ATNState, ctx RuleContext) *IntervalSet
func (la *LL1Analyzer) look2(s, stopState ATNState, ctx PredictionContext, look *IntervalSet, lookBusy *Set, calledRuleStack *BitSet, seeThruPreds, addEOF bool, i int) {
returnState := la.atn.states[ctx.getReturnState(i)]
removed := calledRuleStack.contains(returnState.GetRuleIndex())
defer func() {
if removed {
calledRuleStack.add(returnState.GetRuleIndex())
}
}()
calledRuleStack.remove(returnState.GetRuleIndex())
la.look1(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
}
@ -158,6 +148,13 @@ func (la *LL1Analyzer) look1(s, stopState ATNState, ctx PredictionContext, look
}
if ctx != BasePredictionContextEMPTY {
removed := calledRuleStack.contains(s.GetRuleIndex())
defer func() {
if removed {
calledRuleStack.add(s.GetRuleIndex())
}
}()
calledRuleStack.remove(s.GetRuleIndex())
// run thru all possible stack tops in ctx
for i := 0; i < ctx.length(); i++ {
returnState := la.atn.states[ctx.getReturnState(i)]

View File

@ -73,4 +73,4 @@ const CharStreams = {
}
};
module.exports = CharStreams
module.exports = CharStreams;

View File

@ -99,11 +99,11 @@ class IntervalSet {
const r = this.intervals[k + 1];
// if r contained in l
if (l.stop >= r.stop) {
this.intervals.pop(k + 1);
this.intervals = this.intervals.splice(k + 1, 1);
this.reduce(k);
} else if (l.stop >= r.start) {
this.intervals[k] = new Interval(l.start, r.stop);
this.intervals.pop(k + 1);
this.intervals = this.intervals.splice(k + 1, 1);
}
}
}

View File

@ -128,17 +128,17 @@ class LL1Analyzer {
return;
}
if (ctx !== PredictionContext.EMPTY) {
// run thru all possible stack tops in ctx
for(let i=0; i<ctx.length; i++) {
const returnState = this.atn.states[ctx.getReturnState(i)];
const removed = calledRuleStack.contains(returnState.ruleIndex);
const removed = calledRuleStack.contains(s.ruleIndex);
try {
calledRuleStack.remove(returnState.ruleIndex);
calledRuleStack.remove(s.ruleIndex);
// run thru all possible stack tops in ctx
for (let i = 0; i < ctx.length; i++) {
const returnState = this.atn.states[ctx.getReturnState(i)];
this._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
} finally {
if (removed) {
calledRuleStack.add(returnState.ruleIndex);
}
}finally {
if (removed) {
calledRuleStack.add(s.ruleIndex);
}
}
return;

View File

@ -423,12 +423,12 @@ function titleCase(str) {
function equalArrays(a, b) {
if (!Array.isArray(a) || !Array.isArray(b))
return false;
if (a == b)
if (a === b)
return true;
if (a.length != b.length)
if (a.length !== b.length)
return false;
for (let i = 0; i < a.length; i++) {
if (a[i] == b[i])
if (a[i] === b[i])
continue;
if (!a[i].equals || !a[i].equals(b[i]))
return false;

View File

@ -156,7 +156,7 @@ class LexerATNConfig extends ATNConfig {
equals(other) {
return this === other ||
(other instanceof LexerATNConfig &&
this.passedThroughNonGreedyDecision == other.passedThroughNonGreedyDecision &&
this.passedThroughNonGreedyDecision === other.passedThroughNonGreedyDecision &&
(this.lexerActionExecutor ? this.lexerActionExecutor.equals(other.lexerActionExecutor) : !other.lexerActionExecutor) &&
super.equals(other));
}

View File

@ -589,7 +589,7 @@ class ParserATNSimulator extends ATNSimulator {
}
const fullCtx = true;
let foundExactAmbig = false;
let reach = null;
let reach;
let previous = s0;
input.seek(startIndex);
let t = input.LA(1);
@ -1253,7 +1253,7 @@ class ParserATNSimulator extends ATNSimulator {
// both epsilon transitions and non-epsilon transitions.
}
for(let i = 0;i<p.transitions.length; i++) {
if(i==0 && this.canDropLoopEntryEdgeInLeftRecursiveRule(config))
if(i === 0 && this.canDropLoopEntryEdgeInLeftRecursiveRule(config))
continue;
const t = p.transitions[i];
@ -1308,9 +1308,9 @@ class ParserATNSimulator extends ATNSimulator {
// the context has an empty stack case. If so, it would mean
// global FOLLOW so we can't perform optimization
// Are we the special loop entry/exit state? or SLL wildcard
if(p.stateType != ATNState.STAR_LOOP_ENTRY)
if(p.stateType !== ATNState.STAR_LOOP_ENTRY)
return false;
if(p.stateType != ATNState.STAR_LOOP_ENTRY || !p.isPrecedenceDecision ||
if(p.stateType !== ATNState.STAR_LOOP_ENTRY || !p.isPrecedenceDecision ||
config.context.isEmpty() || config.context.hasEmptyPath())
return false;
@ -1318,7 +1318,7 @@ class ParserATNSimulator extends ATNSimulator {
const numCtxs = config.context.length;
for(let i=0; i<numCtxs; i++) { // for each stack context
const returnState = this.atn.states[config.context.getReturnState(i)];
if (returnState.ruleIndex != p.ruleIndex)
if (returnState.ruleIndex !== p.ruleIndex)
return false;
}
@ -1332,29 +1332,29 @@ class ParserATNSimulator extends ATNSimulator {
const returnStateNumber = config.context.getReturnState(i);
const returnState = this.atn.states[returnStateNumber];
// all states must have single outgoing epsilon edge
if (returnState.transitions.length != 1 || !returnState.transitions[0].isEpsilon)
if (returnState.transitions.length !== 1 || !returnState.transitions[0].isEpsilon)
return false;
// Look for prefix op case like 'not expr', (' type ')' expr
const returnStateTarget = returnState.transitions[0].target;
if ( returnState.stateType == ATNState.BLOCK_END && returnStateTarget == p )
if ( returnState.stateType === ATNState.BLOCK_END && returnStateTarget === p )
continue;
// Look for 'expr op expr' or case where expr's return state is block end
// of (...)* internal block; the block end points to loop back
// which points to p but we don't need to check that
if ( returnState == blockEndState )
if ( returnState === blockEndState )
continue;
// Look for ternary expr ? expr : expr. The return state points at block end,
// which points at loop entry state
if ( returnStateTarget == blockEndState )
if ( returnStateTarget === blockEndState )
continue;
// Look for complex prefix 'between expr and expr' case where 2nd expr's
// return state points at block end state of (...)* internal block
if (returnStateTarget.stateType == ATNState.BLOCK_END && returnStateTarget.transitions.length == 1
&& returnStateTarget.transitions[0].isEpsilon && returnStateTarget.transitions[0].target == p)
if (returnStateTarget.stateType === ATNState.BLOCK_END && returnStateTarget.transitions.length === 1
&& returnStateTarget.transitions[0].isEpsilon && returnStateTarget.transitions[0].target === p)
continue;
// anything else ain't conforming
@ -1401,7 +1401,7 @@ class ParserATNSimulator extends ATNSimulator {
actionTransition(config, t) {
if (this.debug) {
const index = t.actionIndex==-1 ? 65535 : t.actionIndex;
const index = t.actionIndex === -1 ? 65535 : t.actionIndex;
console.log("ACTION edge " + t.ruleIndex + ":" + index);
}
return new ATNConfig({state:t.target}, config);
@ -1659,7 +1659,7 @@ class ParserATNSimulator extends ATNSimulator {
* state was not already present
*/
addDFAState(dfa, D) {
if (D == ATNSimulator.ERROR) {
if (D === ATNSimulator.ERROR) {
return D;
}
const existing = dfa.states.get(D);

View File

@ -501,7 +501,7 @@ const PredictionMode = {
getConflictingAltSubsets: function(configs) {
const configToAlts = new Map();
configToAlts.hashFunction = function(cfg) { hashStuff(cfg.state.stateNumber, cfg.context); };
configToAlts.equalsFunction = function(c1, c2) { return c1.state.stateNumber==c2.state.stateNumber && c1.context.equals(c2.context);}
configToAlts.equalsFunction = function(c1, c2) { return c1.state.stateNumber === c2.state.stateNumber && c1.context.equals(c2.context);};
configs.items.map(function(cfg) {
let alts = configToAlts.get(cfg);
if (alts === null) {
@@ -557,6 +557,6 @@ const PredictionMode = {
}
return result;
}
}
};
module.exports = PredictionMode;
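getConflictingAltSubsets above groups every configuration by the pair (state number, prediction context) and collects the alternatives seen for each pair; the runtime does this with its own Map type that accepts hashFunction/equalsFunction callbacks. A simplified sketch of the same grouping idea, using a plain Map with a derived string key instead of custom hash/equals (the cfg.state, cfg.context, and cfg.alt names mirror the code above; everything else is hypothetical):

// Sketch only: group alternatives by (state, context) with a plain JavaScript Map.
function groupAltsByStateAndContext(configs) {
    const altsByKey = new Map();
    for (const cfg of configs) {
        // assumes cfg.context has a usable toString(); ANTLR instead hashes it
        const key = cfg.state.stateNumber + '/' + cfg.context.toString();
        let alts = altsByKey.get(key);
        if (alts === undefined) {
            alts = new Set();
            altsByKey.set(key, alts);
        }
        alts.add(cfg.alt);
    }
    return altsByKey; // "stateNumber/context" -> Set of alternative numbers
}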


@@ -111,7 +111,7 @@ class DFA {
*/
setPrecedenceDfa(precedenceDfa) {
if (this.precedenceDfa!==precedenceDfa) {
this._states = new DFAStatesSet();
this._states = new Set();
if (precedenceDfa) {
const precedenceState = new DFAState(null, new ATNConfigSet());
precedenceState.edges = [];


@@ -55,6 +55,8 @@ class DefaultErrorStrategy extends ErrorStrategy {
*/
this.lastErrorIndex = -1;
this.lastErrorStates = null;
this.nextTokensContext = null;
this.nextTokenState = 0;
}
/**
@@ -216,11 +218,21 @@ class DefaultErrorStrategy extends ErrorStrategy {
if (this.inErrorRecoveryMode(recognizer)) {
return;
}
const s = recognizer._interp.atn.states[recognizer.state]
const la = recognizer.getTokenStream().LA(1)
const s = recognizer._interp.atn.states[recognizer.state];
const la = recognizer.getTokenStream().LA(1);
// try cheaper subset first; might get lucky. seems to shave a wee bit off
const nextTokens = recognizer.atn.nextTokens(s)
if (nextTokens.contains(Token.EPSILON) || nextTokens.contains(la)) {
const nextTokens = recognizer.atn.nextTokens(s);
if(nextTokens.contains(la)) {
this.nextTokensContext = null;
this.nextTokenState = ATNState.INVALID_STATE_NUMBER;
return;
} else if (nextTokens.contains(Token.EPSILON)) {
if(this.nextTokensContext === null) {
// It's possible the next token won't match; information tracked
// by sync is restricted for performance.
this.nextTokensContext = recognizer._ctx;
this.nextTokensState = recognizer._stateNumber;
}
return;
}
switch (s.stateType) {
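The reworked sync() above splits the old combined test in two: a direct hit on the lookahead token is the cheap common case and clears any remembered context, while the EPSILON case records the current rule context once so a later error can still refer to it. A condensed sketch of that decision, for reading the hunk (field names follow the code above; Token stands for the runtime's token constants, and the function itself is hypothetical):

// Sketch only: the fast-path decision made by the new sync().
function syncFastPath(handler, recognizer, nextTokens, la) {
    if (nextTokens.contains(la)) {              // lookahead is viable: we are in sync
        handler.nextTokensContext = null;       // drop any previously remembered context
        return true;
    }
    if (nextTokens.contains(Token.EPSILON)) {   // the rule may legally match nothing here
        if (handler.nextTokensContext === null) {
            handler.nextTokensContext = recognizer._ctx; // remember this point only once
        }
        return true;                            // defer; no resynchronization yet
    }
    return false;                               // fall through to the stateType switch
}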


@@ -11,7 +11,8 @@
* and what kind of problem occurred.
*/
const {PredicateTransition} = require('./../atn/Transition')
const {PredicateTransition} = require('./../atn/Transition');
const {Interval} = require('../IntervalSet');
class RecognitionException extends Error {
constructor(params) {
@@ -76,9 +77,9 @@ class LexerNoViableAltException extends RecognitionException {
}
toString() {
let symbol = ""
let symbol = "";
if (this.startIndex >= 0 && this.startIndex < this.input.size) {
symbol = this.input.getText((this.startIndex,this.startIndex));
symbol = this.input.getText(new Interval(this.startIndex,this.startIndex));
}
return "LexerNoViableAltException" + symbol;
}
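The getText change above fixes a classic JavaScript pitfall: (this.startIndex, this.startIndex) is the comma operator, not a pair, so the old call passed a single number rather than a range; the new code builds an explicit Interval. A standalone illustration (getTextDemo is a hypothetical stand-in, only to show what the callee receives):

// Illustration only: the comma operator evaluates both operands and yields the last one.
const value = (10, 10);
console.log(value);                               // 10 - a plain number, no range object
function getTextDemo(range) { return typeof range; }
console.log(getTextDemo((10, 10)));               // "number" - what the old code effectively passed
console.log(getTextDemo({start: 10, stop: 10}));  // "object" - an Interval-like range, as in the fix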


@@ -4,22 +4,24 @@ if (!String.prototype.codePointAt) {
'use strict'; // needed to support `apply`/`call` with `undefined`/`null`
var defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
let result;
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
} catch(error) {}
const object = {};
const $defineProperty = Object.defineProperty;
result = $defineProperty(object, object, object) && $defineProperty;
} catch(error) {
}
return result;
}());
var codePointAt = function(position) {
const codePointAt = function(position) {
if (this == null) {
throw TypeError();
}
var string = String(this);
var size = string.length;
const string = String(this);
const size = string.length;
// `ToInteger`
var index = position ? Number(position) : 0;
if (index != index) { // better `isNaN`
let index = position ? Number(position) : 0;
if (index !== index) { // better `isNaN`
index = 0;
}
// Account for out-of-bounds indices:
@@ -27,8 +29,8 @@ if (!String.prototype.codePointAt) {
return undefined;
}
// Get the first code unit
var first = string.charCodeAt(index);
var second;
const first = string.charCodeAt(index);
let second;
if ( // check if its the start of a surrogate pair
first >= 0xD800 && first <= 0xDBFF && // high surrogate
size > index + 1 // there is a next code unit
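The index !== index test above is the NaN check the comment refers to: NaN is the only JavaScript value that is not equal to itself, so the strict form behaves exactly like the loose one here while staying consistent with the rest of the commit. A quick standalone illustration:

// Illustration only: self-inequality as a NaN test.
const index = Number('not a number'); // NaN
console.log(index !== index);         // true  - only NaN fails to equal itself
console.log(Number.isNaN(index));     // true  - the ES2015 equivalent, not available in the old engines this polyfill targets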


@@ -1,35 +1,36 @@
/*! https://mths.be/fromcodepoint v0.2.1 by @mathias */
if (!String.fromCodePoint) {
(function() {
var defineProperty = (function() {
const defineProperty = (function() {
// IE 8 only supports `Object.defineProperty` on DOM elements
let result;
try {
var object = {};
var $defineProperty = Object.defineProperty;
var result = $defineProperty(object, object, object) && $defineProperty;
const object = {};
const $defineProperty = Object.defineProperty;
result = $defineProperty(object, object, object) && $defineProperty;
} catch(error) {}
return result;
}());
var stringFromCharCode = String.fromCharCode;
var floor = Math.floor;
var fromCodePoint = function(_) {
var MAX_SIZE = 0x4000;
var codeUnits = [];
var highSurrogate;
var lowSurrogate;
var index = -1;
var length = arguments.length;
const stringFromCharCode = String.fromCharCode;
const floor = Math.floor;
const fromCodePoint = function(_) {
const MAX_SIZE = 0x4000;
const codeUnits = [];
let highSurrogate;
let lowSurrogate;
let index = -1;
const length = arguments.length;
if (!length) {
return '';
}
var result = '';
let result = '';
while (++index < length) {
var codePoint = Number(arguments[index]);
let codePoint = Number(arguments[index]);
if (
!isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity`
codePoint < 0 || // not a valid Unicode code point
codePoint > 0x10FFFF || // not a valid Unicode code point
floor(codePoint) != codePoint // not an integer
floor(codePoint) !== codePoint // not an integer
) {
throw RangeError('Invalid code point: ' + codePoint);
}
@@ -42,7 +43,7 @@ if (!String.fromCodePoint) {
lowSurrogate = (codePoint % 0x400) + 0xDC00;
codeUnits.push(highSurrogate, lowSurrogate);
}
if (index + 1 == length || codeUnits.length > MAX_SIZE) {
if (index + 1 === length || codeUnits.length > MAX_SIZE) {
result += stringFromCharCode.apply(null, codeUnits);
codeUnits.length = 0;
}
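For the surrogate-pair arithmetic above, a worked example may help: any code point above 0xFFFF is split into a high and a low surrogate before being handed to String.fromCharCode. A standalone sketch using U+1F600 (not part of the polyfill):

// Illustration only: encoding U+1F600 as a UTF-16 surrogate pair.
const codePoint = 0x1F600;
const highSurrogate = ((codePoint - 0x10000) >> 10) + 0xD800;  // 0xD83D
const lowSurrogate = (codePoint % 0x400) + 0xDC00;             // 0xDE00
console.log(String.fromCharCode(highSurrogate, lowSurrogate)); // same text as String.fromCodePoint(0x1F600)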


@@ -1,4 +1,4 @@
const path = require('path')
const path = require('path');
module.exports = {
mode: "production",


@@ -129,16 +129,16 @@ class LL1Analyzer (object):
return
if ctx != PredictionContext.EMPTY:
removed = s.ruleIndex in calledRuleStack
try:
calledRuleStack.discard(s.ruleIndex)
# run thru all possible stack tops in ctx
for i in range(0, len(ctx)):
returnState = self.atn.states[ctx.getReturnState(i)]
removed = returnState.ruleIndex in calledRuleStack
try:
calledRuleStack.discard(returnState.ruleIndex)
self._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
finally:
if removed:
calledRuleStack.add(returnState.ruleIndex)
calledRuleStack.add(s.ruleIndex)
return
for t in s.transitions:
@@ -163,8 +163,8 @@ class LL1Analyzer (object):
elif type(t) == WildcardTransition:
look.addRange( Interval(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType + 1) )
else:
set = t.label
if set is not None:
set_ = t.label
if set_ is not None:
if isinstance(t, NotSetTransition):
set = set.complement(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType)
look.addSet(set)
set_ = set_.complement(Token.MIN_USER_TOKEN_TYPE, self.atn.maxTokenType)
look.addSet(set_)


@@ -53,6 +53,8 @@ class DefaultErrorStrategy(ErrorStrategy):
#
self.lastErrorIndex = -1
self.lastErrorStates = None
self.nextTokensContext = None
self.nextTokenState = 0
# <p>The default implementation simply calls {@link #endErrorCondition} to
# ensure that the handler is not in error recovery mode.</p>
@@ -203,7 +205,16 @@ class DefaultErrorStrategy(ErrorStrategy):
la = recognizer.getTokenStream().LA(1)
# try cheaper subset first; might get lucky. seems to shave a wee bit off
nextTokens = recognizer.atn.nextTokens(s)
if Token.EPSILON in nextTokens or la in nextTokens:
if la in nextTokens:
self.nextTokensContext = None
self.nextTokenState = ATNState.INVALID_STATE_NUMBER
return
elif Token.EPSILON in nextTokens:
if self.nextTokensContext is None:
# It's possible the next token won't match; information tracked
# by sync is restricted for performance.
self.nextTokensContext = recognizer._ctx
self.nextTokensState = recognizer._stateNumber
return
if s.stateType in [ATNState.BLOCK_START, ATNState.STAR_BLOCK_START,


@@ -39,7 +39,7 @@ class TestLexer(Lexer):
def __init__(self, input=None):
super(TestLexer, self).__init__(input)
self.checkVersion("4.9")
self.checkVersion("4.9.1")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None
@@ -95,7 +95,7 @@ class TestLexer2(Lexer):
def __init__(self, input=None):
super(TestLexer2, self).__init__(input)
self.checkVersion("4.8")
self.checkVersion("4.9.1")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None


@@ -132,16 +132,16 @@ class LL1Analyzer (object):
return
if ctx != PredictionContext.EMPTY:
removed = s.ruleIndex in calledRuleStack
try:
calledRuleStack.discard(s.ruleIndex)
# run thru all possible stack tops in ctx
for i in range(0, len(ctx)):
returnState = self.atn.states[ctx.getReturnState(i)]
removed = returnState.ruleIndex in calledRuleStack
try:
calledRuleStack.discard(returnState.ruleIndex)
self._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF)
finally:
if removed:
calledRuleStack.add(returnState.ruleIndex)
calledRuleStack.add(s.ruleIndex)
return
for t in s.transitions:


@@ -453,7 +453,7 @@ class Parser (Recognizer):
def getInvokingContext(self, ruleIndex:int):
ctx = self._ctx
while ctx is not None:
if ctx.ruleIndex == ruleIndex:
if ctx.getRuleIndex() == ruleIndex:
return ctx
ctx = ctx.parentCtx
return None


@@ -58,6 +58,8 @@ class DefaultErrorStrategy(ErrorStrategy):
#
self.lastErrorIndex = -1
self.lastErrorStates = None
self.nextTokensContext = None
self.nextTokenState = 0
# <p>The default implementation simply calls {@link #endErrorCondition} to
# ensure that the handler is not in error recovery mode.</p>
@@ -208,7 +210,16 @@ class DefaultErrorStrategy(ErrorStrategy):
la = recognizer.getTokenStream().LA(1)
# try cheaper subset first; might get lucky. seems to shave a wee bit off
nextTokens = recognizer.atn.nextTokens(s)
if Token.EPSILON in nextTokens or la in nextTokens:
if la in nextTokens:
self.nextTokensContext = None
self.nextTokenState = ATNState.INVALID_STATE_NUMBER
return
elif Token.EPSILON in nextTokens:
if self.nextTokensContext is None:
# It's possible the next token won't match; information tracked
# by sync is restricted for performance.
self.nextTokensContext = recognizer._ctx
self.nextTokensState = recognizer._stateNumber
return
if s.stateType in [ATNState.BLOCK_START, ATNState.STAR_BLOCK_START,


@@ -95,7 +95,7 @@ class TestLexer2(Lexer):
def __init__(self, input=None):
super(TestLexer2, self).__init__(input)
self.checkVersion("4.8")
self.checkVersion("4.9.1")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None


@@ -132,7 +132,7 @@ open class Lexer: Recognizer<LexerATNSimulator>, TokenSource {
// Mark start location in char stream so unbuffered streams are
// guaranteed at least have text of current token
var tokenStartMarker = _input.mark()
let tokenStartMarker = _input.mark()
defer {
// make sure we release marker after match or
// unbuffered char stream will keep buffering


@@ -169,16 +169,18 @@ public class LL1Analyzer {
}
if ctx != PredictionContext.EMPTY {
let removed = try! calledRuleStack.get(s.ruleIndex!)
try! calledRuleStack.clear(s.ruleIndex!)
defer {
if removed {
try! calledRuleStack.set(s.ruleIndex!)
}
}
// run thru all possible stack tops in ctx
let length = ctx.size()
for i in 0..<length {
let returnState = atn.states[(ctx.getReturnState(i))]!
let removed = try! calledRuleStack.get(returnState.ruleIndex!)
try! calledRuleStack.clear(returnState.ruleIndex!)
_LOOK(returnState, stopState, ctx.getParent(i), look, &lookBusy, calledRuleStack, seeThruPreds, addEOF)
if removed {
try! calledRuleStack.set(returnState.ruleIndex!)
}
}
return
}


@@ -116,7 +116,7 @@ open class LexerATNSimulator: ATNSimulator {
LexerATNSimulator.match_calls += 1
self.mode = mode
var mark = input.mark()
let mark = input.mark()
defer {
try! input.release(mark)
}
@@ -609,10 +609,10 @@ open class LexerATNSimulator: ATNSimulator {
return try recog.sempred(nil, ruleIndex, predIndex)
}
var savedCharPositionInLine = charPositionInLine
var savedLine = line
var index = input.index()
var marker = input.mark()
let savedCharPositionInLine = charPositionInLine
let savedLine = line
let index = input.index()
let marker = input.mark()
do {
try consume(input)
defer


@@ -149,7 +149,7 @@ public class LexerActionExecutor: Hashable {
///
public func execute(_ lexer: Lexer, _ input: CharStream, _ startIndex: Int) throws {
var requiresSeek: Bool = false
var stopIndex: Int = input.index()
let stopIndex: Int = input.index()
defer {
if requiresSeek {
try! input.seek(stopIndex)


@@ -28,13 +28,27 @@ public struct LookupDictionary {
private func hash(_ config: ATNConfig) -> Int {
if type == LookupDictionaryType.lookup {
/* migrating to XCode 12.3/Swift 5.3 introduced a very weird bug
where reading hashValue from a SemanticContext.AND instance would:
call the AND empty constructor
NOT call AND.hash(into)
Could it be a Swift compiler bug ?
All tests pass when using Hasher.combine()
Keeping the old code for reference:
var hashCode: Int = 7
hashCode = 31 * hashCode + config.state.stateNumber
hashCode = 31 * hashCode + config.alt
hashCode = 31 * hashCode + config.semanticContext.hashValue
hashCode = 31 * hashCode + config.semanticContext.hashValue // <- the crash would occur here
return hashCode
*/
var hasher = Hasher()
hasher.combine(7)
hasher.combine(config.state.stateNumber)
hasher.combine(config.alt)
hasher.combine(config.semanticContext)
return hasher.finalize()
} else {
//Ordered
return config.hashValue


@@ -53,17 +53,17 @@ public class ProfilingATNSimulator: ParserATNSimulator {
override
public func adaptivePredict(_ input: TokenStream, _ decision: Int,_ outerContext: ParserRuleContext?) throws -> Int {
var outerContext = outerContext
let outerContext = outerContext
self._sllStopIndex = -1
self._llStopIndex = -1
self.currentDecision = decision
var start: Int64 = Int64(Date().timeIntervalSince1970) //System.nanoTime(); // expensive but useful info
var alt: Int = try super.adaptivePredict(input, decision, outerContext)
var stop: Int64 = Int64(Date().timeIntervalSince1970) //System.nanoTime();
let start: Int64 = Int64(Date().timeIntervalSince1970) //System.nanoTime(); // expensive but useful info
let alt: Int = try super.adaptivePredict(input, decision, outerContext)
let stop: Int64 = Int64(Date().timeIntervalSince1970) //System.nanoTime();
decisions[decision].timeInPrediction += (stop - start)
decisions[decision].invocations += 1
var SLL_k: Int64 = Int64(_sllStopIndex - _startIndex + 1)
let SLL_k: Int64 = Int64(_sllStopIndex - _startIndex + 1)
decisions[decision].SLL_TotalLook += SLL_k
decisions[decision].SLL_MinLook = decisions[decision].SLL_MinLook == 0 ? SLL_k : min(decisions[decision].SLL_MinLook, SLL_k)
if SLL_k > decisions[decision].SLL_MaxLook {
@@ -73,7 +73,7 @@ public class ProfilingATNSimulator: ParserATNSimulator {
}
if _llStopIndex >= 0 {
var LL_k: Int64 = Int64(_llStopIndex - _startIndex + 1)
let LL_k: Int64 = Int64(_llStopIndex - _startIndex + 1)
decisions[decision].LL_TotalLook += LL_k
decisions[decision].LL_MinLook = decisions[decision].LL_MinLook == 0 ? LL_k : min(decisions[decision].LL_MinLook, LL_k)
if LL_k > decisions[decision].LL_MaxLook {


@@ -16,6 +16,7 @@ import java.io.File;
import static org.junit.Assert.assertEquals;
public class BaseJavaToolTest extends BaseJavaTest {
public void testErrors(String[] pairs, boolean printTree) {
for (int i = 0; i < pairs.length; i+=2) {
String grammarStr = pairs[i];
@@ -23,10 +24,10 @@ public class BaseJavaToolTest extends BaseJavaTest {
String[] lines = grammarStr.split("\n");
String fileName = getFilenameFromFirstLineOfGrammar(lines[0]);
ErrorQueue equeue = BaseRuntimeTest.antlrOnString(tmpdir, null, fileName, grammarStr, false); // use default language target in case test overrides
ErrorQueue equeue = BaseRuntimeTest.antlrOnString(getTempDirPath(), null, fileName, grammarStr, false); // use default language target in case test overrides
String actual = equeue.toString(true);
actual = actual.replace(tmpdir + File.separator, "");
actual = actual.replace(getTempDirPath() + File.separator, "");
// System.err.println(actual);
String msg = grammarStr;
msg = msg.replace("\n","\\n");


@@ -9,12 +9,14 @@ package org.antlr.v4.test.tool;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.NoViableAltException;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.atn.BlockStartState;
import org.antlr.v4.runtime.atn.LexerATNSimulator;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.IntegerList;
import org.antlr.v4.test.runtime.MockIntTokenStream;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
@@ -22,6 +24,7 @@ import org.antlr.v4.tool.Rule;
import org.junit.Before;
import org.junit.Test;
import static org.antlr.v4.test.runtime.RuntimeTestUtils.getTokenTypesViaATN;
import static org.junit.Assert.assertEquals;
// NOTICE: TOKENS IN LEXER, PARSER MUST BE SAME OR TOKEN TYPE MISMATCH
@@ -373,7 +376,7 @@ public class TestATNInterpreter extends BaseJavaToolTest {
ParserATNFactory f = new ParserATNFactory(g);
ATN atn = f.createATN();
IntTokenStream input = new IntTokenStream(types);
TokenStream input = new MockIntTokenStream(types);
// System.out.println("input="+input.types);
ParserInterpreterForTesting interp = new ParserInterpreterForTesting(g, input);
ATNState startState = atn.ruleToStartState[g.getRule("a").index];


@@ -11,6 +11,7 @@ import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNState;
import org.antlr.v4.runtime.misc.Utils;
import org.antlr.v4.test.runtime.RuntimeTestUtils;
import org.antlr.v4.tool.DOTGenerator;
import org.antlr.v4.tool.LexerGrammar;
import org.junit.Before;
@@ -386,7 +387,7 @@ public class TestATNLexerInterpreter extends BaseJavaToolTest {
DOTGenerator dot = new DOTGenerator(lg);
// System.out.println(dot.getDOT(startState, true));
List<String> tokenTypes = getTokenTypes(lg, atn, input);
List<String> tokenTypes = RuntimeTestUtils.getTokenTypes(lg, atn, input);
String result = Utils.join(tokenTypes.iterator(), ", ");
// System.out.println(tokenTypes);

Some files were not shown because too many files have changed in this diff