Merge branch 'master' into master

Markus Franke 2019-10-11 15:24:09 +02:00 committed by GitHub
commit 76c0a65d19
67 changed files with 3937 additions and 190 deletions

.gitmodules (new file)

@ -0,0 +1,3 @@
[submodule "runtime/PHP"]
path = runtime/PHP
url = https://github.com/antlr/antlr-php-runtime.git


@ -152,6 +152,13 @@ matrix:
jdk: openjdk8
env: TARGET=csharp
stage: main-test
- os: linux
language: php
php:
- 7.2
jdk: openjdk8
env: TARGET=php
stage: main-test
- os: linux
jdk: openjdk8
dist: trusty


@ -0,0 +1,10 @@
#!/bin/bash
set -euo pipefail
sudo apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 3FA7E0328081BFF6A14DA29AA6A19B38D3D831EF
sudo apt-get update -qq
php -v
mvn install -DskipTests=true -Dmaven.javadoc.skip=true -B -V

.travis/run-tests-php.sh (new executable file)

@ -0,0 +1,9 @@
#!/bin/bash
set -euo pipefail
php_path=$(which php)
composer install -d ../runtime/PHP
mvn -q -DPHP_PATH="${php_path}" -Dparallel=methods -DthreadCount=4 -Dtest=php.* test


@ -22,6 +22,7 @@ ANTLR project lead and supreme dictator for life
* [Janyou](https://github.com/janyou) (Swift target)
* [Ewan Mellor](https://github.com/ewanmellor), [Hanzhou Shi](https://github.com/hanjoes) (Swift target merging)
* [Ben Hamilton](https://github.com/bhamiltoncx) (Full Unicode support in serialized ATN and all languages' runtimes for code points > U+FFFF)
* [Marcos Passos](https://github.com/marcospassos) (PHP target)
## Useful information


@ -4,6 +4,9 @@ cache:
- '%USERPROFILE%\.nuget\packages -> **\project.json'
image: Visual Studio 2017
build: off
install:
- git submodule update --init --recursive
- cinst -y php composer
build_script:
- mvn -DskipTests install --batch-mode
- msbuild /target:restore /target:rebuild /property:Configuration=Release /logger:"C:\Program Files\AppVeyor\BuildAgent\Appveyor.MSBuildLogger.dll" /verbosity:detailed runtime/CSharp/runtime/CSharp/Antlr4.dotnet.sln
@ -11,7 +14,7 @@ build_script:
after_build:
- msbuild /target:pack /property:Configuration=Release /verbosity:detailed runtime/CSharp/runtime/CSharp/Antlr4.dotnet.sln
test_script:
- mvn install -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode
- mvn install -Dantlr-php-php="C:\tools\php73\php.exe" -Dantlr-python2-python="C:\Python27\python.exe" -Dantlr-python3-python="C:\Python35\python.exe" -Dantlr-javascript-nodejs="C:\Program Files (x86)\nodejs\node.exe" --batch-mode
artifacts:
- path: 'runtime\**\*.nupkg'
name: NuGet


@ -221,4 +221,13 @@ YYYY/MM/DD, github id, Full name, email
2019/07/11, olowo726, Olof Wolgast, olof@baah.se
2019/07/16, abhijithneilabraham, Abhijith Neil Abraham, abhijithneilabrahampk@gmail.com
2019/07/26, Braavos96, Eric Hettiaratchi, erichettiaratchi@gmail.com
2019/08/23, akaJes, Oleksandr Mamchyts, akaJes@gmail.com
2019/08/27, wurzelpeter, Markus Franke, markus[hyphen]franke[at]web[dot]de
2019/09/10, ImanHosseini, Iman Hosseini, hosseini.iman@yahoo.com
2019/09/03, João Henrique, johnnyonflame@hotmail.com
2019/09/10, neko1235, Ihar Mokharau, igor.mohorev@gmail.com
2019/09/10, yar3333, Yaroslav Sivakov, yar3333@gmail.com
2019/09/10, marcospassos, Marcos Passos, marcospassos.com@gmail.com
2019/09/10, amorimjuliana, Juliana Amorim, juu.amorim@gmail.com
2019/09/17, kaz, Kazuki Sawada, kazuki@6715.jp
2019/09/28, lmy269, Mingyang Liu, lmy040758@gmail.com


@ -14,7 +14,7 @@ For the 4.7.1 release, we discussed both approaches in [detail](https://github.c
## Case-insensitive grammars
As a prime example of a grammar that specifically describes case insensitive keywords, see the
[SQLite grammar](https://github.com/antlr/grammars-v4/blob/master/sqlite/SQLite.g4). To match a case insensitive keyword, there are rules such as
```
@ -72,7 +72,8 @@ Lexer lexer = new SomeSQLLexer(upper);
Here are implementations of `CaseChangingCharStream` in various target languages:
* [Java](https://github.com/parrt/antlr4/blob/case-insensitivity-doc/doc/resources/CaseChangingCharStream.java)
* [JavaScript](https://github.com/parrt/antlr4/blob/case-insensitivity-doc/doc/resources/CaseInsensitiveInputStream.js)
* [Go](https://github.com/parrt/antlr4/blob/case-insensitivity-doc/doc/resources/case_changing_stream.go)
* [C#](https://github.com/parrt/antlr4/blob/case-insensitivity-doc/doc/resources/CaseChangingCharStream.cs)
* [C#](https://github.com/antlr/antlr4/blob/master/doc/resources/CaseChangingCharStream.cs)
* [Go](https://github.com/antlr/antlr4/blob/master/doc/resources/case_changing_stream.go)
* [Java](https://github.com/antlr/antlr4/blob/master/doc/resources/CaseChangingCharStream.java)
* [JavaScript](https://github.com/antlr/antlr4/blob/master/doc/resources/CaseChangingStream.js)
* [Python2/3](https://github.com/antlr/antlr4/blob/master/doc/resources/CaseChangingStream.py)

doc/php-target.md (new file)

@ -0,0 +1,111 @@
# ANTLR4 Runtime for PHP
### First steps
#### 1. Install ANTLR4
[The getting started guide](https://github.com/antlr/antlr4/blob/master/doc/getting-started.md)
should get you started.
#### 2. Install the PHP ANTLR runtime
Each target language for ANTLR has a runtime package for running parsers
generated by ANTLR4. The runtime provides a common set of tools for using your parser.
Install the runtime with Composer:
```bash
composer require antlr/antlr4-php-runtime
```
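Composer installs the runtime under `vendor/` and registers it with its autoloader. A minimal bootstrap sketch (assuming Composer's default `vendor/` layout):
```php
<?php

use Antlr\Antlr4\Runtime\InputStream;

// Composer's autoloader makes the Antlr\Antlr4\Runtime classes available.
require __DIR__ . '/vendor/autoload.php';

// For example, build a character stream from a file passed on the command line.
$input = InputStream::fromPath($argv[1]);
```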
#### 3. Generate your parser
You use the ANTLR4 "tool" to generate a parser. The generated files will reference the ANTLR
runtime installed above.
Suppose you're using a UNIX system and have set up an alias for the ANTLR4 tool
as described in [the getting started guide](https://github.com/antlr/antlr4/blob/master/doc/getting-started.md).
To generate your PHP parser, run the following command:
```bash
antlr4 -Dlanguage=PHP MyGrammar.g4
```
For a full list of antlr4 tool options, please visit the
[tool documentation page](https://github.com/antlr/antlr4/blob/master/doc/tool-options.md).
### Complete example
Suppose you're using the JSON grammar from https://github.com/antlr/grammars-v4/tree/master/json.
Then, invoke `antlr4 -Dlanguage=PHP JSON.g4`. The result of this is a
collection of `.php` files in the `parser` directory including:
```
JSONParser.php
JSONBaseListener.php
JSONLexer.php
JSONListener.php
```
Another common option to the ANTLR tool is `-visitor`, which generates a parse
tree visitor, but we won't be doing that here. For a full list of antlr4 tool
options, please visit the [tool documentation page](tool-options.md).
We'll write a small main script to call the generated parser/lexer
(assuming they are separate). This one writes out the text of each
`ParserRuleContext` it enters:
```php
<?php

namespace JsonParser;

use Antlr\Antlr4\Runtime\CommonTokenStream;
use Antlr\Antlr4\Runtime\Error\Listeners\DiagnosticErrorListener;
use Antlr\Antlr4\Runtime\InputStream;
use Antlr\Antlr4\Runtime\ParserRuleContext;
use Antlr\Antlr4\Runtime\Tree\ErrorNode;
use Antlr\Antlr4\Runtime\Tree\ParseTreeListener;
use Antlr\Antlr4\Runtime\Tree\ParseTreeWalker;
use Antlr\Antlr4\Runtime\Tree\TerminalNode;

// Load Composer's autoloader; the generated JSONLexer and JSONParser classes
// must also be autoloadable (or explicitly required) from this script.
require __DIR__ . '/vendor/autoload.php';

final class TreeShapeListener implements ParseTreeListener {
    public function visitTerminal(TerminalNode $node) : void {}
    public function visitErrorNode(ErrorNode $node) : void {}
    public function exitEveryRule(ParserRuleContext $ctx) : void {}

    public function enterEveryRule(ParserRuleContext $ctx) : void {
        echo $ctx->getText();
    }
}

$input = InputStream::fromPath($argv[1]);
$lexer = new JSONLexer($input);
$tokens = new CommonTokenStream($lexer);
$parser = new JSONParser($tokens);
$parser->addErrorListener(new DiagnosticErrorListener());
$parser->setBuildParseTree(true);
$tree = $parser->json();

ParseTreeWalker::default()->walk(new TreeShapeListener(), $tree);
```
Create an `example.json` file:
```json
{"a":1}
```
Save the script above as `json.php`, then parse the input file:
```
php json.php example.json
```
The expected output is:
```
{"a":1}
{"a":1}
"a":1
1
```
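If you just want a quick textual dump of the parse tree rather than walking it with a listener, the tree can also be rendered in LISP style. A minimal sketch, reusing `$parser` from the script above:
```php
// ... after building $parser exactly as in the script above:
$tree = $parser->json();

// Render the parse tree in LISP-style text form using the parser's rule names.
echo $tree->toStringTree($parser->getRuleNames()) . \PHP_EOL;
```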


@ -0,0 +1,65 @@
//
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
//
function CaseChangingStream(stream, upper) {
this._stream = stream;
this._upper = upper;
}
CaseChangingStream.prototype.LA = function(offset) {
var c = this._stream.LA(offset);
if (c <= 0) {
return c;
}
return String.fromCodePoint(c)[this._upper ? "toUpperCase" : "toLowerCase"]().codePointAt(0);
};
CaseChangingStream.prototype.reset = function() {
return this._stream.reset();
};
CaseChangingStream.prototype.consume = function() {
return this._stream.consume();
};
CaseChangingStream.prototype.LT = function(offset) {
return this._stream.LT(offset);
};
CaseChangingStream.prototype.mark = function() {
return this._stream.mark();
};
CaseChangingStream.prototype.release = function(marker) {
return this._stream.release(marker);
};
CaseChangingStream.prototype.seek = function(_index) {
return this._stream.seek(_index);
};
CaseChangingStream.prototype.getText = function(start, stop) {
return this._stream.getText(start, stop);
};
CaseChangingStream.prototype.toString = function() {
return this._stream.toString();
};
Object.defineProperty(CaseChangingStream.prototype, "index", {
get: function() {
return this._stream.index;
}
});
Object.defineProperty(CaseChangingStream.prototype, "size", {
get: function() {
return this._stream.size;
}
});
exports.CaseChangingStream = CaseChangingStream;


@ -0,0 +1,13 @@
class CaseChangingStream():
    def __init__(self, stream, upper):
        self._stream = stream
        self._upper = upper

    def __getattr__(self, name):
        return self._stream.__getattribute__(name)

    def LA(self, offset):
        c = self._stream.LA(offset)
        if c <= 0:
            return c
        return ord(chr(c).upper() if self._upper else chr(c).lower())


@ -1,54 +0,0 @@
//
/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
//
function CaseInsensitiveInputStream(stream, upper) {
this._stream = stream;
this._case = upper ? String.toUpperCase : String.toLowerCase;
return this;
}
CaseInsensitiveInputStream.prototype.LA = function (offset) {
c = this._stream.LA(i);
if (c <= 0) {
return c;
}
return this._case.call(String.fromCodePoint(c))
};
CaseInsensitiveInputStream.prototype.reset = function() {
return this._stream.reset();
};
CaseInsensitiveInputStream.prototype.consume = function() {
return this._stream.consume();
};
CaseInsensitiveInputStream.prototype.LT = function(offset) {
return this._stream.LT(offset);
};
CaseInsensitiveInputStream.prototype.mark = function() {
return this._stream.mark();
};
CaseInsensitiveInputStream.prototype.release = function(marker) {
return this._stream.release(marker);
};
CaseInsensitiveInputStream.prototype.seek = function(_index) {
return this._stream.getText(start, stop);
};
CaseInsensitiveInputStream.prototype.getText = function(start, stop) {
return this._stream.getText(start, stop);
};
CaseInsensitiveInputStream.prototype.toString = function() {
return this._stream.toString();
};
exports.CaseInsensitiveInputStream = CaseInsensitiveInputStream;


@ -1,13 +1,15 @@
package antlr
package antlr_resource
import (
"unicode"
"github.com/antlr/antlr4/runtime/Go/antlr"
)
// CaseChangingStream wraps an existing CharStream, but upper cases, or
// lower cases the input before it is tokenized.
type CaseChangingStream struct {
CharStream
antlr.CharStream
upper bool
}
@ -15,10 +17,8 @@ type CaseChangingStream struct {
// NewCaseChangingStream returns a new CaseChangingStream that forces
// all tokens read from the underlying stream to be either upper case
// or lower case based on the upper argument.
func NewCaseChangingStream(in CharStream, upper bool) *CaseChangingStream {
return &CaseChangingStream{
in, upper,
}
func NewCaseChangingStream(in antlr.CharStream, upper bool) *CaseChangingStream {
return &CaseChangingStream{in, upper}
}
// LA gets the value of the symbol at offset from the current position


@ -9,12 +9,13 @@ This page lists the available and upcoming ANTLR runtimes. Please note that you
* [Go](go-target.md)
* [C++](cpp-target.md)
* [Swift](swift-target.md)
* [PHP](php-target.md)
## Target feature parity
New features generally appear in the Java target and then migrate to the other targets, but these other targets don't always get updated in the same overall tool release. This section tries to identify features added to Java that have not been added to the other targets.
|Feature|Java|C&sharp;|Python2|Python3|JavaScript|Go|C++|Swift|
|---|---|---|---|---|---|---|---|---|
|Ambiguous tree construction|4.5.1|-|-|-|-|-|-|-|
|Feature|Java|C&sharp;|Python2|Python3|JavaScript|Go|C++|Swift|PHP
|---|---|---|---|---|---|---|---|---|---|
|Ambiguous tree construction|4.5.1|-|-|-|-|-|-|-|-|


@ -109,14 +109,15 @@
<!-- SUREFIRE-951: file.encoding cannot be set via systemPropertyVariables -->
<argLine>-Dfile.encoding=UTF-8</argLine>
<includes>
<include>**/csharp/Test*.java</include>
<include>**/java/Test*.java</include>
<include>**/go/Test*.java</include>
<include>**/javascript/node/Test*.java</include>
<include>**/python2/Test*.java</include>
<include>**/python3/Test*.java</include>
<include>${antlr.tests.swift}</include>
</includes>
<include>**/csharp/Test*.java</include>
<include>**/java/Test*.java</include>
<include>**/go/Test*.java</include>
<include>**/javascript/node/Test*.java</include>
<include>**/python2/Test*.java</include>
<include>**/python3/Test*.java</include>
<include>**/php/Test*.java</include>
<include>${antlr.tests.swift}</include>
</includes>
</configuration>
</plugin>
<plugin>


@ -14,6 +14,8 @@ Cast(t,v) ::= "((<t>)<v>)"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "Object <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%int <n> = <v>;%>
InitBooleanMember(n,v) ::= <%bool <n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "int <n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -94,6 +102,8 @@ bool Property() {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
public override IToken NextToken() {


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "var <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%this.<n> = <v>;%>
InitBooleanMember(n,v) ::= <%this.<n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -92,6 +100,8 @@ this.Property = function() {
}
>>
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
PositionAdjustingLexer.prototype.resetAcceptPosition = function(index, line, column) {


@ -9,6 +9,7 @@ Assert(s) ::= ""
Cast(t,v) ::= "dynamic_cast\<<t> *>(<v>)" // Should actually use a more specific name. We may have to use other casts as well.
Append(a,b) ::= "<a> + <b>->toString()"
Concat(a,b) ::= "<a><b>"
AppendStr(a,b) ::= "<a> + <b>"
DeclareLocal(s,v) ::= "<s> = <v>"
@ -17,6 +18,9 @@ AssignLocal(s,v) ::= "<s> = <v>;"
InitIntMember(n,v) ::= "int <n> = <v>;"
InitBooleanMember(n,v) ::= "bool <n> = <v>;"
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "int <n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= "<n>"
SetMember(n,v) ::= "<n> = <v>;"
@ -68,6 +72,8 @@ bool Property() {
ParserPropertyCall(p, call) ::= "<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
protected:
class PositionAdjustingLexerATNSimulator : public antlr4::atn::LexerATNSimulator {


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "var <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%this.<n> = <v>;%>
InitBooleanMember(n,v) ::= <%this.<n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -98,6 +106,8 @@ this.Property = function() {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
PositionAdjustingLexer.prototype.resetAcceptPosition = function(index, line, column) {


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "var <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%this.<n> = <v>;%>
InitBooleanMember(n,v) ::= <%this.<n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -100,6 +108,8 @@ this.Property = function() {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
PositionAdjustingLexer.prototype.resetAcceptPosition = function(index, line, column) {


@ -14,6 +14,8 @@ Cast(t,v) ::= "(<v>)"
Append(a,b) ::= "<a> + fmt.Sprint(<b>)"
AppendStr(a,b) ::= "<a> + <b>"
Concat(a,b) ::= "<a><b>"
DeclareLocal(s, v) ::= "var <s> = <v>"
@ -26,6 +28,12 @@ InitIntMember(n, v) ::= <%var <n> int = <v>; var _ int = <n>; %>
InitBooleanMember(n, v) ::= <%var <n> bool = <v>; var _ bool = <n>; %>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "int <n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%<n>%>
SetMember(n, v) ::= <%<n> = <v>;%>
@ -94,6 +102,8 @@ func (p *TParser) Property() bool {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
func (p *PositionAdjustingLexer) NextToken() antlr.Token {
if _, ok := p.Interpreter.(*PositionAdjustingLexerATNSimulator); !ok {


@ -14,6 +14,8 @@ Cast(t,v) ::= "((<t>)<v>)"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "Object <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%int <n> = <v>;%>
InitBooleanMember(n,v) ::= <%boolean <n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "int <n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -94,6 +102,8 @@ boolean Property() {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
@Override


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "var <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%this.<n> = <v>;%>
InitBooleanMember(n,v) ::= <%this.<n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -98,6 +106,8 @@ this.Property = function() {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
PositionAdjustingLexer.prototype.resetAcceptPosition = function(index, line, column) {


@ -0,0 +1,272 @@
writeln(s) ::= <<echo <s> . \PHP_EOL;>>
write(s) ::= <<echo <s>;>>
writeList(s) ::= <<echo <s; separator=".">;>>
False() ::= "false"
True() ::= "true"
Not(v) ::= "!<v>"
Assert(s) ::= <<assert(<s>);>>
Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> . <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "<s> = <v>;"
AssertIsList(v) ::= "assert(\is_array(<v>));" // just use static type system
AssignLocal(s,v) ::= "<s> = <v>;"
InitIntMember(n,v) ::= <%public \$<n> = <v>;%>
InitBooleanMember(n,v) ::= <%public \$<n> = <v>;%>
InitIntVar(n,v) ::= <%\$<n> = <v>;%>
IntArg(n) ::= "int <n>"
VarRef(n) ::= "$<n>"
GetMember(n) ::= <%\$this-><n>%>
SetMember(n,v) ::= <%\$this-><n> = <v>;%>
AddMember(n,v) ::= <%\$this-><n> += <v>;%>
PlusMember(v,n) ::= <%<v> + \$this-><n>%>
MemberEquals(n,v) ::= <%\$this-><n> == <v>%>
ModMemberEquals(n,m,v) ::= <%\$this-><n> % <m> == <v>%>
ModMemberNotEquals(n,m,v) ::= <%\$this-><n> % <m> !== <v>%>
DumpDFA() ::= "\$this->dumpDFA();"
Pass() ::= ""
StringList() ::= ""
BuildParseTrees() ::= "\$this->setBuildParseTree(true);"
BailErrorStrategy() ::= <%\$this->setErrorHandler(new Antlr\\Antlr4\\Runtime\\Error\\BailErrorStrategy());%>
ToStringTree(s) ::= <%<s>->toStringTree(\$this->getRuleNames())%>
Column() ::= "\$this->getCharPositionInLine()"
Text() ::= "\$this->getText()"
ValEquals(a,b) ::= <%<a>===<b>%>
TextEquals(a) ::= <%\$this->getText() === "<a>"%>
PlusText(a) ::= <%"<a>" . \$this->getText()%>
InputText() ::= "\$this->input->getText()"
LTEquals(i, v) ::= <%\$this->input->LT(<i>)->getText() === <v>%>
LANotEquals(i, v) ::= <%\$this->input->LA(<i>) !== <v>%>
TokenStartColumnEquals(i) ::= <%\$this->tokenStartCharPositionInLine === <i>%>
ImportListener(X) ::= ""
GetExpectedTokenNames() ::= "\$this->getExpectedTokens()->toStringVocabulary(\$this->getVocabulary())"
RuleInvocationStack() ::= "'[' . \implode(', ', \$this->getRuleInvocationStack()) . ']'"
LL_EXACT_AMBIG_DETECTION() ::= <<\$this->interp->setPredictionMode(Antlr\\Antlr4\\Runtime\\Atn\\PredictionMode::LL_EXACT_AMBIG_DETECTION);>>
ParserToken(parser, token) ::= <%<parser>::<token>%>
Production(p) ::= <%<p>%>
Result(r) ::= <%<r>%>
ParserPropertyMember() ::= <<
@members {
public function Property() : bool
{
return true;
}
}
>>
ParserPropertyCall(p, call) ::= "<p>-><call>"
PositionAdjustingLexerDef() ::= <<
class PositionAdjustingLexerATNSimulator extends LexerATNSimulator {
public function resetAcceptPosition(CharStream \$input, int \$index, int \$line, int \$charPositionInLine) : void {
\$input->seek(\$index);
\$this->line = \$line;
\$this->charPositionInLine = \$charPositionInLine;
\$this->consume(\$input);
}
}
>>
PositionAdjustingLexer() ::= <<
public function nextToken() : Antlr\\Antlr4\\Runtime\\Token
{
if (!\$this->interp instanceof PositionAdjustingLexerATNSimulator) {
\$this->interp = new PositionAdjustingLexerATNSimulator(\$this, self::\$atn, self::\$decisionToDFA, self::\$sharedContextCache);
}
return parent::nextToken();
}
public function emit() : Antlr\\Antlr4\\Runtime\\Token
{
switch (\$this->type) {
case self::TOKENS:
\$this->handleAcceptPositionForKeyword('tokens');
break;
case self::LABEL:
\$this->handleAcceptPositionForIdentifier();
break;
}
return parent::emit();
}
private function handleAcceptPositionForIdentifier() : bool
{
\$tokenText = \$this->getText();
\$identifierLength = 0;
while (\$identifierLength \< \strlen(\$tokenText) && self::isIdentifierChar(\$tokenText[\$identifierLength])) {
\$identifierLength++;
}
if (\$this->getInputStream()->getIndex() > \$this->tokenStartCharIndex + \$identifierLength) {
\$offset = \$identifierLength - 1;
\$this->getInterpreter()->resetAcceptPosition(\$this->getInputStream(), \$this->tokenStartCharIndex + \$offset, \$this->tokenStartLine, \$this->tokenStartCharPositionInLine + \$offset);
return true;
}
return false;
}
private function handleAcceptPositionForKeyword(string \$keyword) : bool
{
if (\$this->getInputStream()->getIndex() > \$this->tokenStartCharIndex + \strlen(\$keyword)) {
\$offset = \strlen(\$keyword) - 1;
\$this->getInterpreter()->resetAcceptPosition(\$this->getInputStream(), \$this->tokenStartCharIndex + \$offset, \$this->tokenStartLine, \$this->tokenStartCharPositionInLine + \$offset);
return true;
}
return false;
}
private static function isIdentifierChar(string \$c) : bool
{
return \ctype_alnum(\$c) || \$c === '_';
}
>>
BasicListener(X) ::= <<
@parser::definitions {
class LeafListener extends TBaseListener
{
public function visitTerminal(Antlr\\Antlr4\\Runtime\\Tree\\TerminalNode \$node) : void
{
echo \$node->getSymbol()->getText() . \PHP_EOL;
}
}
}
>>
WalkListener(s) ::= <<
\$walker = new Antlr\\Antlr4\\Runtime\\Tree\\ParseTreeWalker();
\$walker->walk(new LeafListener(), <s>);
>>
TreeNodeWithAltNumField(X) ::= <<
@parser::contexts {
class MyRuleNode extends ParserRuleContext
{
public \$altNum;
public function getAltNumber() : int
{
return \$this->altNum;
}
public function setAltNumber(int \$altNum) : void
{
\$this->altNum = \$altNum;
}
}
}
>>
TokenGetterListener(X) ::= <<
@parser::definitions {
class LeafListener extends TBaseListener {
public function exitA(Context\\AContext \$ctx) : void {
if (\$ctx->getChildCount() === 2) {
echo \sprintf('%s %s [%s]',\$ctx->INT(0)->getSymbol()->getText(), \$ctx->INT(1)->getSymbol()->getText(), \implode(', ', \$ctx->INT())) . \PHP_EOL;
} else {
echo \$ctx->ID()->getSymbol() . \PHP_EOL;
}
}
}
}
>>
RuleGetterListener(X) ::= <<
@parser::definitions {
class LeafListener extends TBaseListener {
public function exitA(Context\\AContext \$ctx) : void
{
if (\$ctx->getChildCount() === 2) {
echo \sprintf('%s %s %s', \$ctx->b(0)->start->getText(), \$ctx->b(1)->start->getText(),\$ctx->b()[0]->start->getText()) . \PHP_EOL;
} else {
echo \$ctx->b(0)->start->getText() . \PHP_EOL;
}
}
}
}
>>
LRListener(X) ::= <<
@parser::definitions {
class LeafListener extends TBaseListener {
public function exitE(Context\\EContext \$ctx) : void
{
if (\$ctx->getChildCount() === 3) {
echo \sprintf('%s %s %s', \$ctx->e(0)->start->getText(), \$ctx->e(1)->start->getText(), \$ctx->e()[0]->start->getText()) . \PHP_EOL;
} else {
echo \$ctx->INT()->getSymbol()->getText() . \PHP_EOL;
}
}
}
}
>>
LRWithLabelsListener(X) ::= <<
@parser::definitions {
class LeafListener extends TBaseListener
{
public function exitCall(Context\\CallContext \$ctx) : void {
echo \sprintf('%s %s',\$ctx->e()->start->getText(),\$ctx->eList()) . \PHP_EOL;
}
public function exitInt(Context\\IntContext \$ctx) : void {
echo \$ctx->INT()->getSymbol()->getText() . \PHP_EOL;
}
}
}
>>
DeclareContextListGettersFunction() ::= <<
public function foo() : void {
\$s = null;
\$a = \$s->a();
\$b = \$s->b();
}
>>
Declare_foo() ::= <<
public function foo() : void {
echo 'foo' . \PHP_EOL;
}
>>
Invoke_foo() ::= "\$this->foo();"
Declare_pred() ::= <<public function pred(bool \$v) : bool {
echo "eval=".(\$v ? 'true' : 'false') . \PHP_EOL;
return \$v;
}
>>
Invoke_pred(v) ::= "\$this->pred(<v>)"
ParserTokenType(t) ::= "Parser::<t>"
ContextRuleFunction(ctx, rule) ::= "<ctx>-><rule>"
StringType() ::= ""
ContextMember(ctx, subctx, member) ::= "<ctx>-><subctx>-><member>"


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + str(<b>)"
AppendStr(a,b) ::= "<a> + <b>"
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "<s> = <v>"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%<n> = <v>%>
InitBooleanMember(n,v) ::= <%<n> = <v>%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%self.<n>%>
SetMember(n,v) ::= <%self.<n> = <v>%>
@ -94,6 +102,8 @@ def Property(self):
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
def resetAcceptPosition(self, index, line, column):


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + str(<b>)"
AppendStr(a,b) ::= "<a> + <b>"
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "<s> = <v>"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%<n> = <v>%>
InitBooleanMember(n,v) ::= <%<n> = <v>%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%self.<n>%>
SetMember(n,v) ::= <%self.<n> = <v>%>
@ -99,6 +107,8 @@ def Property(self):
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
def resetAcceptPosition(self, index, line, column):


@ -14,6 +14,8 @@ Cast(t,v) ::= "<v>"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "var <s> = <v>;"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%this.<n> = <v>;%>
InitBooleanMember(n,v) ::= <%this.<n> = <v>;%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "<n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%this.<n>%>
SetMember(n,v) ::= <%this.<n> = <v>;%>
@ -98,6 +106,8 @@ this.Property = function() {
ParserPropertyCall(p, call) ::= "<p>.<call>"
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
PositionAdjustingLexer.prototype.resetAcceptPosition = function(index, line, column) {


@ -14,6 +14,8 @@ Cast(t,v) ::= "((<v> as! <t>))"
Append(a,b) ::= "<a> + <b>"
AppendStr(a,b) ::= <%<Append(a,b)>%>
Concat(a,b) ::= "<a><b>"
DeclareLocal(s,v) ::= "var <s> = <v>"
@ -26,6 +28,12 @@ InitIntMember(n,v) ::= <%var <n> = <v>%>
InitBooleanMember(n,v) ::= <%var <n> = <v>%>
InitIntVar(n,v) ::= <%<InitIntMember(n,v)>%>
IntArg(n) ::= "int <n>"
VarRef(n) ::= "<n>"
GetMember(n) ::= <%self.<n>%>
SetMember(n,v) ::= <%self.<n> = <v>%>
@ -96,6 +104,8 @@ ParserPropertyCall(parser, property) ::= <<
<parser>.<property>
>>
PositionAdjustingLexerDef() ::= ""
PositionAdjustingLexer() ::= <<
override


@ -50,6 +50,7 @@ public abstract class BaseRuntimeTest {
"Go",
"CSharp",
"Python2", "Python3",
"PHP",
"Node", "Safari", "Firefox", "Explorer", "Chrome"
};
public final static String[] JavaScriptTargets = {
@ -116,6 +117,7 @@ public abstract class BaseRuntimeTest {
System.out.printf("Ignore "+descriptor);
return;
}
if ( descriptor.getTestType().contains("Parser") ) {
testParser(descriptor);
}


@ -418,7 +418,7 @@ public class CompositeParsersDescriptors {
parser grammar S;
type_ : 'int' ;
decl : type_ ID ';'
| type_ ID init_ ';' {<write("\"JavaDecl: \" + $text")>};
| type_ ID init_ ';' {<AppendStr("\"JavaDecl: \"","$text"):writeln()>};
init_ : '=' INT;
*/
@CommentHasStringValue
@ -532,7 +532,7 @@ public class CompositeParsersDescriptors {
/**
parser grammar S;
a @after {<InitIntMember("x","0")>} : B;
a @after {<InitIntVar("x","0")>} : B;
*/
@CommentHasStringValue
public String slaveGrammarS;


@ -372,7 +372,7 @@ public class FullContextParsingDescriptors {
: expr_or_assign*;
expr_or_assign
: expr '++' {<writeln("\"fail.\"")>}
| expr {<writeln("\"pass: \"+$expr.text")>}
| expr {<AppendStr("\"pass: \"","$expr.text"):writeln()>}
;
expr: expr_primary ('\<-' ID)?;
expr_primary


@ -529,8 +529,8 @@ public class LeftRecursionDescriptors {
| e '+' e {$v = <Cast("BinaryContext","$ctx"):ContextMember({<Production("e")>(0)}, {<Result("v")>})> + <Cast("BinaryContext","$ctx"):ContextMember({<Production("e")>(1)}, {<Result("v")>})>;} # binary
| INT {$v = $INT.int;} # anInt
| '(' e ')' {$v = $e.v;} # parens
| left=e INC {<Cast("UnaryContext","$ctx"):Concat(".INC() != null"):Assert()>$v = $left.v + 1;} # unary
| left=e DEC {<Cast("UnaryContext","$ctx"):Concat(".DEC() != null"):Assert()>$v = $left.v - 1;} # unary
| left=e INC {<ContextRuleFunction(Cast("UnaryContext","$ctx"), "INC()"):Concat(" != null"):Assert()>$v = $left.v + 1;} # unary
| left=e DEC {<ContextRuleFunction(Cast("UnaryContext","$ctx"), "DEC()"):Concat(" != null"):Assert()>$v = $left.v - 1;} # unary
| ID {<AssignLocal("$v","3")>} # anID
;
ID : 'a'..'z'+ ;
@ -636,9 +636,9 @@ public class LeftRecursionDescriptors {
grammar T;
s : e {<writeln("$e.result")>} ;
e returns [<StringType()> result]
: ID '=' e1=e {$result = "(" + $ID.text + "=" + $e1.result + ")";}
: ID '=' e1=e {$result = <AppendStr("\"(\"", AppendStr("$ID.text", AppendStr("\"=\"", AppendStr("$e1.result", "\")\""))))>;}
| ID {$result = $ID.text;}
| e1=e '+' e2=e {$result = "(" + $e1.result + "+" + $e2.result + ")";}
| e1=e '+' e2=e {$result = <AppendStr("\"(\"", AppendStr("$e1.result", AppendStr("\"+\"", AppendStr("$e2.result", "\")\""))))>;}
;
ID : 'a'..'z'+ ;
INT : '0'..'9'+ ;


@ -766,6 +766,10 @@ public class LexerExecDescriptors {
/**
lexer grammar PositionAdjustingLexer;
@definitions {
<PositionAdjustingLexerDef()>
}
@members {
<PositionAdjustingLexer()>
}


@ -144,8 +144,8 @@ public class SemPredEvalParserDescriptors {
/**
grammar T;
s : b[2] ';' | b[2] '.' ; // decision in s drills down to ctx-dependent pred in a;
b[int i] : a[i] ;
a[int i]
b[<IntArg("i")>] : a[<VarRef("i")>] ;
a[<IntArg("i")>]
: {<ValEquals("$i","1")>}? ID {<writeln("\"alt 1\"")>}
| {<ValEquals("$i","2")>}? ID {<writeln("\"alt 2\"")>}
;
@ -310,7 +310,7 @@ public class SemPredEvalParserDescriptors {
grammar T;
@parser::members {<InitBooleanMember("enumKeyword",True())>}
primary
: ID {<writeln("\"ID \"+$ID.text")>}
: ID {<AppendStr("\"ID \"", "$ID.text"):writeln()>}
| {<GetMember("enumKeyword"):Not()>}? 'enum' {<writeln("\"enum\"")>}
;
ID : [a-z]+ ;


@ -0,0 +1,599 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.antlr.v4.Tool;
import org.antlr.v4.automata.LexerATNFactory;
import org.antlr.v4.automata.ParserATNFactory;
import org.antlr.v4.runtime.atn.ATN;
import org.antlr.v4.runtime.atn.ATNDeserializer;
import org.antlr.v4.runtime.atn.ATNSerializer;
import org.antlr.v4.semantics.SemanticPipeline;
import org.antlr.v4.test.runtime.ErrorQueue;
import org.antlr.v4.test.runtime.RuntimeTestSupport;
import org.antlr.v4.test.runtime.StreamVacuum;
import org.antlr.v4.tool.Grammar;
import org.antlr.v4.tool.LexerGrammar;
import org.stringtemplate.v4.ST;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.antlrOnString;
import static org.antlr.v4.test.runtime.BaseRuntimeTest.writeFile;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class BasePHPTest implements RuntimeTestSupport {
public static final String newline = System.getProperty("line.separator");
public String tmpdir = null;
/**
* If error during parser execution, store stderr here; can't return
* stdout and stderr. This doesn't trap errors from running antlr.
*/
protected String stderrDuringParse;
/**
* Errors found while running antlr
*/
protected StringBuilder antlrToolErrors;
private String getPropertyPrefix() {
return "antlr-php";
}
@Override
public void testSetUp() throws Exception {
// new output dir for each test
String propName = getPropertyPrefix() + "-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0) {
tmpdir = prop;
} else {
String classSimpleName = getClass().getSimpleName();
String threadName = Thread.currentThread().getName();
String childPath = String.format("%s-%s-%s", classSimpleName, threadName, System.currentTimeMillis());
tmpdir = new File(System.getProperty("java.io.tmpdir"), childPath).getAbsolutePath();
}
antlrToolErrors = new StringBuilder();
}
@Override
public void testTearDown() throws Exception {
}
@Override
public String getTmpDir() {
return tmpdir;
}
@Override
public String getStdout() {
return null;
}
@Override
public String getParseErrors() {
return stderrDuringParse;
}
@Override
public String getANTLRToolErrors() {
if (antlrToolErrors.length() == 0) {
return null;
}
return antlrToolErrors.toString();
}
protected ATN createATN(Grammar g, boolean useSerializer) {
if (g.atn == null) {
semanticProcess(g);
assertEquals(0, g.tool.getNumErrors());
ParserATNFactory f;
if (g.isLexer()) {
f = new LexerATNFactory((LexerGrammar) g);
} else {
f = new ParserATNFactory(g);
}
g.atn = f.createATN();
assertEquals(0, g.tool.getNumErrors());
}
ATN atn = g.atn;
if (useSerializer) {
char[] serialized = ATNSerializer.getSerializedAsChars(atn);
return new ATNDeserializer().deserialize(serialized);
}
return atn;
}
protected void semanticProcess(Grammar g) {
if (g.ast != null && !g.ast.hasErrors) {
Tool antlr = new Tool();
SemanticPipeline sem = new SemanticPipeline(g);
sem.process();
if (g.getImportedGrammars() != null) {
for (Grammar imp: g.getImportedGrammars()) {
antlr.processNonCombinedGrammar(imp, false);
}
}
}
}
protected String execLexer(
String grammarFileName,
String grammarStr,
String lexerName,
String input
) {
return execLexer(grammarFileName, grammarStr, lexerName, input, false);
}
@Override
public String execLexer(
String grammarFileName,
String grammarStr,
String lexerName,
String input,
boolean showDFA
) {
boolean success = rawGenerateAndBuildRecognizer(
grammarFileName,
grammarStr,
null,
lexerName,
"-no-listener"
);
assertTrue(success);
writeFile(tmpdir, "input", input);
writeLexerTestFile(lexerName, showDFA);
String output = execModule("Test.php");
return output;
}
public String execParser(
String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
String listenerName,
String visitorName,
String startRuleName,
String input,
boolean showDiagnosticErrors
) {
return execParser_(
grammarFileName,
grammarStr,
parserName,
lexerName,
listenerName,
visitorName,
startRuleName,
input,
showDiagnosticErrors,
false
);
}
public String execParser_(
String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
String listenerName,
String visitorName,
String startRuleName,
String input,
boolean debug,
boolean trace
) {
boolean success = rawGenerateAndBuildRecognizer(
grammarFileName,
grammarStr,
parserName,
lexerName,
"-visitor"
);
assertTrue(success);
writeFile(tmpdir, "input", input);
rawBuildRecognizerTestFile(
parserName,
lexerName,
listenerName,
visitorName,
startRuleName,
debug,
trace
);
return execRecognizer();
}
/**
* Return true if all is well
*/
protected boolean rawGenerateAndBuildRecognizer(
String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
String... extraOptions
) {
return rawGenerateAndBuildRecognizer(
grammarFileName,
grammarStr,
parserName,
lexerName,
false,
extraOptions
);
}
/**
* Return true if all is well
*/
protected boolean rawGenerateAndBuildRecognizer(
String grammarFileName,
String grammarStr,
String parserName,
String lexerName,
boolean defaultListener,
String... extraOptions
) {
ErrorQueue equeue = antlrOnString(getTmpDir(), "PHP", grammarFileName, grammarStr, defaultListener, extraOptions);
if (!equeue.errors.isEmpty()) {
return false;
}
List<String> files = new ArrayList<String>();
if (lexerName != null) {
files.add(lexerName + ".php");
}
if (parserName != null) {
files.add(parserName + ".php");
Set<String> optionsSet = new HashSet<String>(Arrays.asList(extraOptions));
if (!optionsSet.contains("-no-listener")) {
files.add(grammarFileName.substring(0, grammarFileName.lastIndexOf('.')) + "Listener.php");
}
if (optionsSet.contains("-visitor")) {
files.add(grammarFileName.substring(0, grammarFileName.lastIndexOf('.')) + "Visitor.php");
}
}
return true;
}
protected void rawBuildRecognizerTestFile(
String parserName,
String lexerName,
String listenerName,
String visitorName,
String parserStartRuleName,
boolean debug,
boolean trace
) {
this.stderrDuringParse = null;
if (parserName == null) {
writeLexerTestFile(lexerName, false);
} else {
writeParserTestFile(
parserName,
lexerName,
listenerName,
visitorName,
parserStartRuleName,
debug,
trace
);
}
}
public String execRecognizer() {
return execModule("Test.php");
}
public String execModule(String fileName) {
String phpPath = locatePhp();
String runtimePath = locateRuntime();
File tmpdirFile = new File(tmpdir);
String modulePath = new File(tmpdirFile, fileName).getAbsolutePath();
String inputPath = new File(tmpdirFile, "input").getAbsolutePath();
Path outputPath = tmpdirFile.toPath().resolve("output").toAbsolutePath();
try {
ProcessBuilder builder = new ProcessBuilder(phpPath, modulePath, inputPath, outputPath.toString());
builder.environment().put("RUNTIME", runtimePath);
builder.directory(tmpdirFile);
Process process = builder.start();
StreamVacuum stdoutVacuum = new StreamVacuum(process.getInputStream());
StreamVacuum stderrVacuum = new StreamVacuum(process.getErrorStream());
stdoutVacuum.start();
stderrVacuum.start();
process.waitFor();
stdoutVacuum.join();
stderrVacuum.join();
String output = stdoutVacuum.toString();
if (output.length() == 0) {
output = null;
}
if (stderrVacuum.toString().length() > 0) {
this.stderrDuringParse = stderrVacuum.toString();
}
return output;
} catch (Exception e) {
System.err.println("can't exec recognizer");
e.printStackTrace(System.err);
}
return null;
}
private String locateTool(String tool) {
final String phpPath = System.getProperty("PHP_PATH");
if (phpPath != null && new File(phpPath).exists()) {
return phpPath;
}
String[] roots = {"/usr/local/bin/", "/opt/local/bin", "/usr/bin/"};
for (String root: roots) {
if (new File(root + tool).exists()) {
return root + tool;
}
}
throw new RuntimeException("Could not locate " + tool);
}
protected String locatePhp() {
String propName = getPropertyPrefix() + "-php";
String prop = System.getProperty(propName);
if (prop == null || prop.length() == 0) {
prop = locateTool("php");
}
File file = new File(prop);
if (!file.exists()) {
throw new RuntimeException("Missing system property:" + propName);
}
return file.getAbsolutePath();
}
protected String locateRuntime() {
String propName = "antlr-php-runtime";
String prop = System.getProperty(propName);
if (prop == null || prop.length() == 0) {
prop = "../runtime/PHP";
}
File file = new File(prop);
if (!file.exists()) {
throw new RuntimeException("Missing system property:" + propName);
}
try {
return file.getCanonicalPath();
} catch (IOException e) {
return file.getAbsolutePath();
}
}
protected void mkdir(String dir) {
File f = new File(dir);
f.mkdirs();
}
protected void writeLexerTestFile(String lexerName, boolean showDFA) {
ST outputFileST = new ST(
"\\<?php\n"
+ "\n"
+ "declare(strict_types=1);\n"
+ "\n"
+ "use Antlr\\Antlr4\\Runtime\\CommonTokenStream;\n"
+ "use Antlr\\Antlr4\\Runtime\\Error\\Listeners\\ConsoleErrorListener;\n"
+ "use Antlr\\Antlr4\\Runtime\\InputStream;\n"
+ "use Antlr\\Antlr4\\Runtime\\Lexer;\n"
+ "\n"
+ "$runtime = \\getenv('RUNTIME');\n"
+ "\n"
+ "\\spl_autoload_register(function (string $class) use ($runtime) : void {\n"
+ " $file = \\str_replace('\\\\\\', \\DIRECTORY_SEPARATOR, \\str_replace('Antlr\\Antlr4\\Runtime\\\\\\', $runtime . '\\\\\\src\\\\\\', $class)) . '.php';\n"
+ "\n"
+ " if (\\file_exists($file)) {\n"
+ " require_once $file; \n"
+ " }\n"
+ "});"
+ "\n"
+ "$input = InputStream::fromPath($argv[1]);\n"
+ "$lexer = new <lexerName>($input);\n"
+ "$lexer->addErrorListener(new ConsoleErrorListener());"
+ "$tokens = new CommonTokenStream($lexer);\n"
+ "$tokens->fill();\n"
+ "\n"
+ "foreach ($tokens->getAllTokens() as $token) {\n"
+ " echo $token . \\PHP_EOL;\n"
+ "}"
+ (showDFA
? "echo $lexer->getInterpreter()->getDFA(Lexer::DEFAULT_MODE)->toLexerString();\n"
: "")
);
outputFileST.add("lexerName", lexerName);
writeFile(tmpdir, "Test.php", outputFileST.render());
}
protected void writeParserTestFile(
String parserName, String lexerName,
String listenerName, String visitorName,
String parserStartRuleName, boolean debug, boolean trace
) {
if (!parserStartRuleName.endsWith(")")) {
parserStartRuleName += "()";
}
ST outputFileST = new ST(
"\\<?php\n"
+ "\n"
+ "declare(strict_types=1);\n"
+ "\n"
+ "use Antlr\\Antlr4\\Runtime\\CommonTokenStream;\n"
+ "use Antlr\\Antlr4\\Runtime\\Error\\Listeners\\DiagnosticErrorListener;\n"
+ "use Antlr\\Antlr4\\Runtime\\Error\\Listeners\\ConsoleErrorListener;\n"
+ "use Antlr\\Antlr4\\Runtime\\InputStream;\n"
+ "use Antlr\\Antlr4\\Runtime\\ParserRuleContext;\n"
+ "use Antlr\\Antlr4\\Runtime\\Tree\\ErrorNode;\n"
+ "use Antlr\\Antlr4\\Runtime\\Tree\\ParseTreeListener;\n"
+ "use Antlr\\Antlr4\\Runtime\\Tree\\ParseTreeWalker;\n"
+ "use Antlr\\Antlr4\\Runtime\\Tree\\RuleNode;\n"
+ "use Antlr\\Antlr4\\Runtime\\Tree\\TerminalNode;\n"
+ "\n"
+ "$runtime = \\getenv('RUNTIME');\n"
+ "\n"
+ "\\spl_autoload_register(function (string $class) use ($runtime) : void {\n"
+ " $file = \\str_replace('\\\\\\', \\DIRECTORY_SEPARATOR, \\str_replace('Antlr\\Antlr4\\Runtime\\\\\\', $runtime . '\\\\\\src\\\\\\', $class)) . '.php';\n"
+ "\n"
+ " if (\\file_exists($file)) {\n"
+ " require_once $file; \n"
+ " }\n"
+ "});\n"
+ "\n"
+ "final class TreeShapeListener implements ParseTreeListener {\n"
+ " public function visitTerminal(TerminalNode $node) : void {}\n"
+ " public function visitErrorNode(ErrorNode $node) : void {}\n"
+ " public function exitEveryRule(ParserRuleContext $ctx) : void {}\n"
+ "\n"
+ " public function enterEveryRule(ParserRuleContext $ctx) : void {\n"
+ " for ($i = 0, $count = $ctx->getChildCount(); $i \\< $count; $i++) {\n"
+ " $parent = $ctx->getChild($i)->getParent();\n"
+ "\n"
+ " if (!($parent instanceof RuleNode) || $parent->getRuleContext() !== $ctx) {\n"
+ " throw new RuntimeException('Invalid parse tree shape detected.');\n"
+ " }\n"
+ " }\n"
+ " }\n"
+ "}"
+ "\n"
+ "$input = InputStream::fromPath($argv[1]);\n"
+ "$lexer = new <lexerName>($input);\n"
+ "$lexer->addErrorListener(new ConsoleErrorListener());"
+ "$tokens = new CommonTokenStream($lexer);\n"
+ "<createParser>"
+ "$parser->addErrorListener(new ConsoleErrorListener());"
+ "$parser->setBuildParseTree(true);\n"
+ "$tree = $parser-><parserStartRuleName>;\n\n"
+ "ParseTreeWalker::default()->walk(new TreeShapeListener(), $tree);\n"
);
String stSource = "$parser = new <parserName>($tokens);\n";
if (debug) {
stSource += "$parser->addErrorListener(new DiagnosticErrorListener());\n";
}
if (trace) {
stSource += "$parser->setTrace(true);\n";
}
ST createParserST = new ST(stSource);
outputFileST.add("createParser", createParserST);
outputFileST.add("parserName", parserName);
outputFileST.add("lexerName", lexerName);
outputFileST.add("listenerName", listenerName);
outputFileST.add("visitorName", visitorName);
outputFileST.add("parserStartRuleName", parserStartRuleName);
writeFile(tmpdir, "Test.php", outputFileST.render());
}
protected void eraseFiles(File dir) {
String[] files = dir.list();
for (int i = 0; files != null && i < files.length; i++) {
new File(dir, files[i]).delete();
}
}
@Override
public void eraseTempDir() {
boolean doErase = true;
String propName = getPropertyPrefix() + "-erase-test-dir";
String prop = System.getProperty(propName);
if (prop != null && prop.length() > 0) {
doErase = Boolean.getBoolean(prop);
}
if (doErase) {
File tmpdirF = new File(tmpdir);
if (tmpdirF.exists()) {
eraseFiles(tmpdirF);
tmpdirF.delete();
}
}
}
/**
* Sort a list
*/
public <T extends Comparable<? super T>> List<T> sort(List<T> data) {
List<T> dup = new ArrayList<T>();
dup.addAll(data);
Collections.sort(dup);
return dup;
}
/**
* Return map sorted by key
*/
public <K extends Comparable<? super K>, V> LinkedHashMap<K, V> sort(Map<K, V> data) {
LinkedHashMap<K, V> dup = new LinkedHashMap<K, V>();
List<K> keys = new ArrayList<K>();
keys.addAll(data.keySet());
Collections.sort(keys);
for (K k: keys) {
dup.put(k, data.get(k));
}
return dup;
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.CompositeLexersDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestCompositeLexers extends BaseRuntimeTest {
public TestCompositeLexers(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(CompositeLexersDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.CompositeParsersDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestCompositeParsers extends BaseRuntimeTest {
public TestCompositeParsers(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(CompositeParsersDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.FullContextParsingDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestFullContextParsing extends BaseRuntimeTest {
public TestFullContextParsing(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(FullContextParsingDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.LeftRecursionDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestLeftRecursion extends BaseRuntimeTest {
public TestLeftRecursion(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(LeftRecursionDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.LexerErrorsDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestLexerErrors extends BaseRuntimeTest {
public TestLexerErrors(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(LexerErrorsDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.LexerExecDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestLexerExec extends BaseRuntimeTest {
public TestLexerExec(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(LexerExecDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.ListenersDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestListeners extends BaseRuntimeTest {
public TestListeners(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(ListenersDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.ParseTreesDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestParseTrees extends BaseRuntimeTest {
public TestParseTrees(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(ParseTreesDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.ParserErrorsDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestParserErrors extends BaseRuntimeTest {
public TestParserErrors(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(ParserErrorsDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.ParserExecDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestParserExec extends BaseRuntimeTest {
public TestParserExec(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(ParserExecDescriptors.class, "PHP");
}
}


@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.PerformanceDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestPerformance extends BaseRuntimeTest {
public TestPerformance(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(PerformanceDescriptors.class, "PHP");
}
}

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.SemPredEvalLexerDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestSemPredEvalLexer extends BaseRuntimeTest {
public TestSemPredEvalLexer(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(SemPredEvalLexerDescriptors.class, "PHP");
}
}

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.SemPredEvalParserDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestSemPredEvalParser extends BaseRuntimeTest {
public TestSemPredEvalParser(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(SemPredEvalParserDescriptors.class, "PHP");
}
}

View File

@ -0,0 +1,25 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.test.runtime.php;
import org.antlr.v4.test.runtime.BaseRuntimeTest;
import org.antlr.v4.test.runtime.RuntimeTestDescriptor;
import org.antlr.v4.test.runtime.descriptors.SetsDescriptors;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@RunWith(Parameterized.class)
public class TestSets extends BaseRuntimeTest {
public TestSets(RuntimeTestDescriptor descriptor) {
super(descriptor,new BasePHPTest());
}
@Parameterized.Parameters(name="{0}")
public static RuntimeTestDescriptor[] getAllTestDescriptors() {
return BaseRuntimeTest.getRuntimeTestDescriptors(SetsDescriptors.class, "PHP");
}
}

View File

@ -78,8 +78,6 @@ public class LexerATNSimulator extends ATNSimulator {
protected final SimState prevAccept = new SimState();
public static int match_calls = 0;
public LexerATNSimulator(ATN atn, DFA[] decisionToDFA,
PredictionContextCache sharedContextCache)
{
@ -103,7 +101,6 @@ public class LexerATNSimulator extends ATNSimulator {
}
public int match(CharStream input, int mode) {
match_calls++;
this.mode = mode;
int mark = input.mark();
try {

View File

@ -7,6 +7,7 @@
var RuleContext = require('./RuleContext').RuleContext;
var Hash = require('./Utils').Hash;
var Map = require('./Utils').Map;
function PredictionContext(cachedHashCode) {
this.cachedHashCode = cachedHashCode;
@ -79,7 +80,7 @@ function calculateHashString(parent, returnState) {
// can be used for both lexers and parsers.
function PredictionContextCache() {
this.cache = {};
this.cache = new Map();
return this;
}
@ -91,16 +92,16 @@ PredictionContextCache.prototype.add = function(ctx) {
if (ctx === PredictionContext.EMPTY) {
return PredictionContext.EMPTY;
}
var existing = this.cache[ctx] || null;
var existing = this.cache.get(ctx) || null;
if (existing !== null) {
return existing;
}
this.cache[ctx] = ctx;
this.cache.put(ctx, ctx);
return ctx;
};
PredictionContextCache.prototype.get = function(ctx) {
return this.cache[ctx] || null;
return this.cache.get(ctx) || null;
};
Object.defineProperty(PredictionContextCache.prototype, "length", {
@ -111,11 +112,13 @@ Object.defineProperty(PredictionContextCache.prototype, "length", {
function SingletonPredictionContext(parent, returnState) {
var hashCode = 0;
var hash = new Hash();
if(parent !== null) {
var hash = new Hash();
hash.update(parent, returnState);
hashCode = hash.finish();
} else {
hash.update(1);
}
hashCode = hash.finish();
PredictionContext.call(this, hashCode);
this.parentCtx = parent;
this.returnState = returnState;
@ -640,16 +643,16 @@ function mergeArrays(a, b, rootIsWildcard, mergeCache) {
// ones.
// /
function combineCommonParents(parents) {
var uniqueParents = {};
var uniqueParents = new Map();
for (var p = 0; p < parents.length; p++) {
var parent = parents[p];
if (!(parent in uniqueParents)) {
uniqueParents[parent] = parent;
if (!(uniqueParents.containsKey(parent))) {
uniqueParents.put(parent, parent);
}
}
for (var q = 0; q < parents.length; q++) {
parents[q] = uniqueParents[parents[q]];
parents[q] = uniqueParents.get(parents[q]);
}
}
@ -657,13 +660,13 @@ function getCachedPredictionContext(context, contextCache, visited) {
if (context.isEmpty()) {
return context;
}
var existing = visited[context] || null;
var existing = visited.get(context) || null;
if (existing !== null) {
return existing;
}
existing = contextCache.get(context);
if (existing !== null) {
visited[context] = existing;
visited.put(context, existing);
return existing;
}
var changed = false;
@ -683,7 +686,7 @@ function getCachedPredictionContext(context, contextCache, visited) {
}
if (!changed) {
contextCache.add(context);
visited[context] = context;
visited.put(context, context);
return context;
}
var updated = null;
@ -696,8 +699,8 @@ function getCachedPredictionContext(context, contextCache, visited) {
updated = new ArrayPredictionContext(parents, context.returnStates);
}
contextCache.add(updated);
visited[updated] = updated;
visited[context] = updated;
visited.put(updated, updated);
visited.put(context, updated);
return updated;
}
@ -708,13 +711,13 @@ function getAllContextNodes(context, nodes, visited) {
nodes = [];
return getAllContextNodes(context, nodes, visited);
} else if (visited === null) {
visited = {};
visited = new Map();
return getAllContextNodes(context, nodes, visited);
} else {
if (context === null || visited[context] !== null) {
if (context === null || visited.containsKey(context)) {
return nodes;
}
visited[context] = context;
visited.put(context, context);
nodes.push(context);
for (var i = 0; i < context.length; i++) {
getAllContextNodes(context.getParent(i), nodes, visited);
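
For context on the PredictionContext.js hunks above: the plain `{}` caches are swapped for the `Map` exported by `./Utils` because JavaScript coerces object property keys to strings, so distinct prediction contexts whose string forms match silently collide and the cache never consults real hash/equality semantics. A minimal standalone sketch of that pitfall (the `Ctx` type below is illustrative, not part of the runtime):

// Standalone illustration (not runtime code): object keys are stringified.
function Ctx(id) { this.id = id; }
Ctx.prototype.toString = function() { return "ctx"; }; // many contexts can share a string form

var objectCache = {};
objectCache[new Ctx(1)] = "first";
objectCache[new Ctx(2)] = "second";
console.log(Object.keys(objectCache).length); // 1 -- the second entry overwrote the first
console.log(objectCache[new Ctx(3)]);         // "second" -- any Ctx "hits", regardless of identity

A hash/equality-keyed map such as the Utils `Map` keeps such entries distinct, which is what a prediction-context cache needs.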

View File

@ -323,7 +323,9 @@ AltDict.prototype.values = function () {
});
};
function DoubleDict() {
function DoubleDict(defaultMapCtor) {
this.defaultMapCtor = defaultMapCtor || Map;
this.cacheMap = new this.defaultMapCtor();
return this;
}
@ -339,7 +341,7 @@ Hash.prototype.update = function () {
if (value == null)
continue;
if(Array.isArray(value))
this.update.apply(value);
this.update.apply(this, value);
else {
var k = 0;
switch (typeof(value)) {
@ -354,7 +356,10 @@ Hash.prototype.update = function () {
k = value.hashCode();
break;
default:
value.updateHashCode(this);
if(value.updateHashCode)
value.updateHashCode(this);
else
console.log("No updateHashCode for " + value.toString())
continue;
}
k = k * 0xCC9E2D51;
@ -367,7 +372,7 @@ Hash.prototype.update = function () {
this.hash = hash;
}
}
}
};
Hash.prototype.finish = function () {
var hash = this.hash ^ (this.count * 4);
@ -377,26 +382,26 @@ Hash.prototype.finish = function () {
hash = hash * 0xC2B2AE35;
hash = hash ^ (hash >>> 16);
return hash;
}
};
function hashStuff() {
var hash = new Hash();
hash.update.apply(arguments);
hash.update.apply(hash, arguments);
return hash.finish();
}
DoubleDict.prototype.get = function (a, b) {
var d = this[a] || null;
return d === null ? null : (d[b] || null);
var d = this.cacheMap.get(a) || null;
return d === null ? null : (d.get(b) || null);
};
DoubleDict.prototype.set = function (a, b, o) {
var d = this[a] || null;
var d = this.cacheMap.get(a) || null;
if (d === null) {
d = {};
this[a] = d;
d = new this.defaultMapCtor();
this.cacheMap.put(a, d);
}
d[b] = o;
d.put(b, o);
};
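
One detail worth spelling out from the Utils.js hunk above: `Function.prototype.apply` takes the receiver (`this`) as its first argument and the argument array as its second, so the earlier `this.update.apply(value)` and `hash.update.apply(arguments)` calls passed the array as the receiver and forwarded no arguments at all. A minimal standalone sketch of the difference (the `update` function here is illustrative):

// Standalone illustration of apply(): first parameter is "this", second is the argument list.
function update() { return Array.prototype.slice.call(arguments); }

var args = [1, 2, 3];
console.log(update.apply(args));        // [] -- args became the receiver; nothing was forwarded
console.log(update.apply(null, args));  // [ 1, 2, 3 ] -- arguments forwarded as intended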

View File

@ -175,7 +175,7 @@ ATNConfigSet.prototype.equals = function(other) {
ATNConfigSet.prototype.hashCode = function() {
var hash = new Hash();
this.updateHashCode(hash);
hash.update(this.configs);
return hash.finish();
};
@ -183,13 +183,11 @@ ATNConfigSet.prototype.hashCode = function() {
ATNConfigSet.prototype.updateHashCode = function(hash) {
if (this.readOnly) {
if (this.cachedHashCode === -1) {
var hash = new Hash();
hash.update(this.configs);
this.cachedHashCode = hash.finish();
this.cachedHashCode = this.hashCode();
}
hash.update(this.cachedHashCode);
} else {
hash.update(this.configs);
hash.update(this.hashCode());
}
};

View File

@ -138,7 +138,7 @@ ATNDeserializer.prototype.deserialize = function(data) {
ATNDeserializer.prototype.reset = function(data) {
var adjust = function(c) {
var v = c.charCodeAt(0);
return v>1 ? v-2 : v + 65533;
return v>1 ? v-2 : v + 65534;
};
var temp = data.split("").map(adjust);
// don't adjust the first value since that's the version number
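
On the ATNDeserializer.js hunk above: assuming the serializer shifts each 16-bit value by +2 modulo 0x10000 (an assumption; the encoding side is not shown in this diff), the exact inverse for the two wrapped values 0 and 1 is `v + 65534`, not `v + 65533`. A quick standalone arithmetic check:

// Standalone check: inverse of a +2 shift modulo 0x10000 for the wrapped values.
function adjust(v) { return v > 1 ? v - 2 : v + 65534; }

console.log(adjust((0xFFFE + 2) % 0x10000)); // 65534 -- 0xFFFE round-trips
console.log(adjust((0xFFFF + 2) % 0x10000)); // 65535 -- 0xFFFF round-trips
console.log(adjust(5));                      // 3     -- ordinary values are simply shifted back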

View File

@ -8,6 +8,7 @@
var DFAState = require('./../dfa/DFAState').DFAState;
var ATNConfigSet = require('./ATNConfigSet').ATNConfigSet;
var getCachedPredictionContext = require('./../PredictionContext').getCachedPredictionContext;
var Map = require('./../Utils').Map;
function ATNSimulator(atn, sharedContextCache) {
@ -44,7 +45,7 @@ ATNSimulator.prototype.getCachedContext = function(context) {
if (this.sharedContextCache ===null) {
return context;
}
var visited = {};
var visited = new Map();
return getCachedPredictionContext(context, this.sharedContextCache, visited);
};

View File

@ -139,12 +139,6 @@ DFAState.prototype.toString = function() {
DFAState.prototype.hashCode = function() {
var hash = new Hash();
hash.update(this.configs);
if(this.isAcceptState) {
if (this.predicates !== null)
hash.update(this.predicates);
else
hash.update(this.prediction);
}
return hash.finish();
};
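
On the DFAState.js hunk above: assuming DFAState equality is defined over the configuration set alone (as in the Java runtime), also hashing `isAcceptState`/`predicates`/`prediction` can give two equal states different hash codes, breaking hash-based DFA state lookup. A standalone sketch of that hash/equality mismatch (field and function names here are illustrative):

// Standalone illustration: equal-by-configs states must hash identically.
function hashOld(s) { return s.configsHash + (s.isAcceptState ? s.prediction : 0); }
function hashNew(s) { return s.configsHash; }

var a = { configsHash: 42, isAcceptState: false, prediction: 0 };
var b = { configsHash: 42, isAcceptState: true,  prediction: 1 }; // same configs, so "equal"
console.log(hashOld(a) === hashOld(b)); // false -- equal states land in different buckets
console.log(hashNew(a) === hashNew(b)); // true  -- hash now agrees with equality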

1
runtime/PHP Submodule

@ -0,0 +1 @@
Subproject commit 9e1b759d02220eedf477771169bdfb6a186a2984

View File

@ -47,7 +47,7 @@
# <p>
# Whitespace is not allowed.</p>
#
from antlr4 import CommonTokenStream, DFA, PredictionContextCache, Lexer, LexerATNSimulator
from antlr4 import CommonTokenStream, DFA, PredictionContextCache, Lexer, LexerATNSimulator, ParserRuleContext, TerminalNode
from antlr4.InputStream import InputStream
from antlr4.Parser import Parser
from antlr4.RuleContext import RuleContext
@ -134,7 +134,7 @@ class XPathLexer(Lexer):
if _action is not None:
_action(localctx, actionIndex)
else:
raise Exception("No registered action for:" + str(ruleIndex))
raise Exception("No registered action for: %d" % ruleIndex)
def ID_action(self, localctx:RuleContext , actionIndex:int):
if actionIndex == 0:
@ -166,40 +166,40 @@ class XPath(object):
try:
tokenStream.fill()
except LexerNoViableAltException as e:
pos = lexer.getColumn()
msg = "Invalid tokens or characters at index " + str(pos) + " in path '" + path + "'"
pos = lexer.column
msg = "Invalid tokens or characters at index %d in path '%s'" % (pos, path)
raise Exception(msg, e)
tokens = tokenStream.getTokens()
tokens = iter(tokenStream.tokens)
elements = list()
n = len(tokens)
i=0
while i < n :
el = tokens[i]
next = None
for el in tokens:
invert = False
anywhere = False
# Check for path separators, if none assume root
if el.type in [XPathLexer.ROOT, XPathLexer.ANYWHERE]:
anywhere = el.type == XPathLexer.ANYWHERE
i += 1
next = tokens[i]
invert = next.type==XPathLexer.BANG
if invert:
i += 1
next = tokens[i]
pathElement = self.getXPathElement(next, anywhere)
pathElement.invert = invert
elements.append(pathElement)
i += 1
elif el.type in [XPathLexer.TOKEN_REF, XPathLexer.RULE_REF, XPathLexer.WILDCARD] :
elements.append( self.getXPathElement(el, False) )
i += 1
elif el.type==Token.EOF :
break
anywhere = el.type == XPathLexer.ANYWHERE
next_el = next(tokens, None)
if not next_el:
raise Exception('Missing element after %s' % el.getText())
else:
el = next_el
# Check for bangs
if el.type == XPathLexer.BANG:
invert = True
next_el = next(tokens, None)
if not next_el:
raise Exception('Missing element after %s' % el.getText())
else:
el = next_el
# Add searched element
if el.type in [XPathLexer.TOKEN_REF, XPathLexer.RULE_REF, XPathLexer.WILDCARD, XPathLexer.STRING]:
element = self.getXPathElement(el, anywhere)
element.invert = invert
elements.append(element)
elif el.type==Token.EOF:
break
else:
raise Exception("Unknown path element " + str(el))
raise Exception("Unknown path element %s" % lexer.symbolicNames[el.type])
return elements
#
@ -210,24 +210,31 @@ class XPath(object):
def getXPathElement(self, wordToken:Token, anywhere:bool):
if wordToken.type==Token.EOF:
raise Exception("Missing path element at end of path")
word = wordToken.text
ttype = self.parser.getTokenType(word)
ruleIndex = self.parser.getRuleIndex(word)
if wordToken.type==XPathLexer.WILDCARD :
return XPathWildcardAnywhereElement() if anywhere else XPathWildcardElement()
elif wordToken.type in [XPathLexer.TOKEN_REF, XPathLexer.STRING]:
tsource = self.parser.getTokenStream().tokenSource
if ttype==Token.INVALID_TYPE:
raise Exception( word + " at index " + str(wordToken.startIndex) + " isn't a valid token name")
ttype = Token.INVALID_TYPE
if wordToken.type == XPathLexer.TOKEN_REF:
if word in tsource.ruleNames:
ttype = tsource.ruleNames.index(word) + 1
else:
if word in tsource.literalNames:
ttype = tsource.literalNames.index(word)
if ttype == Token.INVALID_TYPE:
raise Exception("%s at index %d isn't a valid token name" % (word, wordToken.tokenIndex))
return XPathTokenAnywhereElement(word, ttype) if anywhere else XPathTokenElement(word, ttype)
else:
ruleIndex = self.parser.ruleNames.index(word) if word in self.parser.ruleNames else -1
if ruleIndex==-1:
raise Exception( word + " at index " + str(wordToken.getStartIndex()) + " isn't a valid rule name")
if ruleIndex == -1:
raise Exception("%s at index %d isn't a valid rule name" % (word, wordToken.tokenIndex))
return XPathRuleAnywhereElement(word, ruleIndex) if anywhere else XPathRuleElement(word, ruleIndex)
@ -246,18 +253,21 @@ class XPath(object):
dummyRoot.children = [t] # don't set t's parent.
work = [dummyRoot]
for i in range(0, len(self.elements)):
next = set()
for element in self.elements:
work_next = list()
for node in work:
if len( node.children) > 0 :
if not isinstance(node, TerminalNode) and node.children:
# only try to match next element if it has children
# e.g., //func/*/stat might have a token node for which
# we can't go looking for stat nodes.
matching = self.elements[i].evaluate(node)
next |= matching
i += 1
work = next
matching = element.evaluate(node)
# See issue antlr#370 - Prevents XPath from returning the
# same node multiple times
matching = filter(lambda m: m not in work_next, matching)
work_next.extend(matching)
work = work_next
return work
@ -283,8 +293,8 @@ class XPathRuleAnywhereElement(XPathElement):
self.ruleIndex = ruleIndex
def evaluate(self, t:ParseTree):
return Trees.findAllRuleNodes(t, self.ruleIndex)
# return all ParserRuleContext descendants of t that match ruleIndex (or do not match if inverted)
return filter(lambda c: isinstance(c, ParserRuleContext) and (self.invert ^ (c.getRuleIndex() == self.ruleIndex)), Trees.descendants(t))
class XPathRuleElement(XPathElement):
@ -293,9 +303,8 @@ class XPathRuleElement(XPathElement):
self.ruleIndex = ruleIndex
def evaluate(self, t:ParseTree):
# return all children of t that match nodeName
return [c for c in Trees.getChildren(t) if isinstance(c, ParserRuleContext) and (c.ruleIndex == self.ruleIndex) == (not self.invert)]
# return all ParserRuleContext children of t that match ruleIndex (or do not match if inverted)
return filter(lambda c: isinstance(c, ParserRuleContext) and (self.invert ^ (c.getRuleIndex() == self.ruleIndex)), Trees.getChildren(t))
class XPathTokenAnywhereElement(XPathElement):
@ -304,8 +313,8 @@ class XPathTokenAnywhereElement(XPathElement):
self.tokenType = tokenType
def evaluate(self, t:ParseTree):
return Trees.findAllTokenNodes(t, self.tokenType)
# return all TerminalNode descendants of t that match tokenType (or do not match if inverted)
return filter(lambda c: isinstance(c, TerminalNode) and (self.invert ^ (c.symbol.type == self.tokenType)), Trees.descendants(t))
class XPathTokenElement(XPathElement):
@ -314,8 +323,8 @@ class XPathTokenElement(XPathElement):
self.tokenType = tokenType
def evaluate(self, t:ParseTree):
# return all children of t that match nodeName
return [c for c in Trees.getChildren(t) if isinstance(c, TerminalNode) and (c.symbol.type == self.tokenType) == (not self.invert)]
# return all TerminalNode children of t that match tokenType (or do not match if inverted)
return filter(lambda c: isinstance(c, TerminalNode) and (self.invert ^ (c.symbol.type == self.tokenType)), Trees.getChildren(t))
class XPathWildcardAnywhereElement(XPathElement):

View File

@ -0,0 +1,31 @@
// Taken from "tool-testsuite/test/org/antlr/v4/test/tool/TestXPath.java"
// Builds ExprLexer.py and ExprParser.py
grammar Expr;
prog: func+ ;
func: 'def' ID '(' arg (',' arg)* ')' body ;
body: '{' stat+ '}' ;
arg : ID ;
stat: expr ';' # printExpr
| ID '=' expr ';' # assign
| 'return' expr ';' # ret
| ';' # blank
;
expr: expr ('*'|'/') expr # MulDiv
| expr ('+'|'-') expr # AddSub
| primary # prim
;
primary
: INT # int
| ID # id
| '(' expr ')' # parens
;
MUL : '*' ; // assigns token name to '*' used above in grammar
DIV : '/' ;
ADD : '+' ;
SUB : '-' ;
RETURN : 'return' ;
ID : [a-zA-Z]+ ; // match identifiers
INT : [0-9]+ ; // match integers
NEWLINE:'\r'? '\n' -> skip; // newlines are skipped (not passed to the parser)
WS : [ \t]+ -> skip ; // toss out whitespace

View File

@ -0,0 +1,94 @@
# Generated from expr/Expr.g4 by ANTLR 4.7.2
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\23")
buf.write("^\b\1\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7")
buf.write("\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t\13\4\f\t\f\4\r\t\r\4\16")
buf.write("\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22\3\2\3\2")
buf.write("\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6\3\6\3\7\3\7\3\b\3")
buf.write("\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3\16")
buf.write("\3\16\3\16\3\16\3\16\3\16\3\17\6\17H\n\17\r\17\16\17I")
buf.write("\3\20\6\20M\n\20\r\20\16\20N\3\21\5\21R\n\21\3\21\3\21")
buf.write("\3\21\3\21\3\22\6\22Y\n\22\r\22\16\22Z\3\22\3\22\2\2\23")
buf.write("\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n\23\13\25\f\27\r\31")
buf.write("\16\33\17\35\20\37\21!\22#\23\3\2\5\4\2C\\c|\3\2\62;\4")
buf.write("\2\13\13\"\"\2a\2\3\3\2\2\2\2\5\3\2\2\2\2\7\3\2\2\2\2")
buf.write("\t\3\2\2\2\2\13\3\2\2\2\2\r\3\2\2\2\2\17\3\2\2\2\2\21")
buf.write("\3\2\2\2\2\23\3\2\2\2\2\25\3\2\2\2\2\27\3\2\2\2\2\31\3")
buf.write("\2\2\2\2\33\3\2\2\2\2\35\3\2\2\2\2\37\3\2\2\2\2!\3\2\2")
buf.write("\2\2#\3\2\2\2\3%\3\2\2\2\5)\3\2\2\2\7+\3\2\2\2\t-\3\2")
buf.write("\2\2\13/\3\2\2\2\r\61\3\2\2\2\17\63\3\2\2\2\21\65\3\2")
buf.write("\2\2\23\67\3\2\2\2\259\3\2\2\2\27;\3\2\2\2\31=\3\2\2\2")
buf.write("\33?\3\2\2\2\35G\3\2\2\2\37L\3\2\2\2!Q\3\2\2\2#X\3\2\2")
buf.write("\2%&\7f\2\2&\'\7g\2\2\'(\7h\2\2(\4\3\2\2\2)*\7*\2\2*\6")
buf.write("\3\2\2\2+,\7.\2\2,\b\3\2\2\2-.\7+\2\2.\n\3\2\2\2/\60\7")
buf.write("}\2\2\60\f\3\2\2\2\61\62\7\177\2\2\62\16\3\2\2\2\63\64")
buf.write("\7=\2\2\64\20\3\2\2\2\65\66\7?\2\2\66\22\3\2\2\2\678\7")
buf.write(",\2\28\24\3\2\2\29:\7\61\2\2:\26\3\2\2\2;<\7-\2\2<\30")
buf.write("\3\2\2\2=>\7/\2\2>\32\3\2\2\2?@\7t\2\2@A\7g\2\2AB\7v\2")
buf.write("\2BC\7w\2\2CD\7t\2\2DE\7p\2\2E\34\3\2\2\2FH\t\2\2\2GF")
buf.write("\3\2\2\2HI\3\2\2\2IG\3\2\2\2IJ\3\2\2\2J\36\3\2\2\2KM\t")
buf.write("\3\2\2LK\3\2\2\2MN\3\2\2\2NL\3\2\2\2NO\3\2\2\2O \3\2\2")
buf.write("\2PR\7\17\2\2QP\3\2\2\2QR\3\2\2\2RS\3\2\2\2ST\7\f\2\2")
buf.write("TU\3\2\2\2UV\b\21\2\2V\"\3\2\2\2WY\t\4\2\2XW\3\2\2\2Y")
buf.write("Z\3\2\2\2ZX\3\2\2\2Z[\3\2\2\2[\\\3\2\2\2\\]\b\22\2\2]")
buf.write("$\3\2\2\2\7\2INQZ\3\b\2\2")
return buf.getvalue()
class ExprLexer(Lexer):
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
T__0 = 1
T__1 = 2
T__2 = 3
T__3 = 4
T__4 = 5
T__5 = 6
T__6 = 7
T__7 = 8
MUL = 9
DIV = 10
ADD = 11
SUB = 12
RETURN = 13
ID = 14
INT = 15
NEWLINE = 16
WS = 17
channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN" ]
modeNames = [ "DEFAULT_MODE" ]
literalNames = [ "<INVALID>",
"'def'", "'('", "','", "')'", "'{'", "'}'", "';'", "'='", "'*'",
"'/'", "'+'", "'-'", "'return'" ]
symbolicNames = [ "<INVALID>",
"MUL", "DIV", "ADD", "SUB", "RETURN", "ID", "INT", "NEWLINE",
"WS" ]
ruleNames = [ "T__0", "T__1", "T__2", "T__3", "T__4", "T__5", "T__6",
"T__7", "MUL", "DIV", "ADD", "SUB", "RETURN", "ID", "INT",
"NEWLINE", "WS" ]
grammarFileName = "Expr.g4"
def __init__(self, input=None, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.7.2")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
self._predicates = None

View File

@ -0,0 +1,658 @@
# Generated from expr/Expr.g4 by ANTLR 4.7.2
# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
def serializedATN():
with StringIO() as buf:
buf.write("\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\23")
buf.write("S\4\2\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b")
buf.write("\t\b\3\2\6\2\22\n\2\r\2\16\2\23\3\3\3\3\3\3\3\3\3\3\3")
buf.write("\3\7\3\34\n\3\f\3\16\3\37\13\3\3\3\3\3\3\3\3\4\3\4\6\4")
buf.write("&\n\4\r\4\16\4\'\3\4\3\4\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3")
buf.write("\6\3\6\3\6\3\6\3\6\3\6\3\6\3\6\5\6;\n\6\3\7\3\7\3\7\3")
buf.write("\7\3\7\3\7\3\7\3\7\3\7\7\7F\n\7\f\7\16\7I\13\7\3\b\3\b")
buf.write("\3\b\3\b\3\b\3\b\5\bQ\n\b\3\b\2\3\f\t\2\4\6\b\n\f\16\2")
buf.write("\4\3\2\13\f\3\2\r\16\2U\2\21\3\2\2\2\4\25\3\2\2\2\6#\3")
buf.write("\2\2\2\b+\3\2\2\2\n:\3\2\2\2\f<\3\2\2\2\16P\3\2\2\2\20")
buf.write("\22\5\4\3\2\21\20\3\2\2\2\22\23\3\2\2\2\23\21\3\2\2\2")
buf.write("\23\24\3\2\2\2\24\3\3\2\2\2\25\26\7\3\2\2\26\27\7\20\2")
buf.write("\2\27\30\7\4\2\2\30\35\5\b\5\2\31\32\7\5\2\2\32\34\5\b")
buf.write("\5\2\33\31\3\2\2\2\34\37\3\2\2\2\35\33\3\2\2\2\35\36\3")
buf.write("\2\2\2\36 \3\2\2\2\37\35\3\2\2\2 !\7\6\2\2!\"\5\6\4\2")
buf.write("\"\5\3\2\2\2#%\7\7\2\2$&\5\n\6\2%$\3\2\2\2&\'\3\2\2\2")
buf.write("\'%\3\2\2\2\'(\3\2\2\2()\3\2\2\2)*\7\b\2\2*\7\3\2\2\2")
buf.write("+,\7\20\2\2,\t\3\2\2\2-.\5\f\7\2./\7\t\2\2/;\3\2\2\2\60")
buf.write("\61\7\20\2\2\61\62\7\n\2\2\62\63\5\f\7\2\63\64\7\t\2\2")
buf.write("\64;\3\2\2\2\65\66\7\17\2\2\66\67\5\f\7\2\678\7\t\2\2")
buf.write("8;\3\2\2\29;\7\t\2\2:-\3\2\2\2:\60\3\2\2\2:\65\3\2\2\2")
buf.write(":9\3\2\2\2;\13\3\2\2\2<=\b\7\1\2=>\5\16\b\2>G\3\2\2\2")
buf.write("?@\f\5\2\2@A\t\2\2\2AF\5\f\7\6BC\f\4\2\2CD\t\3\2\2DF\5")
buf.write("\f\7\5E?\3\2\2\2EB\3\2\2\2FI\3\2\2\2GE\3\2\2\2GH\3\2\2")
buf.write("\2H\r\3\2\2\2IG\3\2\2\2JQ\7\21\2\2KQ\7\20\2\2LM\7\4\2")
buf.write("\2MN\5\f\7\2NO\7\6\2\2OQ\3\2\2\2PJ\3\2\2\2PK\3\2\2\2P")
buf.write("L\3\2\2\2Q\17\3\2\2\2\t\23\35\':EGP")
return buf.getvalue()
class ExprParser ( Parser ):
grammarFileName = "Expr.g4"
atn = ATNDeserializer().deserialize(serializedATN())
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
sharedContextCache = PredictionContextCache()
literalNames = [ "<INVALID>", "'def'", "'('", "','", "')'", "'{'", "'}'",
"';'", "'='", "'*'", "'/'", "'+'", "'-'", "'return'" ]
symbolicNames = [ "<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "<INVALID>", "<INVALID>", "<INVALID>",
"<INVALID>", "MUL", "DIV", "ADD", "SUB", "RETURN",
"ID", "INT", "NEWLINE", "WS" ]
RULE_prog = 0
RULE_func = 1
RULE_body = 2
RULE_arg = 3
RULE_stat = 4
RULE_expr = 5
RULE_primary = 6
ruleNames = [ "prog", "func", "body", "arg", "stat", "expr", "primary" ]
EOF = Token.EOF
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
MUL=9
DIV=10
ADD=11
SUB=12
RETURN=13
ID=14
INT=15
NEWLINE=16
WS=17
def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
super().__init__(input, output)
self.checkVersion("4.7.2")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
class ProgContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def func(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(ExprParser.FuncContext)
else:
return self.getTypedRuleContext(ExprParser.FuncContext,i)
def getRuleIndex(self):
return ExprParser.RULE_prog
def prog(self):
localctx = ExprParser.ProgContext(self, self._ctx, self.state)
self.enterRule(localctx, 0, self.RULE_prog)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 15
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 14
self.func()
self.state = 17
self._errHandler.sync(self)
_la = self._input.LA(1)
if not (_la==ExprParser.T__0):
break
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class FuncContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ID(self):
return self.getToken(ExprParser.ID, 0)
def arg(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(ExprParser.ArgContext)
else:
return self.getTypedRuleContext(ExprParser.ArgContext,i)
def body(self):
return self.getTypedRuleContext(ExprParser.BodyContext,0)
def getRuleIndex(self):
return ExprParser.RULE_func
def func(self):
localctx = ExprParser.FuncContext(self, self._ctx, self.state)
self.enterRule(localctx, 2, self.RULE_func)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 19
self.match(ExprParser.T__0)
self.state = 20
self.match(ExprParser.ID)
self.state = 21
self.match(ExprParser.T__1)
self.state = 22
self.arg()
self.state = 27
self._errHandler.sync(self)
_la = self._input.LA(1)
while _la==ExprParser.T__2:
self.state = 23
self.match(ExprParser.T__2)
self.state = 24
self.arg()
self.state = 29
self._errHandler.sync(self)
_la = self._input.LA(1)
self.state = 30
self.match(ExprParser.T__3)
self.state = 31
self.body()
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class BodyContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def stat(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(ExprParser.StatContext)
else:
return self.getTypedRuleContext(ExprParser.StatContext,i)
def getRuleIndex(self):
return ExprParser.RULE_body
def body(self):
localctx = ExprParser.BodyContext(self, self._ctx, self.state)
self.enterRule(localctx, 4, self.RULE_body)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
self.state = 33
self.match(ExprParser.T__4)
self.state = 35
self._errHandler.sync(self)
_la = self._input.LA(1)
while True:
self.state = 34
self.stat()
self.state = 37
self._errHandler.sync(self)
_la = self._input.LA(1)
if not ((((_la) & ~0x3f) == 0 and ((1 << _la) & ((1 << ExprParser.T__1) | (1 << ExprParser.T__6) | (1 << ExprParser.RETURN) | (1 << ExprParser.ID) | (1 << ExprParser.INT))) != 0)):
break
self.state = 39
self.match(ExprParser.T__5)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ArgContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def ID(self):
return self.getToken(ExprParser.ID, 0)
def getRuleIndex(self):
return ExprParser.RULE_arg
def arg(self):
localctx = ExprParser.ArgContext(self, self._ctx, self.state)
self.enterRule(localctx, 6, self.RULE_arg)
try:
self.enterOuterAlt(localctx, 1)
self.state = 41
self.match(ExprParser.ID)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class StatContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return ExprParser.RULE_stat
def copyFrom(self, ctx:ParserRuleContext):
super().copyFrom(ctx)
class RetContext(StatContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.StatContext
super().__init__(parser)
self.copyFrom(ctx)
def RETURN(self):
return self.getToken(ExprParser.RETURN, 0)
def expr(self):
return self.getTypedRuleContext(ExprParser.ExprContext,0)
class BlankContext(StatContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.StatContext
super().__init__(parser)
self.copyFrom(ctx)
class PrintExprContext(StatContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.StatContext
super().__init__(parser)
self.copyFrom(ctx)
def expr(self):
return self.getTypedRuleContext(ExprParser.ExprContext,0)
class AssignContext(StatContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.StatContext
super().__init__(parser)
self.copyFrom(ctx)
def ID(self):
return self.getToken(ExprParser.ID, 0)
def expr(self):
return self.getTypedRuleContext(ExprParser.ExprContext,0)
def stat(self):
localctx = ExprParser.StatContext(self, self._ctx, self.state)
self.enterRule(localctx, 8, self.RULE_stat)
try:
self.state = 56
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,3,self._ctx)
if la_ == 1:
localctx = ExprParser.PrintExprContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 43
self.expr(0)
self.state = 44
self.match(ExprParser.T__6)
pass
elif la_ == 2:
localctx = ExprParser.AssignContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 46
self.match(ExprParser.ID)
self.state = 47
self.match(ExprParser.T__7)
self.state = 48
self.expr(0)
self.state = 49
self.match(ExprParser.T__6)
pass
elif la_ == 3:
localctx = ExprParser.RetContext(self, localctx)
self.enterOuterAlt(localctx, 3)
self.state = 51
self.match(ExprParser.RETURN)
self.state = 52
self.expr(0)
self.state = 53
self.match(ExprParser.T__6)
pass
elif la_ == 4:
localctx = ExprParser.BlankContext(self, localctx)
self.enterOuterAlt(localctx, 4)
self.state = 55
self.match(ExprParser.T__6)
pass
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
class ExprContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return ExprParser.RULE_expr
def copyFrom(self, ctx:ParserRuleContext):
super().copyFrom(ctx)
class PrimContext(ExprContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.ExprContext
super().__init__(parser)
self.copyFrom(ctx)
def primary(self):
return self.getTypedRuleContext(ExprParser.PrimaryContext,0)
class MulDivContext(ExprContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.ExprContext
super().__init__(parser)
self.copyFrom(ctx)
def expr(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(ExprParser.ExprContext)
else:
return self.getTypedRuleContext(ExprParser.ExprContext,i)
def MUL(self):
return self.getToken(ExprParser.MUL, 0)
def DIV(self):
return self.getToken(ExprParser.DIV, 0)
class AddSubContext(ExprContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.ExprContext
super().__init__(parser)
self.copyFrom(ctx)
def expr(self, i:int=None):
if i is None:
return self.getTypedRuleContexts(ExprParser.ExprContext)
else:
return self.getTypedRuleContext(ExprParser.ExprContext,i)
def ADD(self):
return self.getToken(ExprParser.ADD, 0)
def SUB(self):
return self.getToken(ExprParser.SUB, 0)
def expr(self, _p:int=0):
_parentctx = self._ctx
_parentState = self.state
localctx = ExprParser.ExprContext(self, self._ctx, _parentState)
_prevctx = localctx
_startState = 10
self.enterRecursionRule(localctx, 10, self.RULE_expr, _p)
self._la = 0 # Token type
try:
self.enterOuterAlt(localctx, 1)
localctx = ExprParser.PrimContext(self, localctx)
self._ctx = localctx
_prevctx = localctx
self.state = 59
self.primary()
self._ctx.stop = self._input.LT(-1)
self.state = 69
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,5,self._ctx)
while _alt!=2 and _alt!=ATN.INVALID_ALT_NUMBER:
if _alt==1:
if self._parseListeners is not None:
self.triggerExitRuleEvent()
_prevctx = localctx
self.state = 67
self._errHandler.sync(self)
la_ = self._interp.adaptivePredict(self._input,4,self._ctx)
if la_ == 1:
localctx = ExprParser.MulDivContext(self, ExprParser.ExprContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
self.state = 61
if not self.precpred(self._ctx, 3):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 3)")
self.state = 62
_la = self._input.LA(1)
if not(_la==ExprParser.MUL or _la==ExprParser.DIV):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 63
self.expr(4)
pass
elif la_ == 2:
localctx = ExprParser.AddSubContext(self, ExprParser.ExprContext(self, _parentctx, _parentState))
self.pushNewRecursionContext(localctx, _startState, self.RULE_expr)
self.state = 64
if not self.precpred(self._ctx, 2):
from antlr4.error.Errors import FailedPredicateException
raise FailedPredicateException(self, "self.precpred(self._ctx, 2)")
self.state = 65
_la = self._input.LA(1)
if not(_la==ExprParser.ADD or _la==ExprParser.SUB):
self._errHandler.recoverInline(self)
else:
self._errHandler.reportMatch(self)
self.consume()
self.state = 66
self.expr(3)
pass
self.state = 71
self._errHandler.sync(self)
_alt = self._interp.adaptivePredict(self._input,5,self._ctx)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.unrollRecursionContexts(_parentctx)
return localctx
class PrimaryContext(ParserRuleContext):
def __init__(self, parser, parent:ParserRuleContext=None, invokingState:int=-1):
super().__init__(parent, invokingState)
self.parser = parser
def getRuleIndex(self):
return ExprParser.RULE_primary
def copyFrom(self, ctx:ParserRuleContext):
super().copyFrom(ctx)
class ParensContext(PrimaryContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.PrimaryContext
super().__init__(parser)
self.copyFrom(ctx)
def expr(self):
return self.getTypedRuleContext(ExprParser.ExprContext,0)
class IdContext(PrimaryContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.PrimaryContext
super().__init__(parser)
self.copyFrom(ctx)
def ID(self):
return self.getToken(ExprParser.ID, 0)
class IntContext(PrimaryContext):
def __init__(self, parser, ctx:ParserRuleContext): # actually a ExprParser.PrimaryContext
super().__init__(parser)
self.copyFrom(ctx)
def INT(self):
return self.getToken(ExprParser.INT, 0)
def primary(self):
localctx = ExprParser.PrimaryContext(self, self._ctx, self.state)
self.enterRule(localctx, 12, self.RULE_primary)
try:
self.state = 78
self._errHandler.sync(self)
token = self._input.LA(1)
if token in [ExprParser.INT]:
localctx = ExprParser.IntContext(self, localctx)
self.enterOuterAlt(localctx, 1)
self.state = 72
self.match(ExprParser.INT)
pass
elif token in [ExprParser.ID]:
localctx = ExprParser.IdContext(self, localctx)
self.enterOuterAlt(localctx, 2)
self.state = 73
self.match(ExprParser.ID)
pass
elif token in [ExprParser.T__1]:
localctx = ExprParser.ParensContext(self, localctx)
self.enterOuterAlt(localctx, 3)
self.state = 74
self.match(ExprParser.T__1)
self.state = 75
self.expr(0)
self.state = 76
self.match(ExprParser.T__3)
pass
else:
raise NoViableAltException(self)
except RecognitionException as re:
localctx.exception = re
self._errHandler.reportError(self, re)
self._errHandler.recover(self, re)
finally:
self.exitRule()
return localctx
def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
if self._predicates == None:
self._predicates = dict()
self._predicates[5] = self.expr_sempred
pred = self._predicates.get(ruleIndex, None)
if pred is None:
raise Exception("No predicate with index:" + str(ruleIndex))
else:
return pred(localctx, predIndex)
def expr_sempred(self, localctx:ExprContext, predIndex:int):
if predIndex == 0:
return self.precpred(self._ctx, 3)
if predIndex == 1:
return self.precpred(self._ctx, 2)

View File

@ -3,5 +3,6 @@ import os
src_path = os.path.join(os.path.dirname(os.path.dirname(os.path.abspath(__file__))), 'src')
sys.path.insert(0,src_path)
from TestTokenStreamRewriter import TestTokenStreamRewriter
from xpathtest import XPathTest
import unittest
unittest.main()

View File

@ -0,0 +1,89 @@
import antlr4
from antlr4 import InputStream, CommonTokenStream, TerminalNode
from antlr4.xpath.XPath import XPath
import unittest
from expr.ExprParser import ExprParser
from expr.ExprLexer import ExprLexer
def tokenToString(token, ruleNames):
if isinstance(token, TerminalNode):
return str(token)
else:
return ruleNames[token.getRuleIndex()]
class XPathTest(unittest.TestCase):
def setUp(self):
self.input_stream = InputStream(
"def f(x,y) { x = 3+4; y; ; }\n"
"def g(x) { return 1+2*x; }\n"
)
# Create the Token Stream
self.lexer = ExprLexer(self.input_stream)
self.stream = CommonTokenStream(self.lexer)
self.stream.fill()
# Create the parser and expression parse tree
self.parser = ExprParser(self.stream)
self.tree = self.parser.prog()
def testValidPaths(self):
valid_paths = [
"/prog/func", # all funcs under prog at root
"/prog/*", # all children of prog at root
"/*/func", # all func kids of any root node
"prog", # prog must be root node
"/prog", # prog must be root node
"/*", # any root
"*", # any root
"//ID", # any ID in tree
"//expr/primary/ID", # any ID child of a primary under any expr
"//body//ID", # any ID under a body
"//'return'", # any 'return' literal in tree, matched by literal name
"//RETURN", # any 'return' literal in tree, matched by symbolic name
"//primary/*", # all kids of any primary
"//func/*/stat", # all stat nodes grandkids of any func node
"/prog/func/'def'", # all def literal kids of func kid of prog
"//stat/';'", # all ';' under any stat node
"//expr/primary/!ID",# anything but ID under primary under any expr node
"//expr/!primary", # anything but primary under any expr node
"//!*", # nothing anywhere
"/!*", # nothing at root
"//expr//ID" # any ID under any expression (tests antlr/antlr4#370)
]
expected_results = [
"[func, func]",
"[func, func]",
"[func, func]",
"[prog]",
"[prog]",
"[prog]",
"[prog]",
"[f, x, y, x, y, g, x, x]",
"[y, x]",
"[x, y, x]",
"[return]",
"[return]",
"[3, 4, y, 1, 2, x]",
"[stat, stat, stat, stat]",
"[def, def]",
"[;, ;, ;, ;]",
"[3, 4, 1, 2]",
"[expr, expr, expr, expr, expr, expr]",
"[]",
"[]",
"[y, x]",
]
for path, expected in zip(valid_paths, expected_results):
# Build test string
res = XPath.findAll(self.tree, path, self.parser)
res_str = ", ".join([tokenToString(token, self.parser.ruleNames) for token in res])
res_str = "[%s]" % res_str
# Test against expected output
self.assertEqual(res_str, expected, "Failed test %s" % path)
if __name__ == '__main__':
unittest.main()

File diff suppressed because it is too large

View File

@ -51,8 +51,11 @@ ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
# encoding: utf-8
from antlr4 import *
from io import StringIO
from typing.io import TextIO
import sys
if sys.version_info[1] > 5:
from typing import TextIO
else:
from typing.io import TextIO
<namedActions.header>
<parser>

View File

@ -17,6 +17,7 @@ import java.util.ArrayList;
/** A StructDecl to handle a -&gt; label on alt */
public class AltLabelStructDecl extends StructDecl {
public int altNum;
public String parentRule;
public AltLabelStructDecl(OutputModelFactory factory, Rule r,
int altNum, String label)
{
@ -24,6 +25,7 @@ public class AltLabelStructDecl extends StructDecl {
this.altNum = altNum;
this.name = // override name set in super to the label ctx
factory.getGenerator().getTarget().getAltLabelContextStructName(label);
this.parentRule = r.name;
derivedFromName = label;
}

View File

@ -0,0 +1,105 @@
/*
* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
* Use of this file is governed by the BSD 3-clause license that
* can be found in the LICENSE.txt file in the project root.
*/
package org.antlr.v4.codegen.target;
import org.antlr.v4.codegen.CodeGenerator;
import org.antlr.v4.codegen.Target;
import org.antlr.v4.codegen.UnicodeEscapes;
import org.antlr.v4.tool.ast.GrammarAST;
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.StringRenderer;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
public class PHPTarget extends Target {
private static final String[] phpKeywords = {
"abstract", "and", "array", "as",
"break",
"callable", "case", "catch", "class", "clone", "const", "continue",
"declare", "default", "die", "do",
"echo", "else", "elseif", "empty", "enddeclare", "endfor", "endforeach",
"endif", "endswitch", "endwhile", "eval", "exit", "extends",
"final", "finally", "for", "foreach", "function",
"global", "goto",
"if", "implements", "include", "include_once", "instanceof", "insteadof", "interface", "isset",
"list",
"namespace", "new",
"or",
"print", "private", "protected", "public",
"require", "require_once", "return",
"static", "switch",
"throw", "trait", "try",
"unset", "use",
"var",
"while",
"xor",
"yield",
"__halt_compiler", "__CLASS__", "__DIR__", "__FILE__", "__FUNCTION__",
"__LINE__", "__METHOD__", "__NAMESPACE__", "__TRAIT__"
};
private final Set<String> badWords = new HashSet<String>();
public PHPTarget(CodeGenerator gen) {
super(gen, "PHP");
targetCharValueEscape['$'] = "\\$";
}
@Override
public String getVersion() {
return "4.7.2";
}
@Override
public String encodeIntAsCharEscape(int v) {
if (v < Character.MIN_VALUE || v > Character.MAX_VALUE) {
throw new IllegalArgumentException(String.format("Cannot encode the specified value: %d", v));
}
return String.format("\\u{%X}", v & 0xFFFF);
}
public Set<String> getBadWords() {
if (badWords.isEmpty()) {
addBadWords();
}
return badWords;
}
protected void addBadWords() {
badWords.addAll(Arrays.asList(phpKeywords));
badWords.add("rule");
badWords.add("parserRule");
}
@Override
protected boolean visibleGrammarSymbolCausesIssueInGeneratedCode(GrammarAST idNode) {
return getBadWords().contains(idNode.getText());
}
@Override
protected STGroup loadTemplates() {
STGroup result = super.loadTemplates();
result.registerRenderer(String.class, new StringRenderer(), true);
return result;
}
@Override
public boolean supportsOverloadedMethods() {
return false;
}
@Override
protected void appendUnicodeEscapedCodePoint(int codePoint, StringBuilder sb) {
UnicodeEscapes.appendPythonStyleEscapedCodePoint(codePoint, sb);
}
}