Diffstat (limited to 'tool/resources')
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/CSharp/CSharp.stg          |  43
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/Cpp/Cpp.stg                |  22
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg                  | 114
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/Java/Java.stg              |  18
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg  |  29
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/Python2/Python2.stg        |  46
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg        |  48
-rwxr-xr-x  tool/resources/org/antlr/v4/tool/templates/codegen/Swift/Swift.stg            | 320
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/messages/formats/gnu.stg           |   4
-rw-r--r--  tool/resources/org/antlr/v4/tool/templates/unicodedata.st                     |  62
10 files changed, 445 insertions, 261 deletions
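
A recurring change across the lexer templates below is the new channelNames table and getChannelNames() override emitted next to modeNames. A minimal sketch of how that accessor might be used from the Java runtime, assuming a grammar has been regenerated with these templates; MyGrammarLexer is a hypothetical name standing in for any such generated lexer:

import org.antlr.v4.runtime.CharStreams;

public class ChannelNamesDemo {
    public static void main(String[] args) {
        // "MyGrammarLexer" is a placeholder for a lexer generated from these templates.
        MyGrammarLexer lexer = new MyGrammarLexer(CharStreams.fromString(""));
        for (String name : lexer.getChannelNames()) {
            // Prints DEFAULT_TOKEN_CHANNEL, HIDDEN, then any channels declared in the grammar.
            System.out.println(name);
        }
    }
}
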
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/CSharp/CSharp.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/CSharp/CSharp.stg
index 07b9f8b..a665ba6 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/CSharp/CSharp.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/CSharp/CSharp.stg
@@ -37,6 +37,7 @@ namespace <file.genPackage> {
<endif>
<namedActions.header>
using System;
+using System.IO;
using System.Text;
using System.Diagnostics;
using System.Collections.Generic;
@@ -287,7 +288,7 @@ public partial class <csIdentifier.(parser.name)> : <superClass; null="Parser">
public override string[] RuleNames { get { return ruleNames; } }
- public override string SerializedAtn { get { return _serializedATN; } }
+ public override string SerializedAtn { get { return new string(_serializedATN); } }
static <csIdentifier.(parser.name)>() {
decisionToDFA = new DFA[_ATN.NumberOfDecisions];
@@ -357,8 +358,10 @@ case <f.ruleIndex> : return <f.name>_sempred(<if(!recog.modes)>(<f.ctxType>)<end
>>
parser_ctor(parser) ::= <<
-public <csIdentifier.(parser.name)>(ITokenStream input)
- : base(input)
+ public <csIdentifier.(parser.name)>(ITokenStream input) : this(input, Console.Out, Console.Error) { }
+
+ public <csIdentifier.(parser.name)>(ITokenStream input, TextWriter output, TextWriter errorOutput)
+ : base(input, output, errorOutput)
{
Interpreter = new ParserATNSimulator(this, _ATN, decisionToDFA, sharedContextCache);
}
@@ -566,7 +569,7 @@ switch (TokenStream.LA(1)) {
<alt>
break;}; separator="\n">
default:
- <error>
+ break;
}
>>
@@ -958,6 +961,7 @@ namespace <file.genPackage> {
<endif>
<namedActions.header>
using System;
+using System.IO;
using System.Text;
using Antlr4.Runtime;
using Antlr4.Runtime.Atn;
@@ -982,7 +986,14 @@ public partial class <csIdentifier.(lexer.name)> : <superClass; null="Lexer"> {
public const int
<lexer.channels:{k | <csIdentifier.(k)>=<lexer.channels.(k)>}; separator=", ", wrap, anchor>;
<endif>
- <rest(lexer.modes):{m| public const int <modeName.(m)> = <i>;}; separator="\n">
+ <if(rest(lexer.modes))>
+ public const int
+ <rest(lexer.modes):{m | <m>=<i>}; separator=", ", wrap, anchor>;
+ <endif>
+ public static string[] channelNames = {
+ "DEFAULT_TOKEN_CHANNEL", "HIDDEN"<if (lexer.channels)>, <lexer.channels:{c| "<c>"}; separator=", ", wrap, anchor><endif>
+ };
+
public static string[] modeNames = {
<lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor>
};
@@ -994,7 +1005,10 @@ public partial class <csIdentifier.(lexer.name)> : <superClass; null="Lexer"> {
<namedActions.members>
public <csIdentifier.(lexer.name)>(ICharStream input)
- : base(input)
+ : this(input, Console.Out, Console.Error) { }
+
+ public <csIdentifier.(lexer.name)>(ICharStream input, TextWriter output, TextWriter errorOutput)
+ : base(input, output, errorOutput)
{
Interpreter = new LexerATNSimulator(this, _ATN, decisionToDFA, sharedContextCache);
}
@@ -1005,9 +1019,11 @@ public partial class <csIdentifier.(lexer.name)> : <superClass; null="Lexer"> {
public override string[] RuleNames { get { return ruleNames; } }
+ public override string[] ChannelNames { get { return channelNames; } }
+
public override string[] ModeNames { get { return modeNames; } }
- public override string SerializedAtn { get { return _serializedATN; } }
+ public override string SerializedAtn { get { return new string(_serializedATN); } }
static <csIdentifier.(lexer.name)>() {
decisionToDFA = new DFA[_ATN.NumberOfDecisions];
@@ -1022,16 +1038,12 @@ public partial class <csIdentifier.(lexer.name)> : <superClass; null="Lexer"> {
SerializedATN(model) ::= <<
-private static string _serializedATN = _serializeATN();
-private static string _serializeATN()
-{
- StringBuilder sb = new StringBuilder();
- sb.Append("<model.serialized; wrap={");<\n><\t>sb.Append("}>");
- return sb.ToString();
-}
+private static char[] _serializedATN = {
+ <model.serialized; separator=", ", wrap>,
+};
public static readonly ATN _ATN =
- new ATNDeserializer().Deserialize(_serializedATN.ToCharArray());
+ new ATNDeserializer().Deserialize(_serializedATN);
>>
@@ -1133,6 +1145,7 @@ csIdentifier ::= [
"ushort" : "@ushort",
"using" : "@using",
"virtual" : "@virtual",
+ "values" : "@values",
"void" : "@void",
"volatile" : "@volatile",
"while" : "@while",
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Cpp/Cpp.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Cpp/Cpp.stg
index 78f9610..6342b66 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Cpp/Cpp.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Cpp/Cpp.stg
@@ -59,7 +59,7 @@ public:
<if (rest(lexer.modes))>
enum {
- <rest(lexer.modes): {m | <m> = <i>,}; separator="\n", anchor>
+ <rest(lexer.modes): {m | <m> = <i>}; separator=", ", wrap, anchor>
};
<endif>
@@ -70,6 +70,7 @@ public:
virtual std::string getGrammarFileName() const override;
virtual const std::vector\<std::string>& getRuleNames() const override;
+ virtual const std::vector\<std::string>& getChannelNames() const override;
virtual const std::vector\<std::string>& getModeNames() const override;
virtual const std::vector\<std::string>& getTokenNames() const override; // deprecated, use vocabulary instead
virtual antlr4::dfa::Vocabulary& getVocabulary() const override;
@@ -89,6 +90,7 @@ private:
static antlr4::atn::PredictionContextCache _sharedContextCache;
static std::vector\<std::string> _ruleNames;
static std::vector\<std::string> _tokenNames;
+ static std::vector\<std::string> _channelNames;
static std::vector\<std::string> _modeNames;
static std::vector\<std::string> _literalNames;
@@ -128,6 +130,10 @@ const std::vector\<std::string>& <lexer.name>::getRuleNames() const {
return _ruleNames;
}
+const std::vector\<std::string>& <lexer.name>::getChannelNames() const {
+ return _channelNames;
+}
+
const std::vector\<std::string>& <lexer.name>::getModeNames() const {
return _modeNames;
}
@@ -186,19 +192,23 @@ atn::ATN <lexer.name>::_atn;
std::vector\<uint16_t> <lexer.name>::_serializedATN;
std::vector\<std::string> <lexer.name>::_ruleNames = {
- <lexer.ruleNames: {r | "<r>"}; separator = ", ", wrap, anchor>
+ <lexer.ruleNames: {r | u8"<r>"}; separator = ", ", wrap, anchor>
+};
+
+std::vector\<std::string> <lexer.name>::_channelNames = {
+ "DEFAULT_TOKEN_CHANNEL", "HIDDEN"<if (lexer.channels)>, <lexer.channels: {c | u8"<c>"}; separator = ", ", wrap, anchor><endif>
};
std::vector\<std::string> <lexer.name>::_modeNames = {
- <lexer.modes: {m | "<m>"}; separator = ", ", wrap, anchor>
+ <lexer.modes: {m | u8"<m>"}; separator = ", ", wrap, anchor>
};
std::vector\<std::string> <lexer.name>::_literalNames = {
- <lexer.literalNames: {t | <t>}; null = "\"\"", separator = ", ", wrap, anchor>
+ <lexer.literalNames: {t | u8<t>}; null = "\"\"", separator = ", ", wrap, anchor>
};
std::vector\<std::string> <lexer.name>::_symbolicNames = {
- <lexer.symbolicNames: {t | <t>}; null = "\"\"", separator = ", ", wrap, anchor>
+ <lexer.symbolicNames: {t | u8<t>}; null = "\"\"", separator = ", ", wrap, anchor>
};
dfa::Vocabulary <lexer.name>::_vocabulary(_literalNames, _symbolicNames);
@@ -644,7 +654,7 @@ switch (_input->LA(1)) {
\}
}; separator="\n">
default:
- <error>
+ break;
}
>>
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg
index 7559f01..81d0048 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Go/Go.stg
@@ -1,5 +1,5 @@
fileHeader(grammarFileName, ANTLRVersion) ::= <<
-// Generated from <grammarFileName; format="java-escape"> by ANTLR <ANTLRVersion>.
+// Code generated from <grammarFileName; format="java-escape"> by ANTLR <ANTLRVersion>. DO NOT EDIT.
>>
ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
@@ -145,19 +145,13 @@ func (v *Base<file.grammarName>Visitor) Visit<lname; format="cap">(ctx *<lname;
>>
Parser(parser, funcs, atn, sempredFuncs, superClass) ::= <<
-<if(superClass)>
-import "./<superClass>"
-
-<endif>
<if(atn)>
var parserATN = <atn>
<else>
var parserATN []uint16
<endif>
-
var deserializer = antlr.NewATNDeserializer(nil)
-
var deserializedATN = deserializer.DeserializeFromUInt16(parserATN)
<if(parser.literalNames)>
@@ -168,7 +162,6 @@ var literalNames = []string{
var literalNames []string
<endif>
-
<if(parser.symbolicNames)>
var symbolicNames = []string{
<parser.symbolicNames; null="\"\"", separator=", ", wrap>,
@@ -186,24 +179,24 @@ var ruleNames = []string{
var ruleNames []string
<endif>
+var decisionToDFA = make([]*antlr.DFA, len(deserializedATN.DecisionToState))
+
+func init() {
+ for index, ds := range deserializedATN.DecisionToState {
+ decisionToDFA[index] = antlr.NewDFA(ds, index)
+ }
+}
type <parser.name> struct {
<superClass; null="*antlr.BaseParser">
}
func New<parser.name>(input antlr.TokenStream) *<parser.name> {
- var decisionToDFA = make([]*antlr.DFA, len(deserializedATN.DecisionToState))
- var sharedContextCache = antlr.NewPredictionContextCache()
-
- for index, ds := range deserializedATN.DecisionToState {
- decisionToDFA[index] = antlr.NewDFA(ds, index)
- }
-
this := new(<parser.name>)
this.BaseParser = antlr.NewBaseParser(input)
- this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, sharedContextCache)
+ this.Interpreter = antlr.NewParserATNSimulator(this, deserializedATN, decisionToDFA, antlr.NewPredictionContextCache())
this.RuleNames = ruleNames
this.LiteralNames = literalNames
this.SymbolicNames = symbolicNames
@@ -563,9 +556,6 @@ switch p.GetTokenStream().LA(1) {
<endif>
default:
- <if(error)>
- <error>
- <endif>
}
>>
@@ -787,29 +777,31 @@ MatchSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, false)>"
MatchNotSet(m, expr, capture) ::= "<CommonSetStuff(m, expr, capture, true)>"
CommonSetStuff(m, expr, capture, invert) ::= <<
-p.SetState(<m.stateNumber>)
-<if(m.labels)>
-
-var _lt = p.GetTokenStream().LT(1)<! TODO: Should LT be called always like InvokeRule and MatchToken? !>
+{
+ p.SetState(<m.stateNumber>)
+ <if(m.labels)>
-<m.labels:{l | <labelref(l)> = _lt}; separator="\n">
+ var _lt = p.GetTokenStream().LT(1)<! TODO: Should LT be called always like InvokeRule and MatchToken? !>
-<endif>
-<if(capture)>
-<capture>
+ <m.labels:{l | <labelref(l)> = _lt}; separator="\n">
-<endif>
-<if(invert)>if <m.varName> \<= 0 || <expr> <else>if !(<expr>)<endif> {
- <if(m.labels)>
- var _ri = p.GetErrorHandler().RecoverInline(p)
+ <endif>
+ <if(capture)>
+ <capture>
- <m.labels:{l | <labelref(l)> = _ri}; separator="\n">
- <else>
- p.GetErrorHandler().RecoverInline(p)
<endif>
-} else {
- p.GetErrorHandler().ReportMatch(p)
- p.Consume()
+ <if(invert)>if <m.varName> \<= 0 || <expr> <else>if !(<expr>)<endif> {
+ <if(m.labels)>
+ var _ri = p.GetErrorHandler().RecoverInline(p)
+
+ <m.labels:{l | <labelref(l)> = _ri}; separator="\n">
+ <else>
+ p.GetErrorHandler().RecoverInline(p)
+ <endif>
+ } else {
+ p.GetErrorHandler().ReportMatch(p)
+ p.Consume()
+ }
}
>>
@@ -905,7 +897,7 @@ TokenListDecl(t) ::= "<t.name> []antlr.Token"
RuleContextDecl(r) ::= "<r.name> I<r.ctxName> "
RuleContextListDecl(rdecl) ::= "<rdecl.name> []I<rdecl.ctxName>"
-AttributeDecl(d) ::= "<d.name> <d.type;format={lower}><if(d.initValue)>// TODO = <d.initValue><endif>"
+AttributeDecl(d) ::= "<d.name> <d.type><if(d.initValue)>// TODO = <d.initValue><endif>"
ContextTokenGetterDecl(t) ::= <<
<t.name; format="cap">() antlr.TerminalNode {
@@ -1049,13 +1041,13 @@ Set<a.name; format="cap">([]I<a.ctxName>) }; separator="\n\n">
<if(struct.attributeDecls)>
<struct.attributeDecls:{a | // Get<a.name; format="cap"> returns the <a.name> attribute.
-Get<a.name; format="cap">() <a.type;format="lower">}; separator="\n\n">
+Get<a.name; format="cap">() <a.type>}; separator="\n\n">
<endif>
<if(struct.attributeDecls)>
<struct.attributeDecls:{a | // Set<a.name; format="cap"> sets the <a.name> attribute.
-Set<a.name; format="cap">(<a.type;format="lower">)}; separator="\n\n">
+Set<a.name; format="cap">(<a.type>)}; separator="\n\n">
<endif>
@@ -1080,7 +1072,7 @@ func NewEmpty<struct.name>() *<struct.name> {
func (*<struct.name>) Is<struct.name>() {}
-func New<struct.name>(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int<struct.ctorAttrs:{a | , <a.name> <a.type;format="lower">}>) *<struct.name> {
+func New<struct.name>(parser antlr.Parser, parent antlr.ParserRuleContext, invokingState int<struct.ctorAttrs:{a | , <a.name> <a.type>}>) *<struct.name> {
var p = new(<struct.name>)
p.<if(contextSuperClass)><contextSuperClass><else>BaseParserRuleContext<endif> = <if(contextSuperClass)>New<contextSuperClass><else>antlr.NewBaseParserRuleContext<endif>(parent, invokingState)
@@ -1148,12 +1140,12 @@ func (s *<struct.name>) GetParser() antlr.Parser { return s.parser }
<if(struct.attributeDecls)>
-<struct.attributeDecls:{a | func (s *<struct.name>) Get<a.name; format="cap">() <a.type;format="lower"> { return s.<a.name> \}}; separator="\n\n">
+<struct.attributeDecls:{a | func (s *<struct.name>) Get<a.name; format="cap">() <a.type> { return s.<a.name> \}}; separator="\n\n">
<endif>
<if(struct.attributeDecls)>
-<struct.attributeDecls:{a | func (s *<struct.name>) Set<a.name; format="cap">(v <a.type;format="lower">) { s.<a.name> = v \}}; separator="\n\n">
+<struct.attributeDecls:{a | func (s *<struct.name>) Set<a.name; format="cap">(v <a.type>) { s.<a.name> = v \}}; separator="\n\n">
<endif>
<if(getters)>
@@ -1258,12 +1250,12 @@ func New<struct.name>(parser antlr.Parser, ctx antlr.ParserRuleContext) *<struct
<if(struct.attributeDecls)>
-<struct.attributeDecls:{a | func (s *<struct.name>) Get<a.name; format="cap">() <a.type;format="lower"> { return s.<a.name> \}}; separator="\n\n">
+<struct.attributeDecls:{a | func (s *<struct.name>) Get<a.name; format="cap">() <a.type> { return s.<a.name> \}}; separator="\n\n">
<endif>
<if(struct.attributeDecls)>
-<struct.attributeDecls:{a | func (s *<struct.name>) Set<a.name; format="cap">(v <a.type;format="lower">) { s.<a.name> = v \}}; separator="\n\n">
+<struct.attributeDecls:{a | func (s *<struct.name>) Set<a.name; format="cap">(v <a.type>) { s.<a.name> = v \}}; separator="\n\n">
<endif>
func (s *<struct.name>) GetRuleContext() antlr.RuleContext {
@@ -1386,14 +1378,13 @@ var serializedLexerAtn []uint16
var lexerDeserializer = antlr.NewATNDeserializer(nil)
var lexerAtn = lexerDeserializer.DeserializeFromUInt16(serializedLexerAtn)
-<if(lexer.modes)>
+var lexerChannelNames = []string{
+ "DEFAULT_TOKEN_CHANNEL", "HIDDEN"<if (lexer.channels)>, <lexer.channels:{c | "<c>"}; separator=", ", wrap><endif>,
+}
+
var lexerModeNames = []string{
<lexer.modes:{m | "<m>"}; separator=", ", wrap>,
}
-<else>
-var lexerModeNames []string
-<endif>
-
<if(lexer.literalNames)>
var lexerLiteralNames = []string{
@@ -1424,22 +1415,27 @@ var lexerRuleNames []string
type <lexer.name> struct {
*<if(superClass)><superClass><else>antlr.BaseLexer<endif>
+ channelNames []string
modeNames []string
// TODO: EOF string
}
-func New<lexer.name>(input antlr.CharStream) *<lexer.name> {
- var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState))
+var lexerDecisionToDFA = make([]*antlr.DFA, len(lexerAtn.DecisionToState))
+func init() {
for index, ds := range lexerAtn.DecisionToState {
lexerDecisionToDFA[index] = antlr.NewDFA(ds, index)
}
+}
+
+func New<lexer.name>(input antlr.CharStream) *<lexer.name> {
l := new(<lexer.name>)
l.BaseLexer = antlr.NewBaseLexer(input)
l.Interpreter = antlr.NewLexerATNSimulator(l, lexerAtn, lexerDecisionToDFA, antlr.NewPredictionContextCache())
+ l.channelNames = lexerChannelNames
l.modeNames = lexerModeNames
l.RuleNames = lexerRuleNames
l.LiteralNames = lexerLiteralNames
@@ -1461,9 +1457,21 @@ const (
const <lexer.name><first(lexer.tokens)> = <lexer.tokens.(first(lexer.tokens))>
<endif>
+<if(rest(lexer.channels))>
+
+// <lexer.name> channels.
+const (
+ <lexer.channels:{c | <lexer.name><c> = <lexer.channels.(c)>}; separator="\n">
+)
+<elseif(lexer.channels)>
+
+// <lexer.name><first(lexer.channels)> is the <lexer.name> channel.
+const <lexer.name><first(lexer.channels)> = <lexer.channels.(first(lexer.channels))>
+<endif>
+
<if(rest(rest(lexer.modes)))>
-// <lexer.name> modes
+// <lexer.name> modes.
const (
<first(rest(lexer.modes)):{m | <lexer.name><m> = iota + 1}>
<rest(rest(lexer.modes)):{m | <lexer.name><m>}; separator="\n">
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Java/Java.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Java/Java.stg
index a1b18fc..492c566 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Java/Java.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Java/Java.stg
@@ -489,7 +489,7 @@ switch (_input.LA(1)) {
<alt>
break;}; separator="\n">
default:
- <error>
+ break;
}
>>
@@ -894,13 +894,22 @@ public class <lexer.name> extends <superClass; null="Lexer"> {
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
+ <if(lexer.tokens)>
public static final int
<lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>;
+ <endif>
<if(lexer.channels)>
public static final int
- <lexer.channels:{k | <k>=<lexer.channels.(k)>}; separator=", ", wrap, anchor>;
+ <lexer.channels:{c | <c>=<lexer.channels.(c)>}; separator=", ", wrap, anchor>;
+ <endif>
+ <if(rest(lexer.modes))>
+ public static final int
+ <rest(lexer.modes):{m | <m>=<i>}; separator=", ", wrap, anchor>;
<endif>
- <rest(lexer.modes):{m| public static final int <m> = <i>;}; separator="\n">
+ public static String[] channelNames = {
+ "DEFAULT_TOKEN_CHANNEL", "HIDDEN"<if (lexer.channels)>, <lexer.channels:{c| "<c>"}; separator=", ", wrap, anchor><endif>
+ };
+
public static String[] modeNames = {
<lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor>
};
@@ -928,6 +937,9 @@ public class <lexer.name> extends <superClass; null="Lexer"> {
public String getSerializedATN() { return _serializedATN; }
@Override
+ public String[] getChannelNames() { return channelNames; }
+
+ @Override
public String[] getModeNames() { return modeNames; }
@Override
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg
index 2e10728..38c9ad8 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/JavaScript/JavaScript.stg
@@ -36,12 +36,12 @@
* REQUIRED.
*/
-pythonTypeInitMap ::= [
- "bool":"False",
+javascriptTypeInitMap ::= [
+ "bool":"false",
"int":"0",
"float":"0.0",
"str":"",
- default:"None" // anything other than a primitive type is an object
+ default:"{}" // anything other than a primitive type is an object
]
// args must be <object-model-object>, <fields-resulting-in-STs>
@@ -360,7 +360,7 @@ switch (this._input.LA(1)) {
<alt>
break;}; separator="\n">
default:
- <error>
+ break;
}
>>
@@ -530,7 +530,7 @@ else {
Wildcard(w) ::= <<
this.state = <w.stateNumber>;
-<if(w.labels)><w.labels:{l | <labelref(l)> = }><endif>matchWildcard();
+<if(w.labels)><w.labels:{l | <labelref(l)> = }><endif>this.matchWildcard();
>>
// ACTION STUFF
@@ -802,6 +802,9 @@ var antlr4 = require('antlr4/index');
>>
Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
+<if(superClass)>
+var <superClass> = require('./<superClass>').<superClass>;
+<endif>
<atn>
@@ -818,11 +821,25 @@ function <lexer.name>(input) {
<lexer.name>.prototype = Object.create(<if(superClass)><superClass><else>antlr4.Lexer<endif>.prototype);
<lexer.name>.prototype.constructor = <lexer.name>;
+Object.defineProperty(<lexer.name>.prototype, "atn", {
+ get : function() {
+ return atn;
+ }
+});
+
<lexer.name>.EOF = antlr4.Token.EOF;
<lexer.tokens:{k | <lexer.name>.<k> = <lexer.tokens.(k)>;}; separator="\n", wrap, anchor>
+<if(lexer.channels)>
+<lexer.channels:{c| <lexer.name>.<c> = <lexer.channels.(c)>;}; separator="\n">
+
+<endif>
+<if(rest(lexer.modes))>
<rest(lexer.modes):{m| <lexer.name>.<m> = <i>;}; separator="\n">
+<endif>
+<lexer.name>.prototype.channelNames = [ "DEFAULT_TOKEN_CHANNEL", "HIDDEN"<if (lexer.channels)>, <lexer.channels:{c| "<c>"}; separator=", ", wrap, anchor><endif> ];
+
<lexer.name>.prototype.modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ];
<lexer.name>.prototype.literalNames = [ <lexer.literalNames:{t | <t>}; null="null", separator=", ", wrap, anchor> ];
@@ -852,7 +869,7 @@ var serializedATN = ["<model.serialized; wrap={",<\n> "}>"].join("");
* must be an object, default value is "null".
*/
initValue(typeName) ::= <<
-<javaTypeInitMap.(typeName)>
+<javascriptTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".js"
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Python2/Python2.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Python2/Python2.stg
index 287f366..570f165 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Python2/Python2.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Python2/Python2.stg
@@ -52,6 +52,7 @@ ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
from __future__ import print_function
from antlr4 import *
from io import StringIO
+import sys
<namedActions.header>
<parser>
@@ -166,16 +167,16 @@ class <parser.name> ( <if(superClass)><superClass><else>Parser<endif> ):
dumpActions(recog, argFuncs, actionFuncs, sempredFuncs) ::= <<
<if(actionFuncs)>
def action(self, localctx, ruleIndex, actionIndex):
- if self._actions is None:
- actions = dict()
+ if self._actions is None:
+ actions = dict()
<recog.actionFuncs.values:{f|
- actions[<f.ruleIndex>] = self.<f.name>_action }; separator="\n">
- self._actions = actions
- action = self._actions.get(ruleIndex, None)
- if action is not None:
- action(localctx, actionIndex)
- else:
- raise Exception("No registered action for:" + str(ruleIndex))
+ actions[<f.ruleIndex>] = self.<f.name>_action }; separator="\n">
+ self._actions = actions
+ action = self._actions.get(ruleIndex, None)
+ if action is not None:
+ action(localctx, actionIndex)
+ else:
+ raise Exception("No registered action for:" + str(ruleIndex))
<actionFuncs.values; separator="\n">
@@ -198,8 +199,8 @@ def sempred(self, localctx, ruleIndex, predIndex):
>>
parser_ctor(p) ::= <<
-def __init__(self, input):
- super(<parser.name>, self).__init__(input)
+def __init__(self, input, output=sys.stdout):
+ super(<parser.name>, self).__init__(input, output=output)
self.checkVersion("<file.ANTLRVersion>")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
@@ -334,7 +335,7 @@ token = self._input.LA(1)
<alt>
pass}; separator="\nel">
else:
- <error>
+ pass
>>
LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
@@ -517,7 +518,7 @@ if not <chunks>:
ExceptionClause(e, catchArg, catchAction) ::= <<
catch (<catchArg>) {
- <catchAction>
+ <catchAction>
}
>>
@@ -622,7 +623,7 @@ def <r.name>(self, i=None):
LexerRuleContext() ::= "RuleContext"
/** The rule context name is the rule followed by a suffix; e.g.,
- * r becomes rContext.
+ * r becomes rContext.
*/
RuleContextNameSuffix() ::= "Context"
@@ -740,6 +741,7 @@ LexerFile(lexerFile, lexer, namedActions) ::= <<
from __future__ import print_function
from antlr4 import *
from io import StringIO
+import sys
<namedActions.header>
@@ -756,10 +758,18 @@ class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+<if(lexer.channels)>
+ <lexer.channels:{c| <c> = <lexer.channels.(c)>}; separator="\n">
+
+<endif>
+<if(rest(lexer.modes))>
<rest(lexer.modes):{m| <m> = <i>}; separator="\n">
+<endif>
<lexer.tokens:{k | <k> = <lexer.tokens.(k)>}; separator="\n", wrap, anchor>
+ channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN"<if (lexer.channels)>, <lexer.channels:{c| u"<c>"}; separator=", ", wrap, anchor><endif> ]
+
modeNames = [ <lexer.modes:{m| u"<m>"}; separator=", ", wrap, anchor> ]
literalNames = [ u"\<INVALID>",
@@ -772,8 +782,8 @@ class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
grammarFileName = u"<lexer.grammarFileName>"
- def __init__(self, input=None):
- super(<lexer.name>, self).__init__(input)
+ def __init__(self, input=None, output=sys.stdout):
+ super(<lexer.name>, self).__init__(input, output=output)
self.checkVersion("<lexerFile.ANTLRVersion>")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
@@ -796,10 +806,10 @@ def serializedATN():
>>
/** Using a type to init value map, try to init a type; if not in table
- * must be an object, default value is "null".
+ * must be an object, default value is "null".
*/
initValue(typeName) ::= <<
-<javaTypeInitMap.(typeName)>
+<pythonTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".py"
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg
index 643cec7..34e525b 100644
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Python3/Python3.stg
@@ -51,6 +51,8 @@ ParserFile(file, parser, namedActions, contextSuperClass) ::= <<
# encoding: utf-8
from antlr4 import *
from io import StringIO
+from typing.io import TextIO
+import sys
<namedActions.header>
<parser>
@@ -174,16 +176,16 @@ class <parser.name> ( <if(superClass)><superClass><else>Parser<endif> ):
dumpActions(recog, argFuncs, actionFuncs, sempredFuncs) ::= <<
<if(actionFuncs)>
def action(self, localctx:RuleContext, ruleIndex:int, actionIndex:int):
- if self._actions is None:
- actions = dict()
+ if self._actions is None:
+ actions = dict()
<recog.actionFuncs.values:{f|
- actions[<f.ruleIndex>] = self.<f.name>_action }; separator="\n">
- self._actions = actions
- action = self._actions.get(ruleIndex, None)
- if action is not None:
- action(localctx, actionIndex)
- else:
- raise Exception("No registered action for:" + str(ruleIndex))
+ actions[<f.ruleIndex>] = self.<f.name>_action }; separator="\n">
+ self._actions = actions
+ action = self._actions.get(ruleIndex, None)
+ if action is not None:
+ action(localctx, actionIndex)
+ else:
+ raise Exception("No registered action for:" + str(ruleIndex))
<actionFuncs.values; separator="\n">
@@ -206,8 +208,8 @@ def sempred(self, localctx:RuleContext, ruleIndex:int, predIndex:int):
>>
parser_ctor(p) ::= <<
-def __init__(self, input:TokenStream):
- super().__init__(input)
+def __init__(self, input:TokenStream, output:TextIO = sys.stdout):
+ super().__init__(input, output)
self.checkVersion("<file.ANTLRVersion>")
self._interp = ParserATNSimulator(self, self.atn, self.decisionsToDFA, self.sharedContextCache)
self._predicates = None
@@ -342,7 +344,7 @@ token = self._input.LA(1)
<alt>
pass}; separator="\nel">
else:
- <error>
+ pass
>>
LL1OptionalBlockSingleAlt(choice, expr, alts, preamble, error, followExpr) ::= <<
@@ -525,7 +527,7 @@ if not <chunks>:
ExceptionClause(e, catchArg, catchAction) ::= <<
catch (<catchArg>) {
- <catchAction>
+ <catchAction>
}
>>
@@ -630,7 +632,7 @@ def <r.name>(self, i:int=None):
LexerRuleContext() ::= "RuleContext"
/** The rule context name is the rule followed by a suffix; e.g.,
- * r becomes rContext.
+ * r becomes rContext.
*/
RuleContextNameSuffix() ::= "Context"
@@ -745,6 +747,8 @@ LexerFile(lexerFile, lexer, namedActions) ::= <<
<fileHeader(lexerFile.grammarFileName, lexerFile.ANTLRVersion)>
from antlr4 import *
from io import StringIO
+from typing.io import TextIO
+import sys
<namedActions.header>
@@ -761,10 +765,18 @@ class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
decisionsToDFA = [ DFA(ds, i) for i, ds in enumerate(atn.decisionToState) ]
+<if(lexer.channels)>
+ <lexer.channels:{c| <c> = <lexer.channels.(c)>}; separator="\n">
+
+<endif>
+<if(rest(lexer.modes))>
<rest(lexer.modes):{m| <m> = <i>}; separator="\n">
+<endif>
<lexer.tokens:{k | <k> = <lexer.tokens.(k)>}; separator="\n", wrap, anchor>
+ channelNames = [ u"DEFAULT_TOKEN_CHANNEL", u"HIDDEN"<if (lexer.channels)>, <lexer.channels:{c| u"<c>"}; separator=", ", wrap, anchor><endif> ]
+
modeNames = [ <lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor> ]
literalNames = [ "\<INVALID>",
@@ -777,8 +789,8 @@ class <lexer.name>(<if(superClass)><superClass><else>Lexer<endif>):
grammarFileName = "<lexer.grammarFileName>"
- def __init__(self, input=None):
- super().__init__(input)
+ def __init__(self, input=None, output:TextIO = sys.stdout):
+ super().__init__(input, output)
self.checkVersion("<lexerFile.ANTLRVersion>")
self._interp = LexerATNSimulator(self, self.atn, self.decisionsToDFA, PredictionContextCache())
self._actions = None
@@ -801,10 +813,10 @@ def serializedATN():
>>
/** Using a type to init value map, try to init a type; if not in table
- * must be an object, default value is "null".
+ * must be an object, default value is "null".
*/
initValue(typeName) ::= <<
-<javaTypeInitMap.(typeName)>
+<pythonTypeInitMap.(typeName)>
>>
codeFileExtension() ::= ".py"
diff --git a/tool/resources/org/antlr/v4/tool/templates/codegen/Swift/Swift.stg b/tool/resources/org/antlr/v4/tool/templates/codegen/Swift/Swift.stg
index 3fcdf38..088cf0c 100755
--- a/tool/resources/org/antlr/v4/tool/templates/codegen/Swift/Swift.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/codegen/Swift/Swift.stg
@@ -48,6 +48,9 @@ SwiftTypeMap ::= [
]
// args must be <object-model-object>, <fields-resulting-in-STs>
+accessLevelOpenOK(obj) ::= "<obj.accessLevel; null=\"open\">"
+accessLevelNotOpen(obj) ::= "<obj.accessLevel; null=\"public\">"
+
ParserFile(file, parser, namedActions,contextSuperClass) ::= <<
<fileHeader(file.grammarFileName, file.ANTLRVersion)>
<if(file.genPackage)>
@@ -71,7 +74,7 @@ import Antlr4
* This interface defines a complete listener for a parse tree produced by
* {@link <file.parserName>}.
*/
-public protocol <file.grammarName>Listener: ParseTreeListener {
+<accessLevelNotOpen(file)> protocol <file.grammarName>Listener: ParseTreeListener {
<file.listenerNames:{lname |
/**
<if(file.listenerLabelRuleNames.(lname))>
@@ -113,8 +116,8 @@ import Antlr4
* which can be extended to create a listener which only needs to handle a subset
* of the available methods.
*/
-open class <file.grammarName>BaseListener: <file.grammarName>Listener {
- public init() { \}
+<accessLevelOpenOK(file)> class <file.grammarName>BaseListener: <file.grammarName>Listener {
+ <accessLevelNotOpen(file)> init() { \}
<file.listenerNames:{lname |
/**
@@ -122,38 +125,38 @@ open class <file.grammarName>BaseListener: <file.grammarName>Listener {
*
* \<p>The default implementation does nothing.\</p>
*/
-open func enter<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) { \}
+<accessLevelOpenOK(file)> func enter<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) { \}
/**
* {@inheritDoc\}
*
* \<p>The default implementation does nothing.\</p>
*/
-open func exit<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) { \}}; separator="\n">
+<accessLevelOpenOK(file)> func exit<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) { \}}; separator="\n">
/**
* {@inheritDoc\}
*
* \<p>The default implementation does nothing.\</p>
*/
- open func enterEveryRule(_ ctx: ParserRuleContext) { }
+ <accessLevelOpenOK(file)> func enterEveryRule(_ ctx: ParserRuleContext) { }
/**
* {@inheritDoc\}
*
* \<p>The default implementation does nothing.\</p>
*/
- open func exitEveryRule(_ ctx: ParserRuleContext) { }
+ <accessLevelOpenOK(file)> func exitEveryRule(_ ctx: ParserRuleContext) { }
/**
* {@inheritDoc\}
*
* \<p>The default implementation does nothing.\</p>
*/
- open func visitTerminal(_ node: TerminalNode) { }
+ <accessLevelOpenOK(file)> func visitTerminal(_ node: TerminalNode) { }
/**
* {@inheritDoc\}
*
* \<p>The default implementation does nothing.\</p>
*/
- open func visitErrorNode(_ node: ErrorNode) { }
+ <accessLevelOpenOK(file)> func visitErrorNode(_ node: ErrorNode) { }
}
>>
@@ -172,7 +175,7 @@ import Antlr4
* @param \<T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
-open class <file.grammarName>Visitor\<T>: ParseTreeVisitor\<T> {
+<accessLevelOpenOK(file)> class <file.grammarName>Visitor\<T>: ParseTreeVisitor\<T> {
<file.visitorNames:{lname |
/**
<if(file.visitorLabelRuleNames.(lname))>
@@ -185,7 +188,7 @@ open class <file.grammarName>Visitor\<T>: ParseTreeVisitor\<T> {
- ctx: the parse tree
- returns: the visitor result
*/
-open func visit<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) -> T{
+<accessLevelOpenOK(file)> func visit<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) -> T {
fatalError(#function + " must be overridden")
\}
}; separator="\n">
@@ -208,7 +211,7 @@ import Antlr4
* @param \<T> The return type of the visit operation. Use {@link Void} for
* operations with no return type.
*/
-open class <file.grammarName>BaseVisitor\<T>: AbstractParseTreeVisitor\<T> {
+<accessLevelOpenOK(file)> class <file.grammarName>BaseVisitor\<T>: AbstractParseTreeVisitor\<T> {
<file.visitorNames:{lname |
/**
* {@inheritDoc\}
@@ -216,7 +219,7 @@ open class <file.grammarName>BaseVisitor\<T>: AbstractParseTreeVisitor\<T> {
* \<p>The default implementation returns the result of calling
* {@link #visitChildren\} on {@code ctx\}.\</p>
*/
-open func visit<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) -> T? { return visitChildren(ctx) \}}; separator="\n">
+<accessLevelOpenOK(file)> func visit<lname; format="cap">(_ ctx: <file.parserName>.<lname; format="cap">Context) -> T? { return visitChildren(ctx) \}}; separator="\n">
}
>>
@@ -230,7 +233,7 @@ Parser(parser, funcs, atn, sempredFuncs, superClass) ::= <<
Parser_(parser, funcs, atn, sempredFuncs, ctor, superClass) ::= <<
<!//@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})!>
-open class <parser.name>: <superClass; null="Parser"> {
+<accessLevelOpenOK(parser)> class <parser.name>: <superClass; null="Parser"> {
internal static var _decisionToDFA: [DFA] = {
var decisionToDFA = [DFA]()
@@ -241,38 +244,46 @@ open class <parser.name>: <superClass; null="Parser"> {
}
return decisionToDFA
}()
- internal static let _sharedContextCache: PredictionContextCache = PredictionContextCache()
+
+ internal static let _sharedContextCache = PredictionContextCache()
+
<if(parser.tokens)>
- public enum Tokens: Int {
+ <accessLevelNotOpen(parser)>
+ enum Tokens: Int {
case EOF = -1, <parser.tokens:{k | <k> = <parser.tokens.(k)>}; separator=", ", wrap, anchor>
}
<endif>
- public static let <parser.rules:{r | RULE_<r.name> = <r.index>}; separator=", ", wrap, anchor>
- public static let ruleNames: [String] = [
+
+ <accessLevelNotOpen(parser)>
+ static let <parser.rules:{r | RULE_<r.name> = <r.index>}; separator=", ", wrap, anchor>
+
+ <accessLevelNotOpen(parser)>
+ static let ruleNames: [String] = [
<parser.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
]
- <vocabulary(parser.literalNames, parser.symbolicNames)>
+ <vocabulary(parser.literalNames, parser.symbolicNames,
+ accessLevelNotOpen(parser))>
- override
- open func getGrammarFileName() -> String { return "<parser.grammarFileName; format="java-escape">" }
+ override <accessLevelOpenOK(parser)>
+ func getGrammarFileName() -> String { return "<parser.grammarFileName; format="java-escape">" }
- override
- open func getRuleNames() -> [String] { return <parser.name>.ruleNames }
+ override <accessLevelOpenOK(parser)>
+ func getRuleNames() -> [String] { return <parser.name>.ruleNames }
- override
- open func getSerializedATN() -> String { return <parser.name>._serializedATN }
+ override <accessLevelOpenOK(parser)>
+ func getSerializedATN() -> String { return <parser.name>._serializedATN }
- override
- open func getATN() -> ATN { return <parser.name>._ATN }
+ override <accessLevelOpenOK(parser)>
+ func getATN() -> ATN { return <parser.name>._ATN }
<namedActions.members>
<parser:(ctor)()>
<funcs; separator="\n">
<if(sempredFuncs)>
- override
- open func sempred(_ _localctx: RuleContext?, _ ruleIndex: Int, _ predIndex: Int)throws -> Bool {
+ override <accessLevelOpenOK(parser)>
+ func sempred(_ _localctx: RuleContext?, _ ruleIndex: Int, _ predIndex: Int)throws -> Bool {
switch (ruleIndex) {
<parser.sempredFuncs.values:{f|
case <f.ruleIndex>:
@@ -285,51 +296,30 @@ case <f.ruleIndex>:
<endif>
<atn>
- public static let _serializedATN : String = <parser.name>ATN().jsonString
- public static let _ATN: ATN = ATNDeserializer().deserializeFromJson(_serializedATN)
+
+ <accessLevelNotOpen(parser)>
+ static let _serializedATN = <parser.name>ATN().jsonString
+
+ <accessLevelNotOpen(parser)>
+ static let _ATN = ATNDeserializer().deserializeFromJson(_serializedATN)
}
>>
-vocabulary(literalNames, symbolicNames) ::= <<
+vocabulary(literalNames, symbolicNames, accessLevel) ::= <<
private static let _LITERAL_NAMES: [String?] = [
<literalNames:{t | <t>}; null="nil", separator=", ", wrap, anchor>
]
private static let _SYMBOLIC_NAMES: [String?] = [
<symbolicNames:{t | <t>}; null="nil", separator=", ", wrap, anchor>
]
-public static let VOCABULARY: Vocabulary = Vocabulary(_LITERAL_NAMES, _SYMBOLIC_NAMES)
-
-/**
- * @deprecated Use {@link #VOCABULARY} instead.
- */
-//@Deprecated
-public let tokenNames: [String?]? = {
- let length = _SYMBOLIC_NAMES.count
- var tokenNames = [String?](repeating: nil, count: length)
- for i in 0..\<length {
- var name = VOCABULARY.getLiteralName(i)
- if name == nil {
- name = VOCABULARY.getSymbolicName(i)
- }
- if name == nil {
- name = "\<INVALID>"
- }
- tokenNames[i] = name
- }
- return tokenNames
-}()
-
-override
-<!//@Deprecated!>
-open func getTokenNames() -> [String?]? {
- return tokenNames
-}
+<accessLevel>
+static let VOCABULARY = Vocabulary(_LITERAL_NAMES, _SYMBOLIC_NAMES)
>>
dumpActions(recog, argFuncs, actionFuncs, sempredFuncs) ::= <<
<if(actionFuncs)>
-override
-open func action(_ _localctx: RuleContext?, _ ruleIndex: Int, _ actionIndex: Int) throws {
+override <accessLevelOpenOK(parser)>
+func action(_ _localctx: RuleContext?, _ ruleIndex: Int, _ actionIndex: Int) throws {
switch (ruleIndex) {
<recog.actionFuncs.values:{f|
case <f.ruleIndex>:
@@ -341,8 +331,8 @@ case <f.ruleIndex>:
<actionFuncs.values; separator="\n">
<endif>
<if(sempredFuncs)>
-override
-open func sempred(_ _localctx: RuleContext?, _ ruleIndex: Int,_ predIndex: Int) throws -> Bool{
+override <accessLevelOpenOK(parser)>
+func sempred(_ _localctx: RuleContext?, _ ruleIndex: Int,_ predIndex: Int) throws -> Bool {
switch (ruleIndex) {
<recog.sempredFuncs.values:{f|
case <f.ruleIndex>:
@@ -357,15 +347,18 @@ case <f.ruleIndex>:
parser_ctor(p) ::= <<
-open override func getVocabulary() -> Vocabulary {
+override <accessLevelOpenOK(parser)>
+func getVocabulary() -> Vocabulary {
return <p.name>.VOCABULARY
}
-public override init(_ input:TokenStream)throws {
- RuntimeMetaData.checkVersion("4.6", RuntimeMetaData.VERSION)
+override <accessLevelNotOpen(parser)>
+init(_ input:TokenStream) throws {
+ RuntimeMetaData.checkVersion("4.7.1", RuntimeMetaData.VERSION)
try super.init(input)
_interp = ParserATNSimulator(self,<p.name>._ATN,<p.name>._decisionToDFA, <parser.name>._sharedContextCache)
}
+
>>
/* This generates a private method since the actionIndex is generated, making an
@@ -401,7 +394,7 @@ RuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,namedActions,fina
<ruleCtx>
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
@discardableResult
-<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else>open func <endif><currentRule.name>(<if(first(args))>_ <endif><args; separator=", _">) throws -> <currentRule.ctxType> {
+<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else> <accessLevelOpenOK(parser)> func <endif><currentRule.name>(<if(first(args))>_ <endif><args; separator=", _">) throws -> <currentRule.ctxType> {
var _localctx: <currentRule.ctxType> = <currentRule.ctxType>(_ctx, getState()<currentRule.args:{a | , <a.name>}>)
try enterRule(_localctx, <currentRule.startState>, <parser.name>.RULE_<currentRule.name>)
<namedActions.init>
@@ -439,7 +432,7 @@ LeftRecursiveRuleFunction(currentRule,args,code,locals,ruleCtx,altLabelCtxs,
<ruleCtx>
<altLabelCtxs:{l | <altLabelCtxs.(l)>}; separator="\n">
-<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else>public final <endif> func <currentRule.name>( <if(first(args))>_ <endif><args; separator=", _">) throws -> <currentRule.ctxType> {
+<if(currentRule.modifiers)><currentRule.modifiers:{f | <f> }><else> <accessLevelNotOpen(parser)> final <endif> func <currentRule.name>( <if(first(args))>_ <endif><args; separator=", _">) throws -> <currentRule.ctxType> {
return try <currentRule.name>(0<currentRule.args:{a | , <a.name>}>)
}
@discardableResult
@@ -510,7 +503,7 @@ switch (<parser.name>.Tokens(rawValue: try _input.LA(1))!) {
<alt>
break}; separator="\n">
default:
- <error>
+ break
}
>>
@@ -612,7 +605,7 @@ case <i><if(!choice.ast.greedy)>+1<endif>:
Sync(s) ::= "sync(<s.expecting.name>);"
-ThrowNoViableAlt(t) ::= "throw try ANTLRException.recognition(e: NoViableAltException(self))"
+ThrowNoViableAlt(t) ::= "throw ANTLRException.recognition(e: NoViableAltException(self))"
TestSetInline(s) ::= <<
<!<s.bitsets:{bits | <if(rest(rest(bits.ttypes)))><bitsetBitfieldComparison(s, bits)><else><bitsetInlineComparison(s, bits)><endif>}; separator=" || ">!>
@@ -717,7 +710,7 @@ ArgAction(a, chunks) ::= "<chunks>"
SemPred(p, chunks, failChunks) ::= <<
setState(<p.stateNumber>)
if (!(<chunks>)) {
- throw try ANTLRException.recognition(e:FailedPredicateException(self, <p.predicate><if(failChunks)>, <failChunks><elseif(p.msg)>, <p.msg><endif>))
+ throw ANTLRException.recognition(e:FailedPredicateException(self, <p.predicate><if(failChunks)>, <failChunks><elseif(p.msg)>, <p.msg><endif>))
}
>>
@@ -781,33 +774,45 @@ AddToLabelList(a) ::= "<ctx(a.label)>.<a.listName>.append(<labelref(a.label)>)"
TokenDecl(t) ::= "<t.name>: <SwiftTypeMap.(TokenLabelType())>!"
TokenTypeDecl(t) ::= "var <t.name>: Int = 0"
-TokenListDecl(t) ::= "<t.name>: Array\<Token> = Array\<Token>()"
+TokenListDecl(t) ::= "<t.name>: [Token] = [Token]()"
RuleContextDecl(r) ::= "<r.name>: <r.ctxName>!"
-RuleContextListDecl(rdecl) ::= "<rdecl.name>:Array\<<rdecl.ctxName>> = Array\<<rdecl.ctxName>>()"
-
-ContextTokenGetterDecl(t) ::=
- "open func <t.name>() -> TerminalNode? { return getToken(<parser.name>.Tokens.<t.name>.rawValue, 0) }"
-ContextTokenListGetterDecl(t) ::=
- "open func <t.name>() -> Array\<TerminalNode> { return getTokens(<parser.name>.Tokens.<t.name>.rawValue) }"
-ContextTokenListIndexedGetterDecl(t) ::= <<
-open func <t.name>(_ i:Int) -> TerminalNode?{
- return getToken(<parser.name>.Tokens.<t.name>.rawValue, i)
-}
+RuleContextListDecl(rdecl) ::= "<rdecl.name>: [<rdecl.ctxName>] = [<rdecl.ctxName>]()"
+
+ContextTokenGetterDecl(t) ::= <<
+ <accessLevelOpenOK(parser)>
+ func <t.name>() -> TerminalNode? {
+ return getToken(<parser.name>.Tokens.<t.name>.rawValue, 0)
+ }
>>
-ContextRuleGetterDecl(r) ::= <<
-open func <r.name>() -> <r.ctxName>? {
- return getRuleContext(<r.ctxName>.self,0)
-}
+ContextTokenListGetterDecl(t) ::= <<
+ <accessLevelOpenOK(parser)>
+ func <t.name>() -> [TerminalNode] {
+ return getTokens(<parser.name>.Tokens.<t.name>.rawValue)
+ }
>>
-ContextRuleListGetterDecl(r) ::= <<
-open func <r.name>() -> Array\<<r.ctxName>\> {
- return getRuleContexts(<r.ctxName>.self)
-}
+ContextTokenListIndexedGetterDecl(t) ::= <<
+ <accessLevelOpenOK(parser)>
+ func <t.name>(_ i:Int) -> TerminalNode? {
+ return getToken(<parser.name>.Tokens.<t.name>.rawValue, i)
+ }
+>>
+ContextRuleGetterDecl(r) ::= <<
+ <accessLevelOpenOK(parser)>
+ func <r.name>() -> <r.ctxName>? {
+ return getRuleContext(<r.ctxName>.self, 0)
+ }
+>>
+ContextRuleListGetterDecl(r) ::= <<
+ <accessLevelOpenOK(parser)>
+ func <r.name>() -> [<r.ctxName>] {
+ return getRuleContexts(<r.ctxName>.self)
+ }
>>
ContextRuleListIndexedGetterDecl(r) ::= <<
-open func <r.name>(_ i: Int) -> <r.ctxName>? {
- return getRuleContext(<r.ctxName>.self,i)
-}
+ <accessLevelOpenOK(parser)>
+ func <r.name>(_ i: Int) -> <r.ctxName>? {
+ return getRuleContext(<r.ctxName>.self, i)
+ }
>>
LexerRuleContext() ::= "RuleContext"
@@ -827,20 +832,26 @@ CaptureNextTokenType(d) ::= "<d.varName> = try _input.LA(1)"
StructDecl(struct,ctorAttrs,attrs,getters,dispatchMethods,interfaces,extensionMembers,
superClass={ParserRuleContext}) ::= <<
-open class <struct.name>:<if(contextSuperClass)><contextSuperClass><else>ParserRuleContext<endif><if(interfaces)>, <interfaces; separator=", "><endif> {
- <attrs:{a | public var <a>}; separator="\n">
+
+<accessLevelNotOpen(parser)> class <struct.name>: <if(contextSuperClass)><contextSuperClass><else>ParserRuleContext<endif><if(interfaces)>, <interfaces; separator=", "><endif> {
+ <attrs:{a | <accessLevelOpenOK(parser)> var <a>}; separator="\n">
<getters:{g | <g>}; separator="\n">
- <! <if(ctorAttrs)>public init(_ parent: ParserRuleContext,_ invokingState: Int) { super.init(parent, invokingState) }<endif> !>
- <if(ctorAttrs)>
- public convenience init(_ parent: ParserRuleContext?, _ invokingState: Int<ctorAttrs:{a | , _ <a>}>) {
+ <! <if(ctorAttrs)> <accessLevelNotOpen(parser)> init(_ parent: ParserRuleContext,_ invokingState: Int) { super.init(parent, invokingState) }<endif> !>
+ <if(ctorAttrs)>
+ <accessLevelNotOpen(parser)> convenience init(_ parent: ParserRuleContext?, _ invokingState: Int<ctorAttrs:{a | , _ <a>}>) {
self.init(parent, invokingState)
<struct.ctorAttrs:{a | self.<a.name> = <a.name>;}; separator="\n">
- }
+ }
<endif>
- open override func getRuleIndex() -> Int { return <parser.name>.RULE_<struct.derivedFromName> }
+
+ override <accessLevelOpenOK(parser)>
+ func getRuleIndex() -> Int {
+ return <parser.name>.RULE_<struct.derivedFromName>
+ }
<if(struct.provideCopyFrom)> <! don't need copy unless we have subclasses !>
- <!public init() { }!>
- public func copyFrom(_ ctx: <struct.name>) {
+ <! <accessLevelNotOpen(parser)> init() { }!>
+ <accessLevelOpenOK(parser)>
+ func copyFrom(_ ctx: <struct.name>) {
super.copyFrom(ctx)
<struct.attrs:{a | self.<a.name> = ctx.<a.name>;}; separator="\n">
}
@@ -851,10 +862,12 @@ open class <struct.name>:<if(contextSuperClass)><contextSuperClass><else>ParserR
>>
AltLabelStructDecl(struct,attrs,getters,dispatchMethods) ::= <<
-public final class <struct.name>: <currentRule.name; format="cap">Context {
- <attrs:{a | public var <a>}; separator="\n">
+<accessLevelNotOpen(parser)> class <struct.name>: <currentRule.name; format="cap">Context {
+ <attrs:{a | <accessLevelNotOpen(parser)> var <a>}; separator="\n">
<getters:{g | <g>}; separator="\n">
- public init(_ ctx: <currentRule.name; format="cap">Context) {
+
+ <accessLevelNotOpen(parser)>
+ init(_ ctx: <currentRule.name; format="cap">Context) {
super.init()
copyFrom(ctx)
}
@@ -863,22 +876,23 @@ public final class <struct.name>: <currentRule.name; format="cap">Context {
>>
ListenerDispatchMethod(method) ::= <<
-override
-open func <if(method.isEnter)>enter<else>exit<endif>Rule(_ listener: ParseTreeListener) {
- if listener is <parser.grammarName>Listener {
- (listener as! <parser.grammarName>Listener).<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">(self)
+override <accessLevelOpenOK(parser)>
+func <if(method.isEnter)>enter<else>exit<endif>Rule(_ listener: ParseTreeListener) {
+ if let listener = listener as? <parser.grammarName>Listener {
+ listener.<if(method.isEnter)>enter<else>exit<endif><struct.derivedFromName; format="cap">(self)
}
}
>>
VisitorDispatchMethod(method) ::= <<
-override
-open func accept\<T>(_ visitor: ParseTreeVisitor\<T>) -> T? {
- if visitor is <parser.grammarName>Visitor {
- return (visitor as! <parser.grammarName>Visitor\<T>).visit<struct.derivedFromName; format="cap">(self)
- }else if visitor is <parser.grammarName>BaseVisitor {
- return (visitor as! <parser.grammarName>BaseVisitor\<T>).visit<struct.derivedFromName; format="cap">(self)
- }
+override <accessLevelOpenOK(parser)>
+func accept\<T>(_ visitor: ParseTreeVisitor\<T>) -> T? {
+ if let visitor = visitor as? <parser.grammarName>Visitor {
+ return visitor.visit<struct.derivedFromName; format="cap">(self)
+ }
+ else if let visitor = visitor as? <parser.grammarName>BaseVisitor {
+ return visitor.visit<struct.derivedFromName; format="cap">(self)
+ }
else {
return visitor.visitChildren(self)
}
@@ -948,7 +962,8 @@ import Antlr4
<lexer>
>>
Lexer(lexer, atn, actionFuncs, sempredFuncs, superClass) ::= <<
-open class <lexer.name>: <superClass; null="Lexer"> {
+<accessLevelOpenOK(lexer)> class <lexer.name>: <superClass; null="Lexer"> {
+
internal static var _decisionToDFA: [DFA] = {
var decisionToDFA = [DFA]()
let length = <lexer.name>._ATN.getNumberOfDecisions()
@@ -959,52 +974,77 @@ open class <lexer.name>: <superClass; null="Lexer"> {
return decisionToDFA
}()
- internal static let _sharedContextCache:PredictionContextCache = PredictionContextCache()
- public static let <lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>
+ internal static let _sharedContextCache = PredictionContextCache()
+
+ <accessLevelNotOpen(lexer)>
+ static let <lexer.tokens:{k | <k>=<lexer.tokens.(k)>}; separator=", ", wrap, anchor>
+
<if(lexer.channels)>
- public let <lexer.channels:{k | <k>=<lexer.channels.(k)>}; separator=", ", wrap, anchor>
+ <accessLevelNotOpen(lexer)>
+ static let <lexer.channels:{k | <k>=<lexer.channels.(k)>}; separator=", ", wrap, anchor>
<endif>
- <rest(lexer.modes):{m| public static let <m>: Int = <i>;}; separator="\n">
- public static let modeNames: [String] = [
+ <if(rest(lexer.modes))>
+ <accessLevelNotOpen(lexer)>
+ static let <rest(lexer.modes):{m| <m>=<i>}; separator=", ", wrap, anchor>
+ <endif>
+ <accessLevelNotOpen(lexer)>
+ static let channelNames: [String] = [
+ "DEFAULT_TOKEN_CHANNEL", "HIDDEN"<if (lexer.channels)>, <lexer.channels:{c| "<c>"}; separator=", ", wrap, anchor><endif>
+ ]
+
+ <accessLevelNotOpen(lexer)>
+ static let modeNames: [String] = [
<lexer.modes:{m| "<m>"}; separator=", ", wrap, anchor>
]
- public static let ruleNames: [String] = [
+ <accessLevelNotOpen(lexer)>
+ static let ruleNames: [String] = [
<lexer.ruleNames:{r | "<r>"}; separator=", ", wrap, anchor>
]
- <vocabulary(lexer.literalNames, lexer.symbolicNames)>
+ <vocabulary(lexer.literalNames, lexer.symbolicNames,
+ accessLevelNotOpen(lexer))>
<namedActions.members>
- open override func getVocabulary() -> Vocabulary {
- return <lexer.name>.VOCABULARY
- }
- public override init(_ input: CharStream) {
+ override <accessLevelOpenOK(lexer)>
+ func getVocabulary() -> Vocabulary {
+ return <lexer.name>.VOCABULARY
+ }
+
+ <accessLevelNotOpen(lexer)>
+ required init(_ input: CharStream) {
RuntimeMetaData.checkVersion("<lexerFile.ANTLRVersion>", RuntimeMetaData.VERSION)
super.init(input)
_interp = LexerATNSimulator(self, <lexer.name>._ATN, <lexer.name>._decisionToDFA, <lexer.name>._sharedContextCache)
}
- override
- open func getGrammarFileName() -> String { return "<lexer.grammarFileName>" }
+ override <accessLevelOpenOK(lexer)>
+ func getGrammarFileName() -> String { return "<lexer.grammarFileName>" }
- override
- open func getRuleNames() -> [String] { return <lexer.name>.ruleNames }
+ override <accessLevelOpenOK(lexer)>
+ func getRuleNames() -> [String] { return <lexer.name>.ruleNames }
- override
- open func getSerializedATN() -> String { return <lexer.name>._serializedATN }
+ override <accessLevelOpenOK(lexer)>
+ func getSerializedATN() -> String { return <lexer.name>._serializedATN }
- override
- open func getModeNames() -> [String] { return <lexer.name>.modeNames }
+ override <accessLevelOpenOK(lexer)>
+ func getChannelNames() -> [String] { return <lexer.name>.channelNames }
- override
- open func getATN() -> ATN { return <lexer.name>._ATN }
+ override <accessLevelOpenOK(lexer)>
+ func getModeNames() -> [String] { return <lexer.name>.modeNames }
+
+ override <accessLevelOpenOK(lexer)>
+ func getATN() -> ATN { return <lexer.name>._ATN }
<dumpActions(lexer, "", actionFuncs, sempredFuncs)>
<atn>
- public static let _serializedATN: String = <lexer.name>ATN().jsonString
- public static let _ATN: ATN = ATNDeserializer().deserializeFromJson(_serializedATN)
+
+ <accessLevelNotOpen(lexer)>
+ static let _serializedATN: String = <lexer.name>ATN().jsonString
+
+ <accessLevelNotOpen(lexer)>
+ static let _ATN: ATN = ATNDeserializer().deserializeFromJson(_serializedATN)
}
>>
diff --git a/tool/resources/org/antlr/v4/tool/templates/messages/formats/gnu.stg b/tool/resources/org/antlr/v4/tool/templates/messages/formats/gnu.stg
index f243ab7..c7d06d9 100644
--- a/tool/resources/org/antlr/v4/tool/templates/messages/formats/gnu.stg
+++ b/tool/resources/org/antlr/v4/tool/templates/messages/formats/gnu.stg
@@ -31,9 +31,9 @@ This file contains the actual layout of the messages emitted by ANTLR.
This file contains the format that mimics GCC output.
*/
-location(file, line, column) ::= "<file>:<line>:"
+location(file, line, column) ::= "<file>:<line>:<column>:"
-message(id, text) ::= "<text> (<id>)"
+message(id, text) ::= "<text> [error <id>]"
report(location, message, type) ::= "<location> <type>: <message>"
diff --git a/tool/resources/org/antlr/v4/tool/templates/unicodedata.st b/tool/resources/org/antlr/v4/tool/templates/unicodedata.st
new file mode 100644
index 0000000..0f22c73
--- /dev/null
+++ b/tool/resources/org/antlr/v4/tool/templates/unicodedata.st
@@ -0,0 +1,62 @@
+unicodedata(propertyCodePointRanges, propertyAliases) ::= <<
+package org.antlr.v4.unicode;
+
+import java.util.Arrays;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Locale;
+import java.util.Map;
+
+import org.antlr.v4.runtime.misc.IntervalSet;
+import org.antlr.v4.runtime.misc.Interval;
+
+/**
+ * Code-generated utility class mapping Unicode properties to Unicode code point ranges.
+ */
+public abstract class UnicodeData {
+ private static final Map\<String, IntervalSet\> propertyCodePointRanges = new HashMap\<\>(<length(propertyCodePointRanges)>);
+ private static final Map\<String, String\> propertyAliases = new HashMap\<\>(<length(propertyAliases)>);
+
+ // Work around Java 64k bytecode method limit by splitting up static
+ // initialization into one method per Unicode property
+
+ <propertyCodePointRanges.keys:{ k | // Unicode code points with property "<k>"
+static private void addProperty<i>() {
+ List\<Interval\> intervals = Arrays.asList(
+ <propertyCodePointRanges.(k).intervals:{ interval | Interval.of(<interval.a>, <interval.b>)}; separator=",\n">
+ );
+ IntervalSet codePointRanges = new IntervalSet(intervals);
+ codePointRanges.setReadonly(true);
+ propertyCodePointRanges.put("<k>".toLowerCase(Locale.US), codePointRanges);
+\}}; separator="\n\n">
+
+ // Property aliases
+ static private void addPropertyAliases() {
+ <propertyAliases.keys:{ k | propertyAliases.put("<k>".toLowerCase(Locale.US), "<propertyAliases.(k)>".toLowerCase(Locale.US)); }; separator="\n">
+ }
+
+ // Put it all together
+ static {
+ <propertyCodePointRanges.keys:{ k | addProperty<i>(); }; separator="\n">
+ addPropertyAliases();
+ }
+
+ private static String normalize(String propertyCodeOrAlias) {
+ return propertyCodeOrAlias.toLowerCase(Locale.US).replace('-', '_');
+ }
+
+ /**
+ * Given a Unicode property (general category code, binary property name, or script name),
+ * returns the {@link IntervalSet} of Unicode code point ranges which have that property.
+ */
+ public static IntervalSet getPropertyCodePoints(String propertyCodeOrAlias) {
+ String normalizedPropertyCodeOrAlias = normalize(propertyCodeOrAlias);
+ IntervalSet result = propertyCodePointRanges.get(normalizedPropertyCodeOrAlias);
+ if (result == null) {
+ String propertyCode = propertyAliases.get(normalizedPropertyCodeOrAlias);
+ result = propertyCodePointRanges.get(propertyCode);
+ }
+ return result;
+ }
+}
+>>
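
For context, the new unicodedata.st template above generates the org.antlr.v4.unicode.UnicodeData class used by the tool. A minimal sketch of querying it, assuming the generated class is on the classpath; UnicodeDataDemo is just an illustrative name:

import org.antlr.v4.runtime.misc.IntervalSet;
import org.antlr.v4.unicode.UnicodeData;

public class UnicodeDataDemo {
    public static void main(String[] args) {
        // Lookups are case-insensitive and accept aliases (see normalize() in the template).
        IntervalSet upper = UnicodeData.getPropertyCodePoints("Lu");
        System.out.println(upper.contains('A'));   // true: 'A' is an uppercase letter
        System.out.println(upper.contains('a'));   // false
    }
}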