diff --git a/ports/dart/.gitignore b/ports/dart/.gitignore
new file mode 100644
index 0000000..881e659
--- /dev/null
+++ b/ports/dart/.gitignore
@@ -0,0 +1,9 @@
+# https://dart.dev/guides/libraries/private-files
+# Created by `dart pub`
+.dart_tool/
+
+# Avoid committing pubspec.lock for library packages; see
+# https://dart.dev/guides/libraries/private-files#pubspeclock.
+pubspec.lock
+
+!lib/
\ No newline at end of file
diff --git a/ports/dart/CHANGELOG.md b/ports/dart/CHANGELOG.md
new file mode 100644
index 0000000..98f106f
--- /dev/null
+++ b/ports/dart/CHANGELOG.md
@@ -0,0 +1,5 @@
+## 1.0.0+1
+
+## 1.0.0
+
+- Translated from: [antlr4-c3 java](https://github.com/mike-lischke/antlr4-c3/tree/main/ports/java)
\ No newline at end of file
diff --git a/ports/dart/LICENSE b/ports/dart/LICENSE
new file mode 100644
index 0000000..5bcd611
--- /dev/null
+++ b/ports/dart/LICENSE
@@ -0,0 +1,23 @@
+MIT License
+
+Copyright (c) 2024 VMware, Inc & dudu.ltd. All Rights Reserved.
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
diff --git a/ports/dart/README-EN.md b/ports/dart/README-EN.md
new file mode 100644
index 0000000..b9201f3
--- /dev/null
+++ b/ports/dart/README-EN.md
@@ -0,0 +1,60 @@
+
+ Antlr4 Code Completion Core
+
+
+
+
+
+
+
+
+
+ 中文
+ 丨
+ English
+
+
+This is the Dart version of the antlr4-c3 library. Translated from: [antlr4-c3 java](https://github.com/mike-lischke/antlr4-c3/tree/main/ports/java)
+
+## Usage
+
+Write or obtain an antlr4 grammar file, then use antlr4 to generate Dart code.
+
+```shell
+# Expr.g4 is an antlr4 grammar file, which you can write yourself or obtain from the internet
+# 4.13.2 is determined by the antlr4 version in pubspec.yaml
+# ../example/gen is the directory for the generated code, please modify according to your actual situation
+antlr4 -v 4.13.2 -Dlanguage=Dart Expr.g4 -o ../example/gen
+```
+
+Then use the generated code in your project.
+
+```dart
+import 'dart:math';
+
+import 'package:antlr4/antlr4.dart';
+import 'package:antlr4_c3/antlr4_c3.dart';
+
+import 'gen/ExprLexer.dart';
+import 'gen/ExprParser.dart';
+
+void main() {
+ var expression = 'var c = a + b()';
+ var lexer = ExprLexer(InputStream.fromString(expression));
+ var tokens = CommonTokenStream(lexer);
+ var parser = ExprParser(tokens);
+
+ lexer.removeErrorListeners();
+ parser.removeErrorListeners();
+
+ parser.expression();
+ var core = CodeCompletionCore(parser, null, null);
+
+ var candidates = core.collectCandidates(max(0, tokens.size), null);
+ print(candidates);
+}
+```
+
+## License
+
+antlr4_c3 is licensed under the MIT License.
diff --git a/ports/dart/README.md b/ports/dart/README.md
new file mode 100644
index 0000000..3d23005
--- /dev/null
+++ b/ports/dart/README.md
@@ -0,0 +1,64 @@
+
+ Antlr4 Code Completion Core
+
+
+
+
+
+
+
+
+ 中文丨
+
+ English
+
+
+
+这是antlr4-c3库的Dart版本。翻译自:[antlr4-c3 java](https://github.com/mike-lischke/antlr4-c3/tree/main/ports/java)
+
+## 用法
+
+编写或者获取一个antlr4的语法文件,然后使用antlr4生成Dart代码。
+
+```shell
+# Expr.g4 为 antlr4 语法文件,可以自己编写或者从网上获取
+# 4.13.2 由 pubspec.yaml 中的 antlr4 版本决定
+# ../example/gen 为生成的代码目录,请根据实际情况修改
+antlr4 -v 4.13.2 -Dlanguage=Dart Expr.g4 -o ../example/gen
+```
+
+然后在你的代码中使用生成的代码。
+
+```dart
+import 'dart:math';
+
+import 'package:antlr4/antlr4.dart';
+import 'package:antlr4_c3/antlr4_c3.dart';
+
+import 'gen/ExprLexer.dart';
+import 'gen/ExprParser.dart';
+
+void main() {
+ var expression = 'var c = a + b()';
+ var lexer = ExprLexer(InputStream.fromString(expression));
+ var tokens = CommonTokenStream(lexer);
+ var parser = ExprParser(tokens);
+
+ lexer.removeErrorListeners();
+ parser.removeErrorListeners();
+
+ parser.expression();
+ var core = CodeCompletionCore(parser, null, null);
+
+ var candidates = core.collectCandidates(max(0, tokens.size), null);
+ print(candidates);
+}
+```
+
+
+## 开源协议
+
+antlr4_c3 使用 MIT 协议。
+
+
+
diff --git a/ports/dart/analysis_options.yaml b/ports/dart/analysis_options.yaml
new file mode 100644
index 0000000..dee8927
--- /dev/null
+++ b/ports/dart/analysis_options.yaml
@@ -0,0 +1,30 @@
+# This file configures the static analysis results for your project (errors,
+# warnings, and lints).
+#
+# This enables the 'recommended' set of lints from `package:lints`.
+# This set helps identify many issues that may lead to problems when running
+# or consuming Dart code, and enforces writing Dart using a single, idiomatic
+# style and format.
+#
+# If you want a smaller set of lints you can change this to specify
+# 'package:lints/core.yaml'. These are just the most critical lints
+# (the recommended set includes the core lints).
+# The core lints are also what is used by pub.dev for scoring packages.
+
+include: package:lints/recommended.yaml
+
+# Uncomment the following section to specify additional rules.
+
+# linter:
+# rules:
+# - camel_case_types
+
+# analyzer:
+# exclude:
+# - path/to/excluded/files/**
+
+# For more information about the core and recommended set of lints, see
+# https://dart.dev/go/core-lints
+
+# For additional information about configuring this file, see
+# https://dart.dev/guides/language/analysis-options
diff --git a/ports/dart/example/antlr4_c3_example.dart b/ports/dart/example/antlr4_c3_example.dart
new file mode 100644
index 0000000..ee96632
--- /dev/null
+++ b/ports/dart/example/antlr4_c3_example.dart
@@ -0,0 +1,31 @@
+/*
+ * Copyright © 2024 VMware, Inc & dudu.ltd. All Rights Reserved.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * See LICENSE file for more info.
+ */
+
+import 'dart:math';
+
+import 'package:antlr4/antlr4.dart';
+import 'package:antlr4_c3/antlr4_c3.dart';
+
+import 'gen/ExprLexer.dart';
+import 'gen/ExprParser.dart';
+
+void main() {
+ var expression = 'var c = a + b()';
+ var lexer = ExprLexer(InputStream.fromString(expression));
+ var tokens = CommonTokenStream(lexer);
+ var parser = ExprParser(tokens);
+
+ lexer.removeErrorListeners();
+ parser.removeErrorListeners();
+
+ parser.expression();
+ var core = CodeCompletionCore(parser, null, null);
+
+ var candidates = core.collectCandidates(max(0, tokens.size), null);
+ print(candidates);
+}
diff --git a/ports/dart/example/gen/Expr.interp b/ports/dart/example/gen/Expr.interp
new file mode 100644
index 0000000..5fc347a
--- /dev/null
+++ b/ports/dart/example/gen/Expr.interp
@@ -0,0 +1,38 @@
+token literal names:
+null
+null
+null
+'+'
+'-'
+'*'
+'/'
+'='
+'('
+')'
+null
+null
+
+token symbolic names:
+null
+VAR
+LET
+PLUS
+MINUS
+MULTIPLY
+DIVIDE
+EQUAL
+OPEN_PAR
+CLOSE_PAR
+ID
+WS
+
+rule names:
+expression
+assignment
+simpleExpression
+variableRef
+functionRef
+
+
+atn:
+[4, 1, 11, 42, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 1, 0, 1, 0, 3, 0, 13, 8, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 2, 3, 2, 23, 8, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 5, 2, 31, 8, 2, 10, 2, 12, 2, 34, 9, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 4, 1, 4, 1, 4, 0, 1, 4, 5, 0, 2, 4, 6, 8, 0, 3, 1, 0, 1, 2, 1, 0, 3, 4, 1, 0, 5, 6, 40, 0, 12, 1, 0, 0, 0, 2, 14, 1, 0, 0, 0, 4, 22, 1, 0, 0, 0, 6, 35, 1, 0, 0, 0, 8, 37, 1, 0, 0, 0, 10, 13, 3, 2, 1, 0, 11, 13, 3, 4, 2, 0, 12, 10, 1, 0, 0, 0, 12, 11, 1, 0, 0, 0, 13, 1, 1, 0, 0, 0, 14, 15, 7, 0, 0, 0, 15, 16, 5, 10, 0, 0, 16, 17, 5, 7, 0, 0, 17, 18, 3, 4, 2, 0, 18, 3, 1, 0, 0, 0, 19, 20, 6, 2, -1, 0, 20, 23, 3, 6, 3, 0, 21, 23, 3, 8, 4, 0, 22, 19, 1, 0, 0, 0, 22, 21, 1, 0, 0, 0, 23, 32, 1, 0, 0, 0, 24, 25, 10, 4, 0, 0, 25, 26, 7, 1, 0, 0, 26, 31, 3, 4, 2, 5, 27, 28, 10, 3, 0, 0, 28, 29, 7, 2, 0, 0, 29, 31, 3, 4, 2, 4, 30, 24, 1, 0, 0, 0, 30, 27, 1, 0, 0, 0, 31, 34, 1, 0, 0, 0, 32, 30, 1, 0, 0, 0, 32, 33, 1, 0, 0, 0, 33, 5, 1, 0, 0, 0, 34, 32, 1, 0, 0, 0, 35, 36, 5, 10, 0, 0, 36, 7, 1, 0, 0, 0, 37, 38, 5, 10, 0, 0, 38, 39, 5, 8, 0, 0, 39, 40, 5, 9, 0, 0, 40, 9, 1, 0, 0, 0, 4, 12, 22, 30, 32]
\ No newline at end of file
diff --git a/ports/dart/example/gen/Expr.tokens b/ports/dart/example/gen/Expr.tokens
new file mode 100644
index 0000000..be523c1
--- /dev/null
+++ b/ports/dart/example/gen/Expr.tokens
@@ -0,0 +1,18 @@
+VAR=1
+LET=2
+PLUS=3
+MINUS=4
+MULTIPLY=5
+DIVIDE=6
+EQUAL=7
+OPEN_PAR=8
+CLOSE_PAR=9
+ID=10
+WS=11
+'+'=3
+'-'=4
+'*'=5
+'/'=6
+'='=7
+'('=8
+')'=9
diff --git a/ports/dart/example/gen/ExprBaseListener.dart b/ports/dart/example/gen/ExprBaseListener.dart
new file mode 100644
index 0000000..5aefabd
--- /dev/null
+++ b/ports/dart/example/gen/ExprBaseListener.dart
@@ -0,0 +1,68 @@
+// Generated from Expr.g4 by ANTLR 4.13.2
+// ignore_for_file: unused_import, unused_local_variable, prefer_single_quotes
+import 'package:antlr4/antlr4.dart';
+
+import 'ExprParser.dart';
+import 'ExprListener.dart';
+
+
+/// This class provides an empty implementation of [ExprListener],
+/// which can be extended to create a listener which only needs to handle
+/// a subset of the available methods.
+class ExprBaseListener implements ExprListener {
+ /// The default implementation does nothing.
+ @override
+ void enterExpression(ExpressionContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void exitExpression(ExpressionContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void enterAssignment(AssignmentContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void exitAssignment(AssignmentContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void enterSimpleExpression(SimpleExpressionContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void exitSimpleExpression(SimpleExpressionContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void enterVariableRef(VariableRefContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void exitVariableRef(VariableRefContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void enterFunctionRef(FunctionRefContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void exitFunctionRef(FunctionRefContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void enterEveryRule(ParserRuleContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void exitEveryRule(ParserRuleContext ctx) {}
+
+ /// The default implementation does nothing.
+ @override
+ void visitTerminal(TerminalNode node) {}
+
+ /// The default implementation does nothing.
+ @override
+ void visitErrorNode(ErrorNode node) {}
+}
diff --git a/ports/dart/example/gen/ExprLexer.dart b/ports/dart/example/gen/ExprLexer.dart
new file mode 100644
index 0000000..4c087bf
--- /dev/null
+++ b/ports/dart/example/gen/ExprLexer.dart
@@ -0,0 +1,85 @@
+// Generated from Expr.g4 by ANTLR 4.13.2
+// ignore_for_file: unused_import, unused_local_variable, prefer_single_quotes
+import 'package:antlr4/antlr4.dart';
+
+
+class ExprLexer extends Lexer {
+ static final checkVersion = () => RuntimeMetaData.checkVersion('4.13.2', RuntimeMetaData.VERSION);
+
+ static final List _decisionToDFA = List.generate(
+ _ATN.numberOfDecisions, (i) => DFA(_ATN.getDecisionState(i), i));
+ static final PredictionContextCache _sharedContextCache = PredictionContextCache();
+ static const int
+ TOKEN_VAR = 1, TOKEN_LET = 2, TOKEN_PLUS = 3, TOKEN_MINUS = 4, TOKEN_MULTIPLY = 5,
+ TOKEN_DIVIDE = 6, TOKEN_EQUAL = 7, TOKEN_OPEN_PAR = 8, TOKEN_CLOSE_PAR = 9,
+ TOKEN_ID = 10, TOKEN_WS = 11;
+ @override
+ final List channelNames = [
+ 'DEFAULT_TOKEN_CHANNEL', 'HIDDEN'
+ ];
+
+ @override
+ final List modeNames = [
+ 'DEFAULT_MODE'
+ ];
+
+ @override
+ final List ruleNames = [
+ 'VAR', 'LET', 'PLUS', 'MINUS', 'MULTIPLY', 'DIVIDE', 'EQUAL', 'OPEN_PAR',
+ 'CLOSE_PAR', 'ID', 'WS'
+ ];
+
+ static final List _LITERAL_NAMES = [
+ null, null, null, "'+'", "'-'", "'*'", "'/'", "'='", "'('", "')'"
+ ];
+ static final List _SYMBOLIC_NAMES = [
+ null, "VAR", "LET", "PLUS", "MINUS", "MULTIPLY", "DIVIDE", "EQUAL",
+ "OPEN_PAR", "CLOSE_PAR", "ID", "WS"
+ ];
+ static final Vocabulary VOCABULARY = VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
+
+ @override
+ Vocabulary get vocabulary {
+ return VOCABULARY;
+ }
+
+
+ ExprLexer(CharStream input) : super(input) {
+ interpreter = LexerATNSimulator(_ATN, _decisionToDFA, _sharedContextCache, recog: this);
+ }
+
+ @override
+ List get serializedATN => _serializedATN;
+
+ @override
+ String get grammarFileName => 'Expr.g4';
+
+ @override
+ ATN getATN() { return _ATN; }
+
+ static const List _serializedATN = [
+ 4,0,11,56,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,
+ 7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,1,0,1,0,1,0,1,0,1,1,1,1,1,1,
+ 1,1,1,2,1,2,1,3,1,3,1,4,1,4,1,5,1,5,1,6,1,6,1,7,1,7,1,8,1,8,1,9,1,
+ 9,5,9,48,8,9,10,9,12,9,51,9,9,1,10,1,10,1,10,1,10,0,0,11,1,1,3,2,5,
+ 3,7,4,9,5,11,6,13,7,15,8,17,9,19,10,21,11,1,0,9,2,0,86,86,118,118,
+ 2,0,65,65,97,97,2,0,82,82,114,114,2,0,76,76,108,108,2,0,69,69,101,
+ 101,2,0,84,84,116,116,2,0,65,90,97,122,4,0,48,57,65,90,95,95,97,122,
+ 3,0,9,10,13,13,32,32,56,0,1,1,0,0,0,0,3,1,0,0,0,0,5,1,0,0,0,0,7,1,
+ 0,0,0,0,9,1,0,0,0,0,11,1,0,0,0,0,13,1,0,0,0,0,15,1,0,0,0,0,17,1,0,
+ 0,0,0,19,1,0,0,0,0,21,1,0,0,0,1,23,1,0,0,0,3,27,1,0,0,0,5,31,1,0,0,
+ 0,7,33,1,0,0,0,9,35,1,0,0,0,11,37,1,0,0,0,13,39,1,0,0,0,15,41,1,0,
+ 0,0,17,43,1,0,0,0,19,45,1,0,0,0,21,52,1,0,0,0,23,24,7,0,0,0,24,25,
+ 7,1,0,0,25,26,7,2,0,0,26,2,1,0,0,0,27,28,7,3,0,0,28,29,7,4,0,0,29,
+ 30,7,5,0,0,30,4,1,0,0,0,31,32,5,43,0,0,32,6,1,0,0,0,33,34,5,45,0,0,
+ 34,8,1,0,0,0,35,36,5,42,0,0,36,10,1,0,0,0,37,38,5,47,0,0,38,12,1,0,
+ 0,0,39,40,5,61,0,0,40,14,1,0,0,0,41,42,5,40,0,0,42,16,1,0,0,0,43,44,
+ 5,41,0,0,44,18,1,0,0,0,45,49,7,6,0,0,46,48,7,7,0,0,47,46,1,0,0,0,48,
+ 51,1,0,0,0,49,47,1,0,0,0,49,50,1,0,0,0,50,20,1,0,0,0,51,49,1,0,0,0,
+ 52,53,7,8,0,0,53,54,1,0,0,0,54,55,6,10,0,0,55,22,1,0,0,0,2,0,49,1,
+ 0,1,0
+ ];
+
+ static final ATN _ATN =
+ ATNDeserializer().deserialize(_serializedATN);
+}
\ No newline at end of file
diff --git a/ports/dart/example/gen/ExprLexer.interp b/ports/dart/example/gen/ExprLexer.interp
new file mode 100644
index 0000000..5f3d2e5
--- /dev/null
+++ b/ports/dart/example/gen/ExprLexer.interp
@@ -0,0 +1,50 @@
+token literal names:
+null
+null
+null
+'+'
+'-'
+'*'
+'/'
+'='
+'('
+')'
+null
+null
+
+token symbolic names:
+null
+VAR
+LET
+PLUS
+MINUS
+MULTIPLY
+DIVIDE
+EQUAL
+OPEN_PAR
+CLOSE_PAR
+ID
+WS
+
+rule names:
+VAR
+LET
+PLUS
+MINUS
+MULTIPLY
+DIVIDE
+EQUAL
+OPEN_PAR
+CLOSE_PAR
+ID
+WS
+
+channel names:
+DEFAULT_TOKEN_CHANNEL
+HIDDEN
+
+mode names:
+DEFAULT_MODE
+
+atn:
+[4, 0, 11, 56, 6, -1, 2, 0, 7, 0, 2, 1, 7, 1, 2, 2, 7, 2, 2, 3, 7, 3, 2, 4, 7, 4, 2, 5, 7, 5, 2, 6, 7, 6, 2, 7, 7, 7, 2, 8, 7, 8, 2, 9, 7, 9, 2, 10, 7, 10, 1, 0, 1, 0, 1, 0, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 1, 2, 1, 3, 1, 3, 1, 4, 1, 4, 1, 5, 1, 5, 1, 6, 1, 6, 1, 7, 1, 7, 1, 8, 1, 8, 1, 9, 1, 9, 5, 9, 48, 8, 9, 10, 9, 12, 9, 51, 9, 9, 1, 10, 1, 10, 1, 10, 1, 10, 0, 0, 11, 1, 1, 3, 2, 5, 3, 7, 4, 9, 5, 11, 6, 13, 7, 15, 8, 17, 9, 19, 10, 21, 11, 1, 0, 9, 2, 0, 86, 86, 118, 118, 2, 0, 65, 65, 97, 97, 2, 0, 82, 82, 114, 114, 2, 0, 76, 76, 108, 108, 2, 0, 69, 69, 101, 101, 2, 0, 84, 84, 116, 116, 2, 0, 65, 90, 97, 122, 4, 0, 48, 57, 65, 90, 95, 95, 97, 122, 3, 0, 9, 10, 13, 13, 32, 32, 56, 0, 1, 1, 0, 0, 0, 0, 3, 1, 0, 0, 0, 0, 5, 1, 0, 0, 0, 0, 7, 1, 0, 0, 0, 0, 9, 1, 0, 0, 0, 0, 11, 1, 0, 0, 0, 0, 13, 1, 0, 0, 0, 0, 15, 1, 0, 0, 0, 0, 17, 1, 0, 0, 0, 0, 19, 1, 0, 0, 0, 0, 21, 1, 0, 0, 0, 1, 23, 1, 0, 0, 0, 3, 27, 1, 0, 0, 0, 5, 31, 1, 0, 0, 0, 7, 33, 1, 0, 0, 0, 9, 35, 1, 0, 0, 0, 11, 37, 1, 0, 0, 0, 13, 39, 1, 0, 0, 0, 15, 41, 1, 0, 0, 0, 17, 43, 1, 0, 0, 0, 19, 45, 1, 0, 0, 0, 21, 52, 1, 0, 0, 0, 23, 24, 7, 0, 0, 0, 24, 25, 7, 1, 0, 0, 25, 26, 7, 2, 0, 0, 26, 2, 1, 0, 0, 0, 27, 28, 7, 3, 0, 0, 28, 29, 7, 4, 0, 0, 29, 30, 7, 5, 0, 0, 30, 4, 1, 0, 0, 0, 31, 32, 5, 43, 0, 0, 32, 6, 1, 0, 0, 0, 33, 34, 5, 45, 0, 0, 34, 8, 1, 0, 0, 0, 35, 36, 5, 42, 0, 0, 36, 10, 1, 0, 0, 0, 37, 38, 5, 47, 0, 0, 38, 12, 1, 0, 0, 0, 39, 40, 5, 61, 0, 0, 40, 14, 1, 0, 0, 0, 41, 42, 5, 40, 0, 0, 42, 16, 1, 0, 0, 0, 43, 44, 5, 41, 0, 0, 44, 18, 1, 0, 0, 0, 45, 49, 7, 6, 0, 0, 46, 48, 7, 7, 0, 0, 47, 46, 1, 0, 0, 0, 48, 51, 1, 0, 0, 0, 49, 47, 1, 0, 0, 0, 49, 50, 1, 0, 0, 0, 50, 20, 1, 0, 0, 0, 51, 49, 1, 0, 0, 0, 52, 53, 7, 8, 0, 0, 53, 54, 1, 0, 0, 0, 54, 55, 6, 10, 0, 0, 55, 22, 1, 0, 0, 0, 2, 0, 49, 1, 0, 1, 0]
\ No newline at end of file
diff --git a/ports/dart/example/gen/ExprLexer.tokens b/ports/dart/example/gen/ExprLexer.tokens
new file mode 100644
index 0000000..be523c1
--- /dev/null
+++ b/ports/dart/example/gen/ExprLexer.tokens
@@ -0,0 +1,18 @@
+VAR=1
+LET=2
+PLUS=3
+MINUS=4
+MULTIPLY=5
+DIVIDE=6
+EQUAL=7
+OPEN_PAR=8
+CLOSE_PAR=9
+ID=10
+WS=11
+'+'=3
+'-'=4
+'*'=5
+'/'=6
+'='=7
+'('=8
+')'=9
diff --git a/ports/dart/example/gen/ExprListener.dart b/ports/dart/example/gen/ExprListener.dart
new file mode 100644
index 0000000..ddb83d4
--- /dev/null
+++ b/ports/dart/example/gen/ExprListener.dart
@@ -0,0 +1,44 @@
+// Generated from Expr.g4 by ANTLR 4.13.2
+// ignore_for_file: unused_import, unused_local_variable, prefer_single_quotes
+import 'package:antlr4/antlr4.dart';
+
+import 'ExprParser.dart';
+
+/// This abstract class defines a complete listener for a parse tree produced by
+/// [ExprParser].
+abstract class ExprListener extends ParseTreeListener {
+ /// Enter a parse tree produced by [ExprParser.expression].
+ /// [ctx] the parse tree
+ void enterExpression(ExpressionContext ctx);
+ /// Exit a parse tree produced by [ExprParser.expression].
+ /// [ctx] the parse tree
+ void exitExpression(ExpressionContext ctx);
+
+ /// Enter a parse tree produced by [ExprParser.assignment].
+ /// [ctx] the parse tree
+ void enterAssignment(AssignmentContext ctx);
+ /// Exit a parse tree produced by [ExprParser.assignment].
+ /// [ctx] the parse tree
+ void exitAssignment(AssignmentContext ctx);
+
+ /// Enter a parse tree produced by [ExprParser.simpleExpression].
+ /// [ctx] the parse tree
+ void enterSimpleExpression(SimpleExpressionContext ctx);
+ /// Exit a parse tree produced by [ExprParser.simpleExpression].
+ /// [ctx] the parse tree
+ void exitSimpleExpression(SimpleExpressionContext ctx);
+
+ /// Enter a parse tree produced by [ExprParser.variableRef].
+ /// [ctx] the parse tree
+ void enterVariableRef(VariableRefContext ctx);
+ /// Exit a parse tree produced by [ExprParser.variableRef].
+ /// [ctx] the parse tree
+ void exitVariableRef(VariableRefContext ctx);
+
+ /// Enter a parse tree produced by [ExprParser.functionRef].
+ /// [ctx] the parse tree
+ void enterFunctionRef(FunctionRefContext ctx);
+ /// Exit a parse tree produced by [ExprParser.functionRef].
+ /// [ctx] the parse tree
+ void exitFunctionRef(FunctionRefContext ctx);
+}
\ No newline at end of file
diff --git a/ports/dart/example/gen/ExprParser.dart b/ports/dart/example/gen/ExprParser.dart
new file mode 100644
index 0000000..3d30e05
--- /dev/null
+++ b/ports/dart/example/gen/ExprParser.dart
@@ -0,0 +1,366 @@
+// Generated from Expr.g4 by ANTLR 4.13.2
+// ignore_for_file: unused_import, unused_local_variable, prefer_single_quotes
+import 'package:antlr4/antlr4.dart';
+
+import 'ExprListener.dart';
+import 'ExprBaseListener.dart';
+const int RULE_expression = 0, RULE_assignment = 1, RULE_simpleExpression = 2,
+ RULE_variableRef = 3, RULE_functionRef = 4;
+class ExprParser extends Parser {
+ static final checkVersion = () => RuntimeMetaData.checkVersion('4.13.2', RuntimeMetaData.VERSION);
+ static const int TOKEN_EOF = IntStream.EOF;
+
+ static final List _decisionToDFA = List.generate(
+ _ATN.numberOfDecisions, (i) => DFA(_ATN.getDecisionState(i), i));
+ static final PredictionContextCache _sharedContextCache = PredictionContextCache();
+ static const int TOKEN_VAR = 1, TOKEN_LET = 2, TOKEN_PLUS = 3, TOKEN_MINUS = 4,
+ TOKEN_MULTIPLY = 5, TOKEN_DIVIDE = 6, TOKEN_EQUAL = 7,
+ TOKEN_OPEN_PAR = 8, TOKEN_CLOSE_PAR = 9, TOKEN_ID = 10,
+ TOKEN_WS = 11;
+
+ @override
+ final List ruleNames = [
+ 'expression', 'assignment', 'simpleExpression', 'variableRef', 'functionRef'
+ ];
+
+ static final List _LITERAL_NAMES = [
+ null, null, null, "'+'", "'-'", "'*'", "'/'", "'='", "'('", "')'"
+ ];
+ static final List _SYMBOLIC_NAMES = [
+ null, "VAR", "LET", "PLUS", "MINUS", "MULTIPLY", "DIVIDE", "EQUAL",
+ "OPEN_PAR", "CLOSE_PAR", "ID", "WS"
+ ];
+ static final Vocabulary VOCABULARY = VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
+
+ @override
+ Vocabulary get vocabulary {
+ return VOCABULARY;
+ }
+
+ @override
+ String get grammarFileName => 'Expr.g4';
+
+ @override
+ List get serializedATN => _serializedATN;
+
+ @override
+ ATN getATN() {
+ return _ATN;
+ }
+
+ ExprParser(TokenStream input) : super(input) {
+ interpreter = ParserATNSimulator(this, _ATN, _decisionToDFA, _sharedContextCache);
+ }
+
+ ExpressionContext expression() {
+ dynamic _localctx = ExpressionContext(context, state);
+ enterRule(_localctx, 0, RULE_expression);
+ try {
+ state = 12;
+ errorHandler.sync(this);
+ switch (tokenStream.LA(1)!) {
+ case TOKEN_VAR:
+ case TOKEN_LET:
+ enterOuterAlt(_localctx, 1);
+ state = 10;
+ assignment();
+ break;
+ case TOKEN_ID:
+ enterOuterAlt(_localctx, 2);
+ state = 11;
+ simpleExpression(0);
+ break;
+ default:
+ throw NoViableAltException(this);
+ }
+ } on RecognitionException catch (re) {
+ _localctx.exception = re;
+ errorHandler.reportError(this, re);
+ errorHandler.recover(this, re);
+ } finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ AssignmentContext assignment() {
+ dynamic _localctx = AssignmentContext(context, state);
+ enterRule(_localctx, 2, RULE_assignment);
+ int _la;
+ try {
+ enterOuterAlt(_localctx, 1);
+ state = 14;
+ _la = tokenStream.LA(1)!;
+ if (!(_la == TOKEN_VAR || _la == TOKEN_LET)) {
+ errorHandler.recoverInline(this);
+ } else {
+ if ( tokenStream.LA(1)! == IntStream.EOF ) matchedEOF = true;
+ errorHandler.reportMatch(this);
+ consume();
+ }
+ state = 15;
+ match(TOKEN_ID);
+ state = 16;
+ match(TOKEN_EQUAL);
+ state = 17;
+ simpleExpression(0);
+ } on RecognitionException catch (re) {
+ _localctx.exception = re;
+ errorHandler.reportError(this, re);
+ errorHandler.recover(this, re);
+ } finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ SimpleExpressionContext simpleExpression([int _p = 0]) {
+ final _parentctx = context;
+ final _parentState = state;
+ dynamic _localctx = SimpleExpressionContext(context, _parentState);
+ var _prevctx = _localctx;
+ var _startState = 4;
+ enterRecursionRule(_localctx, 4, RULE_simpleExpression, _p);
+ int _la;
+ try {
+ int _alt;
+ enterOuterAlt(_localctx, 1);
+ state = 22;
+ errorHandler.sync(this);
+ switch (interpreter!.adaptivePredict(tokenStream, 1, context)) {
+ case 1:
+ state = 20;
+ variableRef();
+ break;
+ case 2:
+ state = 21;
+ functionRef();
+ break;
+ }
+ context!.stop = tokenStream.LT(-1);
+ state = 32;
+ errorHandler.sync(this);
+ _alt = interpreter!.adaptivePredict(tokenStream, 3, context);
+ while (_alt != 2 && _alt != ATN.INVALID_ALT_NUMBER) {
+ if (_alt == 1) {
+ if (parseListeners != null) triggerExitRuleEvent();
+ _prevctx = _localctx;
+ state = 30;
+ errorHandler.sync(this);
+ switch (interpreter!.adaptivePredict(tokenStream, 2, context)) {
+ case 1:
+ _localctx = SimpleExpressionContext(_parentctx, _parentState);
+ pushNewRecursionContext(_localctx, _startState, RULE_simpleExpression);
+ state = 24;
+ if (!(precpred(context, 4))) {
+ throw FailedPredicateException(this, "precpred(context, 4)");
+ }
+ state = 25;
+ _la = tokenStream.LA(1)!;
+ if (!(_la == TOKEN_PLUS || _la == TOKEN_MINUS)) {
+ errorHandler.recoverInline(this);
+ } else {
+ if ( tokenStream.LA(1)! == IntStream.EOF ) matchedEOF = true;
+ errorHandler.reportMatch(this);
+ consume();
+ }
+ state = 26;
+ simpleExpression(5);
+ break;
+ case 2:
+ _localctx = SimpleExpressionContext(_parentctx, _parentState);
+ pushNewRecursionContext(_localctx, _startState, RULE_simpleExpression);
+ state = 27;
+ if (!(precpred(context, 3))) {
+ throw FailedPredicateException(this, "precpred(context, 3)");
+ }
+ state = 28;
+ _la = tokenStream.LA(1)!;
+ if (!(_la == TOKEN_MULTIPLY || _la == TOKEN_DIVIDE)) {
+ errorHandler.recoverInline(this);
+ } else {
+ if ( tokenStream.LA(1)! == IntStream.EOF ) matchedEOF = true;
+ errorHandler.reportMatch(this);
+ consume();
+ }
+ state = 29;
+ simpleExpression(4);
+ break;
+ }
+ }
+ state = 34;
+ errorHandler.sync(this);
+ _alt = interpreter!.adaptivePredict(tokenStream, 3, context);
+ }
+ } on RecognitionException catch (re) {
+ _localctx.exception = re;
+ errorHandler.reportError(this, re);
+ errorHandler.recover(this, re);
+ } finally {
+ unrollRecursionContexts(_parentctx);
+ }
+ return _localctx;
+ }
+
+ VariableRefContext variableRef() {
+ dynamic _localctx = VariableRefContext(context, state);
+ enterRule(_localctx, 6, RULE_variableRef);
+ try {
+ enterOuterAlt(_localctx, 1);
+ state = 35;
+ match(TOKEN_ID);
+ } on RecognitionException catch (re) {
+ _localctx.exception = re;
+ errorHandler.reportError(this, re);
+ errorHandler.recover(this, re);
+ } finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ FunctionRefContext functionRef() {
+ dynamic _localctx = FunctionRefContext(context, state);
+ enterRule(_localctx, 8, RULE_functionRef);
+ try {
+ enterOuterAlt(_localctx, 1);
+ state = 37;
+ match(TOKEN_ID);
+ state = 38;
+ match(TOKEN_OPEN_PAR);
+ state = 39;
+ match(TOKEN_CLOSE_PAR);
+ } on RecognitionException catch (re) {
+ _localctx.exception = re;
+ errorHandler.reportError(this, re);
+ errorHandler.recover(this, re);
+ } finally {
+ exitRule();
+ }
+ return _localctx;
+ }
+
+ @override
+ bool sempred(RuleContext? _localctx, int ruleIndex, int predIndex) {
+ switch (ruleIndex) {
+ case 2:
+ return _simpleExpression_sempred(_localctx as SimpleExpressionContext?, predIndex);
+ }
+ return true;
+ }
+ bool _simpleExpression_sempred(dynamic _localctx, int predIndex) {
+ switch (predIndex) {
+ case 0: return precpred(context, 4);
+ case 1: return precpred(context, 3);
+ }
+ return true;
+ }
+
+ static const List _serializedATN = [
+ 4,1,11,42,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,1,0,1,0,3,0,13,8,
+ 0,1,1,1,1,1,1,1,1,1,1,1,2,1,2,1,2,3,2,23,8,2,1,2,1,2,1,2,1,2,1,2,1,
+ 2,5,2,31,8,2,10,2,12,2,34,9,2,1,3,1,3,1,4,1,4,1,4,1,4,1,4,0,1,4,5,
+ 0,2,4,6,8,0,3,1,0,1,2,1,0,3,4,1,0,5,6,40,0,12,1,0,0,0,2,14,1,0,0,0,
+ 4,22,1,0,0,0,6,35,1,0,0,0,8,37,1,0,0,0,10,13,3,2,1,0,11,13,3,4,2,0,
+ 12,10,1,0,0,0,12,11,1,0,0,0,13,1,1,0,0,0,14,15,7,0,0,0,15,16,5,10,
+ 0,0,16,17,5,7,0,0,17,18,3,4,2,0,18,3,1,0,0,0,19,20,6,2,-1,0,20,23,
+ 3,6,3,0,21,23,3,8,4,0,22,19,1,0,0,0,22,21,1,0,0,0,23,32,1,0,0,0,24,
+ 25,10,4,0,0,25,26,7,1,0,0,26,31,3,4,2,5,27,28,10,3,0,0,28,29,7,2,0,
+ 0,29,31,3,4,2,4,30,24,1,0,0,0,30,27,1,0,0,0,31,34,1,0,0,0,32,30,1,
+ 0,0,0,32,33,1,0,0,0,33,5,1,0,0,0,34,32,1,0,0,0,35,36,5,10,0,0,36,7,
+ 1,0,0,0,37,38,5,10,0,0,38,39,5,8,0,0,39,40,5,9,0,0,40,9,1,0,0,0,4,
+ 12,22,30,32
+ ];
+
+ static final ATN _ATN =
+ ATNDeserializer().deserialize(_serializedATN);
+}
+class ExpressionContext extends ParserRuleContext {
+ AssignmentContext? assignment() => getRuleContext(0);
+ SimpleExpressionContext? simpleExpression() => getRuleContext(0);
+ ExpressionContext([ParserRuleContext? parent, int? invokingState]) : super(parent, invokingState);
+ @override
+ int get ruleIndex => RULE_expression;
+ @override
+ void enterRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.enterExpression(this);
+ }
+ @override
+ void exitRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.exitExpression(this);
+ }
+}
+
+class AssignmentContext extends ParserRuleContext {
+ TerminalNode? ID() => getToken(ExprParser.TOKEN_ID, 0);
+ TerminalNode? EQUAL() => getToken(ExprParser.TOKEN_EQUAL, 0);
+ SimpleExpressionContext? simpleExpression() => getRuleContext(0);
+ TerminalNode? VAR() => getToken(ExprParser.TOKEN_VAR, 0);
+ TerminalNode? LET() => getToken(ExprParser.TOKEN_LET, 0);
+ AssignmentContext([ParserRuleContext? parent, int? invokingState]) : super(parent, invokingState);
+ @override
+ int get ruleIndex => RULE_assignment;
+ @override
+ void enterRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.enterAssignment(this);
+ }
+ @override
+ void exitRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.exitAssignment(this);
+ }
+}
+
+class SimpleExpressionContext extends ParserRuleContext {
+ VariableRefContext? variableRef() => getRuleContext(0);
+ FunctionRefContext? functionRef() => getRuleContext(0);
+ List simpleExpressions() => getRuleContexts();
+ SimpleExpressionContext? simpleExpression(int i) => getRuleContext(i);
+ TerminalNode? PLUS() => getToken(ExprParser.TOKEN_PLUS, 0);
+ TerminalNode? MINUS() => getToken(ExprParser.TOKEN_MINUS, 0);
+ TerminalNode? MULTIPLY() => getToken(ExprParser.TOKEN_MULTIPLY, 0);
+ TerminalNode? DIVIDE() => getToken(ExprParser.TOKEN_DIVIDE, 0);
+ SimpleExpressionContext([ParserRuleContext? parent, int? invokingState]) : super(parent, invokingState);
+ @override
+ int get ruleIndex => RULE_simpleExpression;
+ @override
+ void enterRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.enterSimpleExpression(this);
+ }
+ @override
+ void exitRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.exitSimpleExpression(this);
+ }
+}
+
+class VariableRefContext extends ParserRuleContext {
+ TerminalNode? ID() => getToken(ExprParser.TOKEN_ID, 0);
+ VariableRefContext([ParserRuleContext? parent, int? invokingState]) : super(parent, invokingState);
+ @override
+ int get ruleIndex => RULE_variableRef;
+ @override
+ void enterRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.enterVariableRef(this);
+ }
+ @override
+ void exitRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.exitVariableRef(this);
+ }
+}
+
+class FunctionRefContext extends ParserRuleContext {
+ TerminalNode? ID() => getToken(ExprParser.TOKEN_ID, 0);
+ TerminalNode? OPEN_PAR() => getToken(ExprParser.TOKEN_OPEN_PAR, 0);
+ TerminalNode? CLOSE_PAR() => getToken(ExprParser.TOKEN_CLOSE_PAR, 0);
+ FunctionRefContext([ParserRuleContext? parent, int? invokingState]) : super(parent, invokingState);
+ @override
+ int get ruleIndex => RULE_functionRef;
+ @override
+ void enterRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.enterFunctionRef(this);
+ }
+ @override
+ void exitRule(ParseTreeListener listener) {
+ if (listener is ExprListener) listener.exitFunctionRef(this);
+ }
+}
+
diff --git a/ports/dart/grammar/Expr.g4 b/ports/dart/grammar/Expr.g4
new file mode 100644
index 0000000..7be09ef
--- /dev/null
+++ b/ports/dart/grammar/Expr.g4
@@ -0,0 +1,34 @@
+grammar Expr;
+expression: assignment | simpleExpression;
+
+assignment
+ : (VAR | LET) ID EQUAL simpleExpression
+;
+
+simpleExpression
+ : simpleExpression (PLUS | MINUS) simpleExpression
+ | simpleExpression (MULTIPLY | DIVIDE) simpleExpression
+ | variableRef
+ | functionRef
+;
+
+variableRef
+ : ID
+;
+
+functionRef
+ : ID OPEN_PAR CLOSE_PAR
+;
+
+VAR: [vV] [aA] [rR];
+LET: [lL] [eE] [tT];
+
+PLUS: '+';
+MINUS: '-';
+MULTIPLY: '*';
+DIVIDE: '/';
+EQUAL: '=';
+OPEN_PAR: '(';
+CLOSE_PAR: ')';
+ID: [a-zA-Z] [a-zA-Z0-9_]*;
+WS: [ \n\r\t] -> channel(HIDDEN);
diff --git a/ports/dart/grammar/compiler.bat b/ports/dart/grammar/compiler.bat
new file mode 100644
index 0000000..133d1f3
--- /dev/null
+++ b/ports/dart/grammar/compiler.bat
@@ -0,0 +1 @@
+:: Generates the Dart lexer/parser/listener for Expr.g4 into ../example/gen.
+:: Requires the `antlr4` command line tool on PATH (e.g. pip install antlr4-tools).
+antlr4 -v 4.13.2 -Dlanguage=Dart Expr.g4 -o ../example/gen
\ No newline at end of file
diff --git a/ports/dart/lib/antlr4_c3.dart b/ports/dart/lib/antlr4_c3.dart
new file mode 100644
index 0000000..195d3c6
--- /dev/null
+++ b/ports/dart/lib/antlr4_c3.dart
@@ -0,0 +1,18 @@
+// ignore_for_file: depend_on_referenced_packages
+
+/*
+ * Copyright © 2024 VMware, Inc & dudu.ltd. All Rights Reserved.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * See LICENSE file for more info.
+ */
+
+library;
+
+import 'package:antlr4/antlr4.dart';
+import 'package:logging/logging.dart';
+
+part 'src/antlr4_c3_base.dart';
+
+// The public API (CodeCompletionCore and its result types) is exposed through
+// the `part` directive above.
diff --git a/ports/dart/lib/src/antlr4_c3_base.dart b/ports/dart/lib/src/antlr4_c3_base.dart
new file mode 100644
index 0000000..8bc9994
--- /dev/null
+++ b/ports/dart/lib/src/antlr4_c3_base.dart
@@ -0,0 +1,587 @@
+// ignore_for_file: unused_local_variable
+
+/*
+ * Copyright © 2024 VMware, Inc & dudu.ltd. All Rights Reserved.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * See LICENSE file for more info.
+ */
+
+// Translated from Java to Dart
+// https://github.com/mike-lischke/antlr4-c3
+
+part of '../antlr4_c3.dart';
+
+/// Port of the antlr4-c3 code completion core.
+///
+/// Given a caret position in a token stream, walks the parser's ATN to
+/// collect the lexer tokens and parser rules that could appear at that
+/// position. Results are returned as a [CandidatesCollection].
+class CodeCompletionCore {
+  static final logger = Logger('CodeCompletionCore');
+
+  /// Dump the final collection result to the logger (at FINE).
+  bool showResult = true;
+
+  /// Emit per-candidate debug output while collecting.
+  bool showDebugOutput = false;
+
+  /// Include outgoing transitions when printing an ATN state (at FINER).
+  bool debugOutputWithTransitions = true;
+
+  /// Print the current rule stack together with each processed state.
+  bool showRuleStack = true;
+
+  /// Token types that are never reported as candidates.
+  Set<int> ignoredTokens = {};
+
+  /// Rule indexes reported as rule candidates instead of their tokens.
+  Set<int> preferredRules = {};
+
+  Parser parser;
+  late ATN atn;
+  late Vocabulary vocabulary;
+  late List<String> ruleNames;
+
+  /// Tokens cached from [tokenStartIndex] up to the caret (or EOF).
+  List<Token> tokens = [];
+
+  int tokenStartIndex = 0;
+  int statesProcessed = 0;
+
+  /// Rule index -> (entry token index -> token indexes where the rule
+  /// traversal stopped). Avoids reprocessing a rule at the same position.
+  final Map<int, Map<int, Set<int>>> shortcutMap = {};
+
+  /// The result of the last [collectCandidates] call.
+  final CandidatesCollection candidates = CandidatesCollection();
+
+  /// Follow-set cache shared across instances, keyed by the parser's runtime
+  /// type name, then by ATN state number (the ATN is static per parser class,
+  /// so sharing is safe).
+  static final Map<String, Map<int, FollowSetsHolder>> followSetsByATN = {};
+
+  CodeCompletionCore(
+    this.parser, [
+    Set<int>? preferredRules,
+    Set<int>? ignoredTokens,
+  ]) {
+    atn = parser.getATN();
+    vocabulary = parser.vocabulary;
+    ruleNames = parser.ruleNames;
+    if (preferredRules != null) {
+      this.preferredRules = preferredRules;
+    }
+    if (ignoredTokens != null) {
+      this.ignoredTokens = ignoredTokens;
+    }
+  }
+
+  /// Returns an unmodifiable view of the preferred rules.
+  Set<int> getPreferredRules() {
+    return Set.unmodifiable(preferredRules);
+  }
+
+  void setPreferredRules(Set<int> preferredRules) {
+    this.preferredRules = Set.from(preferredRules);
+  }
+
+  /// Collects completion candidates at [caretTokenIndex].
+  ///
+  /// [context] limits the search to that rule's subtree; pass null to start
+  /// at rule index 0. Returns the shared [candidates] collection.
+  CandidatesCollection collectCandidates(
+      int caretTokenIndex, ParserRuleContext? context) {
+    shortcutMap.clear();
+    candidates.rules.clear();
+    candidates.tokens.clear();
+    statesProcessed = 0;
+
+    tokenStartIndex = context?.start != null ? context!.start!.tokenIndex : 0;
+    TokenStream tokenStream = parser.inputStream;
+
+    // Cache all tokens from the start index up to the caret (or EOF) and
+    // restore the stream position afterwards so the parser is unaffected.
+    int currentIndex = tokenStream.index;
+    tokenStream.seek(tokenStartIndex);
+    tokens = [];
+    int offset = 1;
+    while (true) {
+      Token token = tokenStream.LT(offset++)!;
+      tokens.add(token);
+      if (token.tokenIndex >= caretTokenIndex || token.type == Token.EOF) {
+        break;
+      }
+    }
+    tokenStream.seek(currentIndex);
+
+    List<int> callStack = [];
+    int startRule = context != null ? context.ruleIndex : 0;
+    processRule(atn.ruleToStartState[startRule], 0, callStack, "\n");
+
+    tokenStream.seek(currentIndex);
+
+    // Derive start/stop character offsets for each collected preferred rule.
+    for (int ruleId in preferredRules) {
+      final shortcut = shortcutMap[ruleId];
+      if (shortcut == null || shortcut.isEmpty) {
+        continue;
+      }
+      // Select the right-most occurrence (the one closest to the caret).
+      final startToken = shortcut.keys.reduce((a, b) => a > b ? a : b);
+      final endSet = shortcut[startToken]!;
+      final endToken = endSet.isEmpty
+          ? tokens.length - 1
+          : endSet.reduce((a, b) => a > b ? a : b);
+      final startOffset = tokens[startToken].startIndex;
+      final endOffset = tokens[endToken].type == Token.EOF
+          ? tokens[endToken].startIndex
+          : tokens[endToken - 1].stopIndex + 1;
+
+      final ruleStartStop = [startOffset, endOffset];
+      candidates.rulePositions[ruleId] = ruleStartStop;
+    }
+
+    if (showResult && logger.isLoggable(Level.FINE)) {
+      StringBuffer logMessage = StringBuffer();
+
+      logMessage.write("States processed: $statesProcessed\n");
+
+      logMessage.write("Collected rules:\n");
+
+      candidates.rules.forEach((key, value) {
+        logMessage.write(" $key, path: ");
+        for (int token in value) {
+          logMessage.write("${ruleNames[token]} ");
+        }
+        logMessage.write("\n");
+      });
+
+      logMessage.write("Collected Tokens:\n");
+      candidates.tokens.forEach((key, value) {
+        logMessage.write(" ${vocabulary.getDisplayName(key)}");
+        for (int following in value) {
+          logMessage.write(" ${vocabulary.getDisplayName(following)}");
+        }
+        logMessage.write("\n");
+      });
+      logger.fine(logMessage.toString());
+    }
+
+    return candidates;
+  }
+
+  /// Evaluates a predicate transition against an empty context.
+  bool checkPredicate(PredicateTransition transition) {
+    return transition.predicate.eval(parser, ParserRuleContext.EMPTY);
+  }
+
+  /// If [ruleStack] contains a preferred rule, records it (with the path
+  /// leading to it) as a rule candidate and returns true; otherwise returns
+  /// false so the caller reports plain token candidates instead.
+  bool translateToRuleIndex(List<int> ruleStack) {
+    if (preferredRules.isEmpty) return false;
+
+    for (int i = 0; i < ruleStack.length; ++i) {
+      if (preferredRules.contains(ruleStack[i])) {
+        // Path to the preferred rule, excluding the rule itself.
+        List<int> path = List<int>.from(ruleStack.sublist(0, i));
+        bool addNew = true;
+        candidates.rules.forEach((key, value) {
+          // Fixed: compare element-wise. Dart's List `==` is identity
+          // equality, so the previous `value == path` never matched and
+          // duplicates were never detected.
+          if (key == ruleStack[i] && _sameIntList(value, path)) {
+            addNew = false;
+          }
+        });
+
+        if (addNew) {
+          candidates.rules[ruleStack[i]] = path;
+          if (showDebugOutput && logger.isLoggable(Level.FINE)) {
+            // Fixed: log the collected rule's name (was `ruleNames[i]`,
+            // indexing by stack position instead of rule index).
+            logger.fine("=====> collected: ${ruleNames[ruleStack[i]]}");
+          }
+        }
+        return true;
+      }
+    }
+
+    return false;
+  }
+
+  /// Element-wise equality for two int lists.
+  static bool _sameIntList(List<int> a, List<int> b) {
+    if (a.length != b.length) return false;
+    for (int i = 0; i < a.length; i++) {
+      if (a[i] != b[i]) return false;
+    }
+    return true;
+  }
+
+  /// Collects the token types that directly follow [initialTransition], as
+  /// long as each step has a single-symbol ATOM transition (used to suggest
+  /// an "entire operator", e.g. `(` `)` after a function name).
+  List<int> getFollowingTokens(Transition initialTransition) {
+    List<int> result = [];
+    List<ATNState> pipeline = [initialTransition.target];
+
+    while (pipeline.isNotEmpty) {
+      ATNState state = pipeline.removeLast();
+
+      for (Transition transition in state.transitions) {
+        if (transition.type == TransitionType.ATOM) {
+          if (!transition.isEpsilon) {
+            List<int> list = transition.label!.toList();
+            if (list.length == 1 && !ignoredTokens.contains(list[0])) {
+              result.add(list[0]);
+              pipeline.add(transition.target);
+            }
+          } else {
+            pipeline.add(transition.target);
+          }
+        }
+      }
+    }
+
+    return result;
+  }
+
+  /// Entry point for follow-set computation between [start] and [stop].
+  List<FollowSetWithPath> determineFollowSets(ATNState start, ATNState stop) {
+    List<FollowSetWithPath> result = [];
+    Set<ATNState> seen = {};
+    List<int> ruleStack = [];
+
+    collectFollowSets(start, stop, result, seen, ruleStack);
+
+    return result;
+  }
+
+  /// Recursively walks rule/predicate/epsilon transitions from [s] and
+  /// records every reachable terminal set together with the rule path that
+  /// leads to it.
+  void collectFollowSets(
+      ATNState s,
+      ATNState stopState,
+      List<FollowSetWithPath> followSets,
+      Set<ATNState> seen,
+      List<int> ruleStack) {
+    if (seen.contains(s)) return;
+
+    seen.add(s);
+
+    if (s == stopState || s.stateType == StateType.RULE_STOP) {
+      // End of the rule: mark with EPSILON, meaning "caller decides".
+      FollowSetWithPath set = FollowSetWithPath();
+      set.intervals = IntervalSet.ofOne(Token.EPSILON);
+      set.path = List<int>.from(ruleStack);
+      set.following = [];
+      followSets.add(set);
+      return;
+    }
+
+    for (Transition transition in s.transitions) {
+      if (transition.type == TransitionType.RULE) {
+        RuleTransition ruleTransition = transition as RuleTransition;
+        // Don't recurse into a rule already on the stack (recursion guard).
+        if (ruleStack.contains(ruleTransition.target.ruleIndex)) {
+          continue;
+        }
+        ruleStack.add(ruleTransition.target.ruleIndex);
+        collectFollowSets(
+            transition.target, stopState, followSets, seen, ruleStack);
+        ruleStack.removeLast();
+      } else if (transition.type == TransitionType.PREDICATE) {
+        if (checkPredicate(transition as PredicateTransition)) {
+          collectFollowSets(
+              transition.target, stopState, followSets, seen, ruleStack);
+        }
+      } else if (transition.isEpsilon) {
+        collectFollowSets(
+            transition.target, stopState, followSets, seen, ruleStack);
+      } else if (transition.type == TransitionType.WILDCARD) {
+        // Wildcard matches every user-defined token type.
+        FollowSetWithPath set = FollowSetWithPath();
+        set.intervals =
+            IntervalSet.ofRange(Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType);
+        set.path = List<int>.from(ruleStack);
+        set.following = [];
+        followSets.add(set);
+      } else {
+        IntervalSet? label = transition.label;
+        if (label != null && label.length > 0) {
+          if (transition.type == TransitionType.NOT_SET) {
+            label = label.complement(IntervalSet.ofRange(
+                Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType));
+          }
+          FollowSetWithPath set = FollowSetWithPath();
+          set.intervals = label!;
+          set.path = List<int>.from(ruleStack);
+          set.following = getFollowingTokens(transition);
+          followSets.add(set);
+        }
+      }
+    }
+  }
+
+  /// Walks the ATN from [startState] (a rule start state), consuming cached
+  /// tokens from [tokenIndex], and records candidates once the caret token is
+  /// reached. Returns the token indexes at which the rule's stop state was
+  /// reached, so callers can continue after the sub-rule invocation.
+  Set<int> processRule(ATNState startState, int tokenIndex, List<int> callStack,
+      String indentation) {
+    // Shortcut: the same rule at the same position always yields the same
+    // result within one collectCandidates run.
+    Map<int, Set<int>>? positionMap = shortcutMap[startState.ruleIndex];
+    if (positionMap == null) {
+      positionMap = {};
+      shortcutMap[startState.ruleIndex] = positionMap;
+    } else {
+      if (positionMap.containsKey(tokenIndex)) {
+        if (showDebugOutput) {
+          logger.fine("=====> shortcut");
+        }
+        return positionMap[tokenIndex]!;
+      }
+    }
+
+    Set<int> result = {};
+
+    Map<int, FollowSetsHolder>? setsPerState =
+        followSetsByATN[parser.runtimeType.toString()];
+    if (setsPerState == null) {
+      setsPerState = {};
+      followSetsByATN[parser.runtimeType.toString()] = setsPerState;
+    }
+
+    FollowSetsHolder? followSets = setsPerState[startState.stateNumber];
+    if (followSets == null) {
+      followSets = FollowSetsHolder();
+      setsPerState[startState.stateNumber] = followSets;
+      RuleStopState stop = atn.ruleToStopState[startState.ruleIndex];
+      followSets.sets = determineFollowSets(startState, stop);
+
+      // Combined set for a quick "can this symbol appear at all" check.
+      IntervalSet combined = IntervalSet();
+      for (FollowSetWithPath set in followSets.sets) {
+        combined.addAll(set.intervals);
+      }
+      followSets.combined = combined;
+    }
+
+    callStack.add(startState.ruleIndex);
+    int currentSymbol = tokens[tokenIndex].type;
+
+    if (tokenIndex >= tokens.length - 1) {
+      // Caret is at the rule entry: report the rule itself if preferred,
+      // otherwise every token from its follow sets.
+      if (preferredRules.contains(startState.ruleIndex)) {
+        translateToRuleIndex(callStack);
+      } else {
+        for (FollowSetWithPath set in followSets.sets) {
+          List<int> fullPath = List<int>.from(callStack)..addAll(set.path);
+          if (!translateToRuleIndex(fullPath)) {
+            for (int symbol in set.intervals.toList()) {
+              if (!ignoredTokens.contains(symbol)) {
+                if (showDebugOutput && logger.isLoggable(Level.FINE)) {
+                  logger.fine(
+                      "=====> collected: ${vocabulary.getDisplayName(symbol)}");
+                }
+                if (!candidates.tokens.containsKey(symbol)) {
+                  candidates.tokens[symbol] = set.following;
+                } else {
+                  // Different follow sequences -> keep no specific one.
+                  if (candidates.tokens[symbol] != set.following) {
+                    candidates.tokens[symbol] = [];
+                  }
+                }
+              } else {
+                logger.fine("====> collection: Ignoring token: $symbol");
+              }
+            }
+          }
+        }
+      }
+
+      callStack.removeLast();
+      return result;
+    } else {
+      // Abort early if this rule can never match the current token.
+      if (!followSets.combined.contains(Token.EPSILON) &&
+          !followSets.combined.contains(currentSymbol)) {
+        callStack.removeLast();
+        return result;
+      }
+    }
+
+    List<PipelineEntry> statePipeline = [PipelineEntry(startState, tokenIndex)];
+
+    while (statePipeline.isNotEmpty) {
+      PipelineEntry currentEntry = statePipeline.removeLast();
+      statesProcessed++;
+
+      currentSymbol = tokens[currentEntry.tokenIndex].type;
+
+      bool atCaret = currentEntry.tokenIndex >= tokens.length - 1;
+      if (logger.isLoggable(Level.FINE)) {
+        printDescription(
+            indentation,
+            currentEntry.state,
+            generateBaseDescription(currentEntry.state),
+            currentEntry.tokenIndex);
+        if (showRuleStack) {
+          printRuleState(callStack);
+        }
+      }
+
+      switch (currentEntry.state.stateType) {
+        case StateType.RULE_START:
+          indentation += " ";
+          break;
+
+        case StateType.RULE_STOP:
+          // Record where this rule invocation ended; no transitions to walk.
+          result.add(currentEntry.tokenIndex);
+          continue;
+
+        default:
+          break;
+      }
+
+      for (Transition transition in currentEntry.state.transitions) {
+        switch (transition.type) {
+          case TransitionType.RULE:
+            // Recurse into the sub-rule; resume at each reported end index.
+            Set<int> endStatus = processRule(transition.target,
+                currentEntry.tokenIndex, callStack, indentation);
+            for (int position in endStatus) {
+              statePipeline.add(PipelineEntry(
+                  (transition as RuleTransition).followState, position));
+            }
+            break;
+
+          case TransitionType.PREDICATE:
+            if (checkPredicate(transition as PredicateTransition)) {
+              statePipeline.add(
+                  PipelineEntry(transition.target, currentEntry.tokenIndex));
+            }
+            break;
+
+          case TransitionType.WILDCARD:
+            if (atCaret) {
+              if (!translateToRuleIndex(callStack)) {
+                for (int token in IntervalSet.ofRange(
+                        Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType)
+                    .toList()) {
+                  if (!ignoredTokens.contains(token)) {
+                    candidates.tokens[token] = [];
+                  }
+                }
+              }
+            } else {
+              statePipeline.add(PipelineEntry(
+                  transition.target, currentEntry.tokenIndex + 1));
+            }
+            break;
+
+          default:
+            if (transition.isEpsilon) {
+              statePipeline.add(
+                  PipelineEntry(transition.target, currentEntry.tokenIndex));
+              continue;
+            }
+
+            IntervalSet? set = transition.label;
+            if (set != null && set.length > 0) {
+              if (transition.type == TransitionType.NOT_SET) {
+                set = set.complement(IntervalSet.ofRange(
+                    Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType));
+              }
+              if (atCaret) {
+                if (!translateToRuleIndex(callStack)) {
+                  List<int> list = set!.toList();
+                  // With a single symbol we can also suggest what follows it
+                  // (e.g. the parentheses of a call).
+                  bool addFollowing = list.length == 1;
+                  for (int symbol in list) {
+                    if (!ignoredTokens.contains(symbol)) {
+                      if (showDebugOutput && logger.isLoggable(Level.FINE)) {
+                        logger.fine(
+                            "=====> collected: ${vocabulary.getDisplayName(symbol)}");
+                      }
+                      if (addFollowing) {
+                        candidates.tokens[symbol] =
+                            getFollowingTokens(transition);
+                      } else {
+                        candidates.tokens[symbol] = [];
+                      }
+                    } else {
+                      logger.fine("====> collected: Ignoring token: $symbol");
+                    }
+                  }
+                }
+              } else {
+                if (set!.contains(currentSymbol)) {
+                  if (showDebugOutput && logger.isLoggable(Level.FINE)) {
+                    logger.fine(
+                        "=====> consumed: ${vocabulary.getDisplayName(currentSymbol)}");
+                  }
+                  statePipeline.add(PipelineEntry(
+                      transition.target, currentEntry.tokenIndex + 1));
+                }
+              }
+            }
+        }
+      }
+    }
+
+    callStack.removeLast();
+
+    positionMap[tokenIndex] = result;
+
+    return result;
+  }
+
+  /// Human-readable names for StateType ordinals (debug output only).
+  List<String> atnStateTypeMap = [
+    "invalid",
+    "basic",
+    "rule start",
+    "block start",
+    "plus block start",
+    "star block start",
+    "token start",
+    "rule stop",
+    "block end",
+    "star loop back",
+    "star loop entry",
+    "plus loop back",
+    "loop end"
+  ];
+
+  /// Formats "[stateNumber stateType] in ruleName" for debug output.
+  String generateBaseDescription(ATNState state) {
+    String stateValue = (state.stateNumber == ATNState.INVALID_STATE_NUMBER)
+        ? "Invalid"
+        : state.stateNumber.toString();
+    return "[$stateValue ${atnStateTypeMap[state.stateType.index]}] in ${ruleNames[state.ruleIndex]}";
+  }
+
+  /// Logs one processed ATN state, optionally with its outgoing transitions.
+  void printDescription(String currentIndent, ATNState state,
+      String baseDescription, int tokenIndex) {
+    StringBuffer output = StringBuffer(currentIndent);
+
+    StringBuffer transitionDescription = StringBuffer();
+    if (debugOutputWithTransitions && logger.isLoggable(Level.FINER)) {
+      for (Transition transition in state.transitions) {
+        StringBuffer labels = StringBuffer();
+        List<int> symbols =
+            transition.label != null ? transition.label!.toList() : [];
+        if (symbols.length > 2) {
+          // Only print first and last symbol of larger sets.
+          labels.write(
+              "${vocabulary.getDisplayName(symbols[0])} .. ${vocabulary.getDisplayName(symbols[symbols.length - 1])}");
+        } else {
+          for (int symbol in symbols) {
+            if (labels.isNotEmpty) {
+              labels.write(", ");
+            }
+            labels.write(vocabulary.getDisplayName(symbol));
+          }
+        }
+        if (labels.isEmpty) {
+          labels.write("ε");
+        }
+        transitionDescription
+          ..write("\n")
+          ..write(currentIndent)
+          ..write("\t(")
+          ..write(labels)
+          ..write(") [")
+          ..write(transition.target.stateNumber)
+          ..write(" ")
+          ..write(atnStateTypeMap[transition.target.stateType.index])
+          ..write("] in ")
+          ..write(ruleNames[transition.target.ruleIndex]);
+      }
+    }
+
+    // Fixed: the state line is logged even when transition output is
+    // disabled (previously this was nested inside the if above, so nothing
+    // was logged at all with debugOutputWithTransitions == false).
+    if (tokenIndex >= tokens.length - 1) {
+      output.write("<<${tokenStartIndex + tokenIndex}>> ");
+    } else {
+      output.write("<${tokenStartIndex + tokenIndex}> ");
+    }
+    logger.finer(
+        "$output Current state: $baseDescription$transitionDescription");
+  }
+
+  /// Logs the current rule invocation stack (at FINER).
+  void printRuleState(List<int> stack) {
+    if (stack.isEmpty) {
+      logger.fine("");
+      return;
+    }
+
+    if (logger.isLoggable(Level.FINER)) {
+      StringBuffer sb = StringBuffer();
+      for (int rule in stack) {
+        sb.write(" ${ruleNames[rule]}\n");
+      }
+      logger.finer(sb.toString());
+    }
+  }
+}
+
+/// Result container for [CodeCompletionCore.collectCandidates].
+class CandidatesCollection {
+  /// Token candidates: token type -> token types that can directly follow it
+  /// (used to suggest an "entire operator", e.g. ID '(' ')').
+  Map<int, List<int>> tokens = {};
+
+  /// Rule candidates: rule index -> rule call stack leading to the rule.
+  Map<int, List<int>> rules = {};
+
+  /// Preferred-rule character positions: rule index -> [startOffset, endOffset].
+  Map<int, List<int>> rulePositions = {};
+
+  @override
+  String toString() {
+    return "CandidatesCollection{tokens=$tokens, rules=$rules, ruleStrings=$rulePositions}";
+  }
+}
+
+/// One follow set: the terminals that can appear at a given ATN position,
+/// plus the rule path that leads there.
+class FollowSetWithPath {
+  /// Terminal token types reachable at this point (EPSILON marks rule end).
+  late IntervalSet intervals;
+
+  /// Rule indexes traversed to reach this set.
+  late List<int> path;
+
+  /// Token types that would directly follow the first matched token.
+  late List<int> following;
+}
+
+/// Cached follow sets for one ATN state, plus their union for fast
+/// membership checks.
+class FollowSetsHolder {
+  late List<FollowSetWithPath> sets;
+
+  /// Union of all [sets] intervals.
+  late IntervalSet combined;
+}
+
+/// Work item for the iterative ATN walk in [CodeCompletionCore.processRule]:
+/// an ATN state paired with the token index at which it is entered.
+/// Fields are final — entries are never mutated after construction.
+class PipelineEntry {
+  final ATNState state;
+  final int tokenIndex;
+
+  PipelineEntry(this.state, this.tokenIndex);
+}
diff --git a/ports/dart/pubspec.yaml b/ports/dart/pubspec.yaml
new file mode 100644
index 0000000..681de6e
--- /dev/null
+++ b/ports/dart/pubspec.yaml
@@ -0,0 +1,17 @@
+name: antlr4_c3
+description: A grammar agnostic code completion engine for ANTLR4 based parsers
+version: 1.0.0+1
+repository: https://github.com/CorvusYe/antlr4-c3/tree/main/ports/dart
+
+environment:
+ sdk: ^3.5.4
+
+# Runtime dependencies.
+dependencies:
+ antlr4: ^4.13.2
+ logging: ^1.3.0
+ # path: ^1.8.0
+
+dev_dependencies:
+ lints: ^4.0.0
+ test: ^1.24.0
diff --git a/ports/dart/test/antlr4_c3_test.dart b/ports/dart/test/antlr4_c3_test.dart
new file mode 100644
index 0000000..4b9977a
--- /dev/null
+++ b/ports/dart/test/antlr4_c3_test.dart
@@ -0,0 +1,160 @@
+/*
+ * Copyright © 2024 VMware, Inc & dudu.ltd. All Rights Reserved.
+ *
+ * SPDX-License-Identifier: MIT
+ *
+ * See LICENSE file for more info.
+ */
+
+import 'package:antlr4/antlr4.dart';
+import 'package:antlr4_c3/antlr4_c3.dart';
+import 'package:test/test.dart';
+
+import '../example/gen/ExprLexer.dart';
+import '../example/gen/ExprParser.dart';
+
+/// Error listener that counts syntax errors so the tests can assert that an
+/// input parsed cleanly before running code completion on it.
+class CountingErrorListener extends BaseErrorListener {
+ int errorCount = 0;
+
+ @override
+ void syntaxError(
+ Recognizer recognizer,
+ Object? offendingSymbol,
+ int? line,
+ int charPositionInLine,
+ String msg,
+ RecognitionException? e,
+ ) {
+ // Delegate to the base behavior, then bump the counter.
+ super.syntaxError(
+ recognizer, offendingSymbol, line, charPositionInLine, msg, e);
+ errorCount++;
+ }
+}
+
+// Port of the antlr4-c3 reference tests. The caret positions below are
+// TOKEN indexes into 'var c = a + b()'. Since WS matches one character per
+// token, the stream is:
+//   0:var 1:ws 2:c 3:ws 4:'=' 5:ws 6:a 7:ws 8:'+' 9:ws 10:b 11:'(' 12:')' 13:EOF
+void main() {
+ test('simpleExpressionTest', () {
+ print('\nsimpleExpressionTest');
+
+ var expression = 'var c = a + b()';
+ var lexer = ExprLexer(InputStream.fromString(expression));
+ var tokens = CommonTokenStream(lexer);
+ var parser = ExprParser(tokens);
+
+ lexer.removeErrorListeners();
+ parser.removeErrorListeners();
+ var errorListener = CountingErrorListener();
+ parser.addErrorListener(errorListener);
+
+ parser.expression();
+
+ // The input must parse without errors before completion is meaningful.
+ expect(errorListener.errorCount, 0);
+
+ var core = CodeCompletionCore(parser, null, null);
+
+ // Caret at the very start: a statement can begin with VAR, LET or ID.
+ var candidates = core.collectCandidates(0, null);
+
+ expect(candidates.tokens.length, 3);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_VAR), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_LET), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_ID), isTrue);
+
+ // expect(candidates.tokens[ExprLexer.TOKEN_VAR], []);
+ // expect(candidates.tokens[ExprLexer.TOKEN_LET], []);
+ expect(candidates.tokens[ExprLexer.TOKEN_ID], []);
+
+ // On the whitespace after 'var': only an identifier can follow.
+ candidates = core.collectCandidates(1, null);
+ expect(candidates.tokens.length, 1);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_ID), isTrue);
+
+ // On 'c' itself: still the identifier position.
+ candidates = core.collectCandidates(2, null);
+ expect(candidates.tokens.length, 1);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_ID), isTrue);
+
+ // After the assigned variable name: '=' is expected.
+ candidates = core.collectCandidates(4, null);
+ expect(candidates.tokens.length, 1);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_EQUAL), isTrue);
+
+ // Start of the right-hand side: an identifier.
+ candidates = core.collectCandidates(6, null);
+ expect(candidates.tokens.length, 1);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_ID), isTrue);
+
+ // After 'a': any binary operator, or '(' to turn 'a' into a call.
+ candidates = core.collectCandidates(8, null);
+ expect(candidates.tokens.length, 5);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_PLUS), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_MINUS), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_MULTIPLY), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_DIVIDE), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_OPEN_PAR), isTrue);
+ });
+
+ test('typicalExpressionTest', () {
+ print('\ntypicalExpressionTest');
+
+ // Token indexes here ('var c = a + b'):
+ //   0:var 1:ws 2:c 3:ws 4:'=' 5:ws 6:a 7:ws 8:'+' 9:ws 10:b 11:EOF
+ var expression = 'var c = a + b';
+ var lexer = ExprLexer(InputStream.fromString(expression));
+ var tokens = CommonTokenStream(lexer);
+ var parser = ExprParser(tokens);
+ parser.interpreter?.predictionMode =
+ PredictionMode.LL_EXACT_AMBIG_DETECTION;
+
+ lexer.removeErrorListeners();
+ parser.removeErrorListeners();
+ var errorListener = CountingErrorListener();
+ parser.addErrorListener(errorListener);
+
+ parser.expression();
+
+ expect(errorListener.errorCount, 0);
+
+ // Report variable/function references as rules, and hide the plain
+ // token candidates so only keywords and rules remain.
+ var preferredRules = {RULE_functionRef, RULE_variableRef};
+ var ignoredTokens = {
+ ExprLexer.TOKEN_ID,
+ ExprLexer.TOKEN_PLUS,
+ ExprLexer.TOKEN_MINUS,
+ ExprLexer.TOKEN_MULTIPLY,
+ ExprLexer.TOKEN_DIVIDE,
+ ExprLexer.TOKEN_EQUAL
+ };
+
+ var core = CodeCompletionCore(parser, preferredRules, ignoredTokens);
+
+ var candidates = core.collectCandidates(0, null);
+
+ // At the start only the keywords survive the ignore list.
+ expect(candidates.tokens.length, 2);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_VAR), isTrue);
+ expect(candidates.tokens.containsKey(ExprLexer.TOKEN_LET), isTrue);
+
+ // expect(candidates.tokens[ExprLexer.TOKEN_VAR], []);
+ // expect(candidates.tokens[ExprLexer.TOKEN_LET], []);
+
+ // ID positions yield no tokens (ID is ignored)...
+ candidates = core.collectCandidates(2, null);
+ expect(candidates.tokens.length, 0);
+
+ candidates = core.collectCandidates(4, null);
+ expect(candidates.tokens.length, 0);
+
+ // ...but at an expression position both preferred rules are reported.
+ candidates = core.collectCandidates(6, null);
+ expect(candidates.tokens.length, 0);
+ expect(candidates.rules.length, 2);
+
+ var found = 0;
+ candidates.rules.forEach((key, value) {
+ if (key == RULE_functionRef || key == RULE_variableRef) {
+ found++;
+ }
+ });
+ expect(found, 2);
+
+ // Mid-identifier ('a' already begun): only one rule candidate remains.
+ candidates = core.collectCandidates(7, null);
+ expect(candidates.tokens.length, 0);
+ expect(candidates.rules.length, 1);
+
+ found = 0;
+ candidates.rules.forEach((key, value) {
+ if (key == RULE_functionRef || key == RULE_variableRef) {
+ found++;
+ }
+ });
+ expect(found, 1);
+ });
+}