diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..abc78ba --- /dev/null +++ b/.gitignore @@ -0,0 +1,2 @@ +/.gradle/ +/build/ diff --git a/build.gradle b/build.gradle index fd39f48..7795c84 100644 --- a/build.gradle +++ b/build.gradle @@ -9,13 +9,15 @@ // Apply the java plugin to add support for Java plugins { + id "maven" id "maven-publish" + id "signing" id "eclipse" id "java" } project.group = 'com.onkiup' -project.version = '0.8' +project.version = '0.9' compileJava { sourceCompatibility = '1.8' @@ -32,22 +34,42 @@ repositories { // In this section you declare the dependencies for your production and test code dependencies { - // The production code uses the SLF4J logging API at compile time - compile 'org.slf4j:slf4j-api:+' - compile 'org.slf4j:slf4j-log4j12:+' - // https://mvnrepository.com/artifact/org.reflections/reflections - compile group: 'org.reflections', name: 'reflections', version: '0.9.11' - compile group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.12.1' + compile "com.onkiup:linker-parser-api:${project.version}" + // The production code uses the SLF4J logging API at compile time + compileOnly group: 'org.slf4j', name: 'slf4j-api', version: '1.7.28' + compileOnly group: 'org.slf4j', name: 'slf4j-log4j12', version: '1.7.28' + // https://mvnrepository.com/artifact/org.reflections/reflections + compileOnly group: 'org.reflections', name: 'reflections', version: '0.9.11' + compileOnly group: 'org.apache.logging.log4j', name: 'log4j-core', version: '2.12.1' - // Declare the dependency for your favourite test framework you want to use in your tests. - // TestNG is also supported by the Gradle Test task. Just change the - // testCompile dependency to testCompile 'org.testng:testng:6.8.1' and add - // 'test.useTestNG()' to your build script. 
- testCompile 'junit:junit:4.12' - testCompile group: 'org.mockito', name: 'mockito-core', version: '3.0.0' - testCompile group: 'org.powermock', name: 'powermock-module-junit4', version: '2.0.2' - testCompile group: 'org.powermock', name: 'powermock-api-mockito2', version: '2.0.2' + + // Declare the dependency for your favourite test framework you want to use in your tests. + // TestNG is also supported by the Gradle Test task. Just change the + // testCompile dependency to testCompile 'org.testng:testng:6.8.1' and add + // 'test.useTestNG()' to your build script. + testCompile 'junit:junit:4.12' + testCompile group: 'org.mockito', name: 'mockito-core', version: '3.0.0' + testCompile group: 'org.powermock', name: 'powermock-module-junit4', version: '2.0.2' + testCompile group: 'org.powermock', name: 'powermock-api-mockito2', version: '2.0.2' +} + +task javadocJar(type: Jar) { + classifier = 'javadoc' + from javadoc +} + +task sourcesJar(type: Jar) { + classifier = 'sources' + from sourceSets.main.allSource +} + +artifacts { + archives javadocJar, sourcesJar +} + +signing { + sign configurations.archives } publishing { @@ -57,3 +79,48 @@ publishing { } } } + +uploadArchives { + repositories { + mavenDeployer { + beforeDeployment { MavenDeployment deployment -> signing.signPom(deployment) } + + repository(url: "https://oss.sonatype.org/service/local/staging/deploy/maven2/") { + //authentication(userName: nexusUsername, password: nexusPassword) + } + + snapshotRepository(url: "https://oss.sonatype.org/content/repositories/snapshots/") { + //authentication(userName: nexusUsername, password: nexusPassword) + } + + pom.project { + name 'Linker-Parser' + packaging 'jar' + // optionally artifactId can be defined here + description 'Text parser that uses Java classes as grammar definitions' + url 'https://github.com/chedim/linker-parser' + + scm { + connection 'scm:git:https://github.com/chedim/linker-parser.git' + developerConnection 
'scm:git:git@github.com:chedim/linker-parser.git' + url 'https://github.com/chedim/linker-parser' + } + + licenses { + license { + name 'MIT License' + url 'https://raw.githubusercontent.com/chedim/linker-parser/master/LICENSE' + } + } + + developers { + developer { + id 'chedim' + name 'Dmitrii Chechetkin' + email 'chedim@chedim.com' + } + } + } + } + } +} diff --git a/gradle/wrapper/gradle-wrapper.jar b/gradle/wrapper/gradle-wrapper.jar index 9ab0a83..87b738c 100644 Binary files a/gradle/wrapper/gradle-wrapper.jar and b/gradle/wrapper/gradle-wrapper.jar differ diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 45a4d68..8a486c8 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,4 +1,5 @@ -distributionUrl=https\://services.gradle.org/distributions/gradle-4.4.1-bin.zip +#Thu Oct 17 16:33:41 EDT 2019 +distributionUrl=https\://services.gradle.org/distributions/gradle-5.2.1-all.zip distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists zipStorePath=wrapper/dists diff --git a/gradlew b/gradlew index cccdd3d..af6708f 100755 --- a/gradlew +++ b/gradlew @@ -28,7 +28,7 @@ APP_NAME="Gradle" APP_BASE_NAME=`basename "$0"` # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. -DEFAULT_JVM_OPTS="" +DEFAULT_JVM_OPTS='"-Xmx64m"' # Use the maximum available, or set MAX_FD != -1 to use that value. MAX_FD="maximum" diff --git a/gradlew.bat b/gradlew.bat index e95643d..0f8d593 100644 --- a/gradlew.bat +++ b/gradlew.bat @@ -14,7 +14,7 @@ set APP_BASE_NAME=%~n0 set APP_HOME=%DIRNAME% @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 
-set DEFAULT_JVM_OPTS= +set DEFAULT_JVM_OPTS="-Xmx64m" @rem Find java.exe if defined JAVA_HOME goto findJavaFromJavaHome diff --git a/settings.gradle b/settings.gradle index 8ffd6f7..0f93e44 100644 --- a/settings.gradle +++ b/settings.gradle @@ -17,3 +17,5 @@ include 'services:webservice' */ rootProject.name = 'linker-parser' +include 'linker-parser-api' + diff --git a/src/main/java/com/onkiup/linker/parser/EnumRule.java b/src/main/java/com/onkiup/linker/parser/EnumRule.java deleted file mode 100644 index 20dcaf9..0000000 --- a/src/main/java/com/onkiup/linker/parser/EnumRule.java +++ /dev/null @@ -1,5 +0,0 @@ -package com.onkiup.linker.parser; - -public interface EnumRule { - -} diff --git a/src/main/java/com/onkiup/linker/parser/LinkerParser.java b/src/main/java/com/onkiup/linker/parser/LinkerParser.java deleted file mode 100644 index d6d7cde..0000000 --- a/src/main/java/com/onkiup/linker/parser/LinkerParser.java +++ /dev/null @@ -1,5 +0,0 @@ -package com.onkiup.linker.parser; - -public class LinkerParser { - private Class startProduction; -} diff --git a/src/main/java/com/onkiup/linker/parser/NumberMatcher.java b/src/main/java/com/onkiup/linker/parser/NumberMatcher.java index ff63acf..8e2fcf5 100644 --- a/src/main/java/com/onkiup/linker/parser/NumberMatcher.java +++ b/src/main/java/com/onkiup/linker/parser/NumberMatcher.java @@ -3,7 +3,6 @@ import java.lang.reflect.Constructor; import java.lang.reflect.InvocationTargetException; -@Deprecated public class NumberMatcher implements TokenMatcher { private Constructor pattern; private Class type; @@ -20,24 +19,25 @@ public NumberMatcher(Class type) { @Override public TokenTestResult apply(CharSequence buffer) { try { - pattern.newInstance(buffer.toString()); - return TestResult.matchContinue(buffer.length(), buffer.toString()); + if (buffer.length() > 0 && buffer.charAt(buffer.length() - 1) == ' ') { + // a fix for Number constructors that eat trailing characters + throw new InvocationTargetException(new 
NumberFormatException("--")); + } + + return TestResult.matchContinue(buffer.length(), pattern.newInstance(buffer.toString())); } catch (InvocationTargetException nfe) { Throwable cause = nfe.getCause(); if (!(cause instanceof NumberFormatException)) { return TestResult.fail(); } - if (cause.getMessage().indexOf("out of range") > -1){ + if (cause.getMessage() != null && cause.getMessage().indexOf("out of range") > -1){ return TestResult.fail(); } if (buffer.length() > 1) { - // rolling back one character + // rolling back one character (under the assumption that buffer accumulation performed on a char-by-char basis) try { - char drop = buffer.charAt(buffer.length() - 1); - if (drop != '.') { - Number token = pattern.newInstance(buffer.subSequence(0, buffer.length())); - return TestResult.match(buffer.length() - 1, token); - } + Number token = pattern.newInstance(buffer.subSequence(0, buffer.length() - 1)); + return TestResult.match(buffer.length() - 1, token); } catch (InvocationTargetException nfe2) { if (nfe2.getCause() instanceof NumberFormatException) { // this is fine diff --git a/src/main/java/com/onkiup/linker/parser/ParserContext.java b/src/main/java/com/onkiup/linker/parser/ParserContext.java new file mode 100644 index 0000000..9f0f995 --- /dev/null +++ b/src/main/java/com/onkiup/linker/parser/ParserContext.java @@ -0,0 +1,74 @@ +package com.onkiup.linker.parser; + +import java.io.Reader; +import java.util.Map; +import java.util.WeakHashMap; +import java.util.stream.Stream; + +import org.reflections.Reflections; +import org.reflections.scanners.SubTypesScanner; +import org.reflections.util.ClasspathHelper; +import org.reflections.util.ConfigurationBuilder; + +public class ParserContext implements LinkerParser { + + private static InheritableThreadLocal INSTANCE = new InheritableThreadLocal<>(); + + private Reflections reflections = new Reflections(new ConfigurationBuilder() + .setUrls(ClasspathHelper.forClassLoader(TokenGrammar.class.getClassLoader())) + 
.setScanners(new SubTypesScanner(true)) + ); + + private Class> extension; + + private Map extensions = new WeakHashMap<>(); + + private Class target; + + private TokenGrammar grammar; + + public static ParserContext get() { + ParserContext instance = INSTANCE.get(); + if (instance == null) { + instance = new ParserContext(); + INSTANCE.set(instance); + } + return instance; + } + + public Stream> implementations(Class junction) { + return subClasses(junction) + .filter(TokenGrammar::isConcrete) + .filter(TokenGrammar::isRule); + } + + public Stream> subClasses(Class parent) { + return reflections.getSubTypesOf(parent).stream(); + } + + /** + * This method can be used to limit the scope in which grammar tokens are looked up for + * @param classLoader a classloader to take classpath from + */ + public void classLoader(ClassLoader classLoader) { + reflections = new Reflections(new ConfigurationBuilder() + .setUrls(ClasspathHelper.forClassLoader(classLoader)) + .setScanners(new SubTypesScanner(true)) + ); + } + + @Override + public LinkerParser target(Class target) { + this.target = target; + this.grammar = TokenGrammar.forClass(target); + return this; + } + + @Override + public X parse(String sourceName, Reader from) { + if (grammar == null) { + grammar = TokenGrammar.forClass(target); + } + return grammar.tokenize(sourceName, from); + } +} diff --git a/src/main/java/com/onkiup/linker/parser/PatternMatcher.java b/src/main/java/com/onkiup/linker/parser/PatternMatcher.java index 4d532e5..c403de7 100644 --- a/src/main/java/com/onkiup/linker/parser/PatternMatcher.java +++ b/src/main/java/com/onkiup/linker/parser/PatternMatcher.java @@ -19,6 +19,10 @@ public PatternMatcher(String pattern) { } public PatternMatcher(CapturePattern pattern) { + this(pattern, false); + } + + public PatternMatcher(CapturePattern pattern, boolean ignoreCase) { String matcherPattern = pattern.pattern(); if (matcherPattern.length() == 0) { String value = pattern.value(); @@ -34,7 +38,7 @@ public 
PatternMatcher(CapturePattern pattern) { } this.replacement = pattern.replacement(); this.until = pattern.until(); - this.pattern = Pattern.compile(matcherPattern); + this.pattern = Pattern.compile(matcherPattern, ignoreCase ? Pattern.CASE_INSENSITIVE : 0); matcher = this.pattern.matcher(""); } diff --git a/src/main/java/com/onkiup/linker/parser/Rule.java b/src/main/java/com/onkiup/linker/parser/Rule.java deleted file mode 100644 index 5edcd0c..0000000 --- a/src/main/java/com/onkiup/linker/parser/Rule.java +++ /dev/null @@ -1,120 +0,0 @@ -package com.onkiup.linker.parser; - -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.OutputStream; -import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; - -import com.onkiup.linker.parser.token.CollectionToken; -import com.onkiup.linker.parser.token.PartialToken; -import com.onkiup.linker.parser.token.RuleToken; -import com.onkiup.linker.parser.token.VariantToken; - -// in 0.4: -// - changed Metadata to hold PartialTokens instead of ParserLocations -// in 0.2.2: -// - added "C" type parameter -// - made it implement Consumer -/** - * Main interface for all grammar definitions - */ -public interface Rule { - - static class Metadata { - private static ConcurrentHashMap metadata = new ConcurrentHashMap<>(); - - public static Optional metadata(Rule rule) { - return Optional.ofNullable(metadata.get(rule)); - } - - public static void metadata(Rule rule, PartialToken token) { - metadata.put(rule, token); - } - - static void remove(Rule rule) { - metadata.remove(rule); - } - } - - static X load(InputStream is) throws IOException, ClassNotFoundException { - ObjectInputStream ois = new ObjectInputStream(is); - return load(ois); - } - - static X load(ObjectInputStream ois) throws IOException, ClassNotFoundException { - Object result = ois.readObject(); - if (result instanceof Rule) { - return (X)result; - } - String 
resultType = result == null ? "null" : result.getClass().getName(); - throw new IllegalArgumentException(resultType + " is not a Rule"); - } - - /** - * @return parent token or null if this token is root token - */ - default Optional parent() { - return Metadata.metadata(this) - .map(meta -> { - do { - meta = (PartialToken) meta.parent().orElse(null); - } while (!(meta instanceof RuleToken)); - return meta; - }) - .flatMap(PartialToken::token); - } - - /** - * @return true if this token was successfully populated; false if parser is still working on some of the token's fields - */ - default boolean populated() { - return Metadata.metadata(this) - .map(PartialToken::isPopulated) - .orElse(false); - } - - default void onPopulated() { - - } - - default Optional metadata() { - return Metadata.metadata(this); - } - - default ParserLocation location() { - return metadata().map(PartialToken::location).orElse(null); - } - - /** - * Reevaluation callback. - * Called by parser every time it updates the token - */ - default void reevaluate() { - - } - - /** - * Invalidation callback - * called by arser every time it detaches the token from the tree - */ - default void invalidate() { - - } - - default CharSequence source() { - return metadata().map(PartialToken::source).orElse(null); - } - - default void store(OutputStream os) throws IOException { - ObjectOutputStream oos = new ObjectOutputStream(os); - store(oos); - } - - default void store(ObjectOutputStream oos) throws IOException { - oos.writeObject(this); - } -} - diff --git a/src/main/java/com/onkiup/linker/parser/TerminalMatcher.java b/src/main/java/com/onkiup/linker/parser/TerminalMatcher.java index fec2555..2a207ec 100644 --- a/src/main/java/com/onkiup/linker/parser/TerminalMatcher.java +++ b/src/main/java/com/onkiup/linker/parser/TerminalMatcher.java @@ -4,10 +4,12 @@ public class TerminalMatcher implements TokenMatcher { private final String pattern; private final int patternLen; + private final boolean ignoreCase; 
- public TerminalMatcher(String pattern) { + public TerminalMatcher(String pattern, boolean ignoreCase) { this.pattern = pattern; this.patternLen = pattern.length(); + this.ignoreCase = ignoreCase; } @Override @@ -15,13 +17,15 @@ public TokenTestResult apply(CharSequence buffer) { int bufferLen = buffer.length(); int charsToCompare = Math.min(patternLen, bufferLen); for (int i = 0; i < charsToCompare; i++) { - if (pattern.charAt(i) != buffer.charAt(i)) { + char patternChar = ignoreCase ? Character.toLowerCase(pattern.charAt(i)) : pattern.charAt(i); + char bufferChar = ignoreCase ? Character.toLowerCase(buffer.charAt(i)) : buffer.charAt(i); + if (patternChar != bufferChar) { return TestResult.fail(); } } if (patternLen <= bufferLen) { - return TestResult.match(patternLen, pattern); + return TestResult.match(patternLen, buffer.subSequence(0, patternLen)); } return TestResult.continueNoMatch(); } diff --git a/src/main/java/com/onkiup/linker/parser/TokenGrammar.java b/src/main/java/com/onkiup/linker/parser/TokenGrammar.java index 646149e..87200fc 100644 --- a/src/main/java/com/onkiup/linker/parser/TokenGrammar.java +++ b/src/main/java/com/onkiup/linker/parser/TokenGrammar.java @@ -4,6 +4,7 @@ import java.io.Reader; import java.io.StringReader; import java.lang.reflect.Modifier; +import java.util.Arrays; import java.util.Enumeration; import java.util.Optional; import java.util.concurrent.atomic.AtomicInteger; @@ -41,7 +42,7 @@ public class TokenGrammar { public static TokenGrammar forClass(Class type) { return new TokenGrammar<>(type, null); } - + /** * For future handling of metatokens like comments * @param type resulting token type @@ -155,6 +156,7 @@ public X tokenize(Reader source) throws SyntaxError { */ public X tokenize(String sourceName, Reader source) throws SyntaxError { AtomicInteger position = new AtomicInteger(0); + ParserContext.get().classLoader(getTokenType().getClassLoader()); SelfPopulatingBuffer buffer = null; try { buffer = new 
SelfPopulatingBuffer(sourceName, source); @@ -162,7 +164,7 @@ public X tokenize(String sourceName, Reader source) throws SyntaxError { throw new RuntimeException("Failed to read source " + sourceName, e); } try { - CompoundToken rootToken = CompoundToken.forClass(type, new ParserLocation(sourceName, 0, 0, 0)); + CompoundToken rootToken = CompoundToken.forClass(type, 0, new ParserLocation(sourceName, 0, 0, 0)); ConsumingToken.ConsumptionState.rootBuffer(rootToken, buffer); CompoundToken parent = rootToken; ConsumingToken consumer = nextConsumingToken(parent).orElseThrow(() -> new ParserError("No possible consuming tokens found", parent)); @@ -482,5 +484,10 @@ private void restoreLoggingLayouts() { } } } + + public static boolean isRule(Class aClass) { + return Arrays.stream(aClass.getInterfaces()) + .anyMatch(Rule.class::equals); + } } diff --git a/src/main/java/com/onkiup/linker/parser/TokenMatcher.java b/src/main/java/com/onkiup/linker/parser/TokenMatcher.java deleted file mode 100644 index 7b95deb..0000000 --- a/src/main/java/com/onkiup/linker/parser/TokenMatcher.java +++ /dev/null @@ -1,70 +0,0 @@ -package com.onkiup.linker.parser; - -import java.lang.reflect.Field; -import java.lang.reflect.Modifier; -import java.util.function.Function; - -import com.onkiup.linker.parser.annotation.CapturePattern; -import com.onkiup.linker.parser.annotation.ContextAware; -import com.onkiup.linker.parser.token.CompoundToken; -import com.onkiup.linker.parser.util.LoggerLayout; - -@FunctionalInterface -public interface TokenMatcher extends Function { - - public static TokenMatcher forField(CompoundToken parent, Field field) { - Class type = field.getType(); - return forField(parent, field, type); - } - - public static TokenMatcher forField(CompoundToken parent, Field field, Class type) { - if (type.isArray()) { - throw new IllegalArgumentException("Array fields should be handled as ArrayTokens"); - } else if (Rule.class.isAssignableFrom(type)) { - throw new 
IllegalArgumentException("Rule fields should be handled as RuleTokens"); - } else if (type != String.class) { - throw new IllegalArgumentException("Unsupported field type: " + type); - } - - try { - field.setAccessible(true); - if (Modifier.isStatic(field.getModifiers())) { - String terminal = (String) field.get(null); - if (terminal == null) { - throw new IllegalArgumentException("null terminal"); - } - - return new TerminalMatcher(terminal); - } else if (field.isAnnotationPresent(CapturePattern.class)) { - CapturePattern pattern = field.getAnnotation(CapturePattern.class); - return new PatternMatcher(pattern); - } else if (field.isAnnotationPresent(ContextAware.class)) { - ContextAware contextAware = field.getAnnotation(ContextAware.class); - if (contextAware.matchField().length() > 0) { - Object token = parent.token().orElseThrow(() -> new IllegalStateException("Parent token is null")); - Field dependency = field.getDeclaringClass().getDeclaredField(contextAware.matchField()); - dependency.setAccessible(true); - Object fieldValue = dependency.get(token); - if (fieldValue instanceof String) { - parent.log("Creating context-aware matcher for field $" + field.getName() + " to be equal to '" + - LoggerLayout.sanitize(fieldValue) + "' value of target field $" + dependency.getName()); - return new TerminalMatcher((String)fieldValue); - } else if (fieldValue == null) { - parent.log("Creating context-aware null matcher for field $" + field.getName() + " to be equal to null value of target field $" + dependency.getName()); - return new NullMatcher(); - } else { - throw new IllegalArgumentException("Unable to create field matcher for target field value of type '" + fieldValue.getClass().getName() + "'"); - } - } else { - throw new IllegalArgumentException("Misconfigured ContextAware annotation?"); - } - } else { - throw new IllegalArgumentException("Non-static String fields MUST have CapturePattern annotation"); - } - } catch (Exception e) { - throw new 
IllegalArgumentException("Failed to create matcher for field " + field, e); - } - } - -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/AdjustPriority.java b/src/main/java/com/onkiup/linker/parser/annotation/AdjustPriority.java deleted file mode 100644 index 16beaa6..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/AdjustPriority.java +++ /dev/null @@ -1,26 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Adjusts concrete token priority that affects token testing order for grammar junctions ({@link com.onkiup.linker.parser.token.VariantToken}) - * (tokens tested in ascending order of their priority: token with priority 0 will be tested prior to token with priority 9999) - */ -@Target(ElementType.TYPE) -@Retention(RetentionPolicy.RUNTIME) -public @interface AdjustPriority { - /** - * @return value to which token's priority should be adjusted - */ - int value(); - - /** - * @return boolean flag that indicates whether this priority adjustment should be propagated to parent token - * (used primarily for arithmetical equations) - */ - boolean propagate() default false; -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/Alternatives.java b/src/main/java/com/onkiup/linker/parser/annotation/Alternatives.java deleted file mode 100644 index 1963893..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/Alternatives.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Instructs {@link com.onkiup.linker.parser.token.VariantToken} instances to use provided list of alternatives instead of generating it using Reflections - */ 
-@Target(ElementType.TYPE) -@Retention(RetentionPolicy.RUNTIME) -public @interface Alternatives { - /** - * @return an array with alternatives to use - */ - Class[] value(); -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/CaptureLimit.java b/src/main/java/com/onkiup/linker/parser/annotation/CaptureLimit.java deleted file mode 100644 index 5738a6c..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/CaptureLimit.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Limits number of elements (array members, characters, etc) to be captured into the annotated field - */ -@Target(ElementType.FIELD) -@Retention(RetentionPolicy.RUNTIME) -public @interface CaptureLimit { - /** - * @return Minimum number of elements required for the token to be populated - */ - int min() default 0; - - /** - * @return Maximum number of elements allowed to be populated into the token for it to not fail - */ - int max() default Integer.MAX_VALUE; -} diff --git a/src/main/java/com/onkiup/linker/parser/annotation/CapturePattern.java b/src/main/java/com/onkiup/linker/parser/annotation/CapturePattern.java deleted file mode 100644 index aa72599..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/CapturePattern.java +++ /dev/null @@ -1,41 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * This annotation can be used on String fields to define capturing terminal limits - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface CapturePattern { - /** - * Accepts a regular expression that will be used to match characters from the input - * If 
provided then "until" parameter will be ignored - */ - String value() default ""; - - /** - * Deprecated, use value instead - */ - @Deprecated - String pattern() default ""; - - /** - * Accepts a regular expression replacement parameter that can be used either to: - * - transform matched by defined as "value()" regexp text - * - transform matched by "until()" limiter and append transformation result to the end of captured text - */ - String replacement() default ""; - - /** - * Accepts a regular expression that Parser will use as stop token for capturing process - * If no "replacement()" is specified, then matched by this expression stop token will be discarded - * If "replacement()" is specified, then stop token will be transformed using that value and appended to captured text - * Ignored if either "value()" or "pattern()" are not empty - */ - String until() default ""; -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/ContextAware.java b/src/main/java/com/onkiup/linker/parser/annotation/ContextAware.java deleted file mode 100644 index bb7c268..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/ContextAware.java +++ /dev/null @@ -1,19 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Allows context-aware token matching - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface ContextAware { - /** - * Instructs the parser to create a ConsumingToken for this field that would exactly match value from a previously populated field - * @return - */ - String matchField() default ""; -} diff --git a/src/main/java/com/onkiup/linker/parser/annotation/CustomMatcher.java b/src/main/java/com/onkiup/linker/parser/annotation/CustomMatcher.java deleted file mode 100644 index a9de049..0000000 --- 
a/src/main/java/com/onkiup/linker/parser/annotation/CustomMatcher.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -import com.onkiup.linker.parser.TokenMatcher; - -@Deprecated -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface CustomMatcher { - Class value(); -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/IgnoreCharacters.java b/src/main/java/com/onkiup/linker/parser/annotation/IgnoreCharacters.java deleted file mode 100644 index c37b944..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/IgnoreCharacters.java +++ /dev/null @@ -1,24 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Instructs the parser to ignore provided characters before matching every field of the rule - */ -@Target(ElementType.TYPE) -@Retention(RetentionPolicy.RUNTIME) -public @interface IgnoreCharacters { - /** - * @return string with characters to ignore - */ - String value() default ""; - - /** - * @return a flag that indicates that parser should also use ignored charcters list from the parent token - */ - boolean inherit() default false; -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/IgnoreVariant.java b/src/main/java/com/onkiup/linker/parser/annotation/IgnoreVariant.java deleted file mode 100644 index 5a43465..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/IgnoreVariant.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import 
java.lang.annotation.Target; - -/** - * Instructs parser to completely ignore this class during parsing - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.TYPE) -public @interface IgnoreVariant { - -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/MetaToken.java b/src/main/java/com/onkiup/linker/parser/annotation/MetaToken.java deleted file mode 100644 index 5c2d41a..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/MetaToken.java +++ /dev/null @@ -1,16 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Marks a rule definition class as a MetaToken, - * which causes VariantToken to "hide" matched instances of marked class by detaching them from the AST and - * putting them into the next matched variant's metadata - */ -@Target(ElementType.TYPE) -@Retention(RetentionPolicy.RUNTIME) -public @interface MetaToken { -} diff --git a/src/main/java/com/onkiup/linker/parser/annotation/OptionalToken.java b/src/main/java/com/onkiup/linker/parser/annotation/OptionalToken.java deleted file mode 100644 index 4433281..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/OptionalToken.java +++ /dev/null @@ -1,33 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * Marks a field as optional - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface OptionalToken { - /** - * Instructs the parser to treat this field as optional only if its possible position - * in the source contains returned characters instead - * @return characters to test for - */ - String whenFollowedBy() default ""; - - /** - * Instructs the parser to treat this field as 
optional only when other (previously processed) field is null - * @return the name of the other field to test - */ - String whenFieldIsNull() default ""; - - /** - * Instructs the parser to treat this field as optional only when other (previously processed) field is not null - * @return the name of the other field to test - */ - String whenFieldNotNull() default ""; -} - diff --git a/src/main/java/com/onkiup/linker/parser/annotation/SkipIfFollowedBy.java b/src/main/java/com/onkiup/linker/parser/annotation/SkipIfFollowedBy.java deleted file mode 100644 index e132faf..0000000 --- a/src/main/java/com/onkiup/linker/parser/annotation/SkipIfFollowedBy.java +++ /dev/null @@ -1,17 +0,0 @@ -package com.onkiup.linker.parser.annotation; - -import java.lang.annotation.ElementType; -import java.lang.annotation.Retention; -import java.lang.annotation.RetentionPolicy; -import java.lang.annotation.Target; - -/** - * @see {@link OptionalToken#whenFollowedBy()} - * @implNote current behaviour is similar to {@link OptionalToken#whenFollowedBy()} (the parser first tries to process the field and tests if its optional only when matching fails), but this may change later (so that the parser skips the field completely when optionality test succeeds without trying to match it) - */ -@Retention(RetentionPolicy.RUNTIME) -@Target(ElementType.FIELD) -public @interface SkipIfFollowedBy { - String value(); -} - diff --git a/src/main/java/com/onkiup/linker/parser/token/AbstractToken.java b/src/main/java/com/onkiup/linker/parser/token/AbstractToken.java index 3efcea4..42ebfb5 100644 --- a/src/main/java/com/onkiup/linker/parser/token/AbstractToken.java +++ b/src/main/java/com/onkiup/linker/parser/token/AbstractToken.java @@ -17,6 +17,7 @@ public abstract class AbstractToken implements PartialToken, Serializable { private CompoundToken parent; + private PartialToken previousToken, nextToken; /** * The field for which this token was created */ @@ -35,6 +36,7 @@ public abstract class AbstractToken 
implements PartialToken, Serializable private CharSequence optionalCondition; private transient Logger logger; private LinkedList metatokens = new LinkedList(); + private final int childNumber; /** * Main constructor @@ -42,14 +44,23 @@ public abstract class AbstractToken implements PartialToken, Serializable * @param targetField field for which this token is being constructed * @param location token's location in parser's buffer */ - public AbstractToken(CompoundToken parent, Field targetField, ParserLocation location) { + public AbstractToken(CompoundToken parent, int childNumber, Field targetField, ParserLocation location) { this.parent = parent; this.field = targetField; this.location = location; + this.childNumber = childNumber; readFlags(field); } + public void previousToken(PartialToken previousToken) { + this.previousToken = previousToken; + } + + public void nextToken(PartialToken nextToken) { + this.nextToken = nextToken; + } + /** * Sets optionality flag on this token: optional tokens don't propagate matching failures to their parents */ @@ -224,5 +235,20 @@ public void addMetaToken(Object metatoken) { public LinkedList metaTokens() { return metatokens; } + + @Override + public int position() { + return childNumber; + } + + @Override + public Optional> nextToken() { + return Optional.empty(); + } + + @Override + public Optional> previousToken() { + return Optional.empty(); + } } diff --git a/src/main/java/com/onkiup/linker/parser/token/CollectionToken.java b/src/main/java/com/onkiup/linker/parser/token/CollectionToken.java index 58f9c6e..d03b0e6 100644 --- a/src/main/java/com/onkiup/linker/parser/token/CollectionToken.java +++ b/src/main/java/com/onkiup/linker/parser/token/CollectionToken.java @@ -49,8 +49,8 @@ public class CollectionToken extends AbstractToken implements CompoundToke * @param tokenType type of the resulting array * @param location location of the token in parser's buffer */ - public CollectionToken(CompoundToken parent, Field field, 
Class tokenType, ParserLocation location) { - super(parent, field, location); + public CollectionToken(CompoundToken parent, int childIndex, Field field, Class tokenType, ParserLocation location) { + super(parent, childIndex, field, location); lastTokenEnd = location; this.fieldType = tokenType; this.memberType = fieldType.getComponentType(); @@ -186,7 +186,7 @@ public Optional> nextChild() { if (captureLimit == null || captureLimit.max() > children.size()) { if (nextMember == children.size()) { log("creating partial token for member#{}", children.size()); - current = PartialToken.forField(this, targetField().orElse(null), memberType, lastTokenEnd); + current = PartialToken.forField(this, children.size(), targetField().orElse(null), memberType, lastTokenEnd); children.add(current); } else if (nextMember < children.size()) { current = children.get(nextMember); @@ -273,4 +273,17 @@ public CharSequence dumpTree(int offset, CharSequence prefix, CharSequence child } return result; } + + @Override + public int childCount() { + return children.size(); + } + + @Override + public Optional> child(int position) { + if (position < 0 || position >= children.size()) { + return Optional.empty(); + } + return Optional.ofNullable(children.get(position)); + } } diff --git a/src/main/java/com/onkiup/linker/parser/token/CompoundToken.java b/src/main/java/com/onkiup/linker/parser/token/CompoundToken.java deleted file mode 100644 index e6f2393..0000000 --- a/src/main/java/com/onkiup/linker/parser/token/CompoundToken.java +++ /dev/null @@ -1,197 +0,0 @@ -package com.onkiup.linker.parser.token; - -import java.io.Serializable; -import java.util.Arrays; -import java.util.Objects; -import java.util.Optional; -import java.util.function.Consumer; - -import com.onkiup.linker.parser.ParserLocation; -import com.onkiup.linker.parser.Rule; -import com.onkiup.linker.parser.TokenGrammar; - -/** - * Common interface for any tokens that can contain children tokens - * @param the type of resulting token 
- */ -public interface CompoundToken extends PartialToken, Serializable { - - /** - * Creates a new CompoundToken for the provided class - * @param type class for which new token should be created - * @param position position at which the token will be located in the parser's input - * @return created CompoundToken - */ - static CompoundToken forClass(Class type, ParserLocation position) { - if (position == null) { - position = new ParserLocation(null, 0, 0, 0); - } - if (TokenGrammar.isConcrete(type)) { - return new RuleToken(null, null, type, position); - } else { - return new VariantToken(null, null, type, position); - } - } - - /** - * Callback method invoked every time a child token is successfully populated from parser's input - */ - void onChildPopulated(); - - /** - * Callback method invoked every time a child token population fails - */ - void onChildFailed(); - - /** - * @return the number of children left to be filled - */ - int unfilledChildren(); - - /** - * @return true if token contains any unfilled children - */ - default boolean hasUnfilledChildren() { - return unfilledChildren() > 0; - } - - /** - * @return true when this token has only one unfilled child left - */ - default boolean onlyOneUnfilledChildLeft() { - return unfilledChildren() == 1; - } - - /** - * @return the number of currently populating child - */ - int currentChild(); - - /** - * Forces the token to move its internal children pointer so that next populating child will be from the provided position - * @param newIndex the position of the child to be populated next - */ - void nextChild(int newIndex); - - /** - * @return all previously created children, optionally excluding any possible future children - */ - PartialToken[] children(); - - /** - * @param children an array of PartialToken objects to replace current token's children with - */ - void children(PartialToken[] children); - - /** - * @return the next child of this token to be populated - */ - Optional> nextChild(); - - /** 
- * Walks through token's children in reverse order removing them until the first child with alternativesLeft() > 0 - * If no such child found, then returns full token source - * @return source for removed tokens - */ - default void traceback() { - log("!!! TRACING BACK"); - PartialToken[] children = children(); - if (children.length == 0) { - invalidate(); - onFail(); - return; - } - int newSize = 0; - for (int i = children.length - 1; i > -1; i--) { - PartialToken child = children[i]; - if (child == null) { - continue; - } - - child.traceback(); - - if (!child.isFailed()) { - log("found alternatives at child#{}", i); - newSize = i + 1; - break; - } - - child.onFail(); - } - - if (newSize > 0) { - PartialToken[] newChildren = new PartialToken[newSize]; - System.arraycopy(children, 0, newChildren, 0, newSize); - children(newChildren); - nextChild(newSize - 1); - dropPopulated(); - log("Traced back to child #{}: {}", newSize - 1, newChildren[newSize-1].tag()); - } else { - onFail(); - } - } - - /** - * @return number of alternatives for this token, including its children - */ - @Override - default boolean alternativesLeft() { - PartialToken[] children = children(); - for (int i = 0; i < children.length; i++) { - PartialToken child = children[i]; - if (child != null) { - log("getting alternatives from child#{} {}", i, child.tag()); - if (child.alternativesLeft()) { - log("child#{} {} reported that it has alternatives", i, child.tag()); - return true; - } - } - } - return false; - } - - @Override - default int basePriority() { - int result = PartialToken.super.basePriority(); - - for (PartialToken child : children()) { - if (child != null && child.propagatePriority()) { - result += child.basePriority(); - } - } - - return result; - } - - /** - * Rotates this token - */ - default void rotate() { - } - - /** - * @return true when this token can be rotated - */ - default boolean rotatable() { - return false; - } - - /** - * Performs reverse-rotation on this token - */ - 
default void unrotate() { - } - - /** - * Uses the given visitor to walk over the AST starting with this token - * @param visitor token visitor - */ - @Override - default void visit(Consumer> visitor) { - Arrays.stream(children()) - .filter(Objects::nonNull) - .forEach(child -> child.visit(visitor)); - PartialToken.super.visit(visitor); - } -} - diff --git a/src/main/java/com/onkiup/linker/parser/token/ConsumingToken.java b/src/main/java/com/onkiup/linker/parser/token/ConsumingToken.java deleted file mode 100644 index 6aae21c..0000000 --- a/src/main/java/com/onkiup/linker/parser/token/ConsumingToken.java +++ /dev/null @@ -1,296 +0,0 @@ -package com.onkiup.linker.parser.token; - -import java.io.Serializable; -import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; -import java.util.function.Function; - -import com.onkiup.linker.parser.ParserLocation; -import com.onkiup.linker.parser.Rule; -import com.onkiup.linker.parser.TestResult; -import com.onkiup.linker.parser.TokenMatcher; -import com.onkiup.linker.parser.TokenTestResult; -import com.onkiup.linker.parser.util.LoggerLayout; -import com.onkiup.linker.parser.util.ParserError; - -/** - * Interfacde that represents any token that can advance parser by consuming characters from parser's buffer - * @param type of resulting token - */ -public interface ConsumingToken extends PartialToken, Serializable { - - /** - * Provides TokenMatcher for default consumption algorithm - * @param matcher the matcher to use against consumed characters - */ - default void setTokenMatcher(TokenMatcher matcher) { - ConsumptionState.create(this, matcher); - } - - /** - * Callback method invoked upon partial or full match against consumed characters - * @param token resulting token, as provided by previously configured matcher - */ - void onConsumeSuccess(Object token); - - /** - * Attempts to consume next character - * @return true if consumption should continue - */ - default boolean consume() { - ConsumptionState 
consumption = ConsumptionState.of(this).orElseThrow(() -> new ParserError("No consumption state found (call ConsumingToken::setTokenMatcher to create it first)", this)); - - boolean doNext = consumption.consume(); - - TokenTestResult result = consumption.test(); - - if (result.isFailed()) { - log("failed; switching to lookahead mode"); - consumption.setFailed(); - consumption.lookahead(); - consumption.clear(); - onFail(); - return false; - } else if (result.isMatch()) { - consumption.trim(result.getTokenLength()); - log("matched at position {}", consumption.end().position()); - onConsumeSuccess(result.getToken()); - onPopulated(consumption.end()); - return false; - } - - if (result.isMatchContinue()) { - log("matched; continuing..."); - onConsumeSuccess(result.getToken()); - onPopulated(consumption.end()); - } else if (consumption.hitEnd()) { - onFail(); - } - - return doNext; - } - - @Override - default void invalidate() { - PartialToken.super.invalidate(); - ConsumptionState.discard(this); - } - - @Override - default void atEnd() { - parent().ifPresent(CompoundToken::atEnd); - } - - /** - * A helper class that implements major parts of consumption algorithm and stores consumption states for ConsumingToken instances - */ - class ConsumptionState { - private static final ConcurrentHashMap states = new ConcurrentHashMap<>(); - private static final ConcurrentHashMap buffers = new ConcurrentHashMap<>(); - - /** - * Returns previously registered ConsumptionState for the given token - * @param token token whose ConsumptionState should be returned - * @return ConsumptionState instance for provided token - */ - private static synchronized Optional of(ConsumingToken token) { - return Optional.ofNullable(states.get(token)); - } - - /** - * Creates and registers a new ConsumptionState for the given token - * @param token token for which a new ConsumptionState should be created - * @param tester function that will be used to match consumed characters - */ - private static 
void create(ConsumingToken token, Function tester) { - states.put(token, new ConsumptionState(token, tester)); - } - - /** - * Register given ConsuptionState for given token - * @param token a token for which given ConsumptionState should be registered - * @param state ConsumptionState that sould be registered for the given token - */ - static void inject(ConsumingToken token, ConsumptionState state) { - states.put(token, state); - } - - /** - * Discards ConsumptionState registered for given token - * @param token token whose ConsumptionState should be discarded - */ - private static void discard(ConsumingToken token) { - states.remove(token); - } - - /** - * List of characters to ignore at the beginning of consumption - */ - private final String ignoredCharacters; - /** - * The tester used to match consumed characters - */ - private final Function tester; - /** - * Pointers to the buffer - */ - private ParserLocation start, end, ignored; - /** - * Failure flag - */ - private boolean failed; - /** - * the token with which this consumption is associated - */ - private ConsumingToken token; - /** - * parser buffer - */ - private CharSequence buffer; - - private boolean hitEnd = false; - - private ConsumptionState(ConsumingToken token, Function tester) { - this.token = token; - this.ignoredCharacters = token.ignoredCharacters(); - this.tester = tester; - this.start = this.end = this.ignored = token.location(); - this.buffer = rootBuffer(token.root()).orElseThrow(() -> - new RuntimeException("No root buffer registered for token " + token)); - } - - ConsumptionState(ParserLocation start, ParserLocation ignored, ParserLocation end) { - this.ignoredCharacters = ""; - this.tester = null; - this.start = start; - this.end = end; - this.ignored = ignored; - } - - /** - * Stores a parser buffer used to populate given AST root - * @param rootToken AST root whose parser buffer should be stored - * @param buffer buffer used to populate the given AST - * @param - */ - public 
static void rootBuffer(PartialToken rootToken, CharSequence buffer) { - buffers.put(rootToken, buffer); - } - - /** - * @param root root token of the AST - * @return parser buffer used to populate given AST - */ - public static Optional rootBuffer(PartialToken root) { - return Optional.ofNullable(buffers.get(root)); - } - - /** - * @return consumed characters minus ignored prefix - */ - protected CharSequence buffer() { - return buffer.subSequence(ignored.position(), end.position()); - } - - /** - * @return consumed characters, including ignored prefix - */ - protected CharSequence consumed() { - return buffer.subSequence(start.position(), end.position()); - } - - /** - * @return location in parser's buffer immediately after the last consumed character or consumption start location when no characters were consumed - */ - protected ParserLocation end() { - return end; - } - - /** - * @param character character to test - * @return true if provided character should be ignored and no non-ignorable characters were previously consumed - */ - private boolean ignored(int character) { - return ignoredCharacters != null && ignoredCharacters.chars().anyMatch(ignored -> ignored == character); - } - - /** - * Consumes the character at consumption's end location and advances that location if the character was consumed - * @return true if consumption process can proceed to the next character or false if the consumption should be stopped - */ - private boolean consume() { - if (end.position() < buffer.length()) { - char consumed = buffer.charAt(end.position()); - end = end.advance(consumed); - if (end.position() - ignored.position() < 2 && ignored(consumed)) { - ignored = ignored.advance(consumed); - token.log("Ignored '{}' ({} - {} - {})", LoggerLayout.sanitize(consumed), start.position(), ignored.position(), end.position()); - return true; - } - token.log("Consumed '{}' ({} - {} - {})", LoggerLayout.sanitize(consumed), start.position(), ignored.position(), end.position()); - 
return true; - } else { - hitEnd = true; - } - return false; - } - - /** - * @return true if consumption ended at parser buffer's end - */ - private boolean hitEnd() { - return hitEnd; - } - - /** - * tests configured TokenMatcher against consumed characters (excluding ignored prefix) - * @return reported by TokenMatcher test result structure - */ - private TokenTestResult test() { - if (end.position() - ignored.position() == 0) { - return TestResult.continueNoMatch(); - } - return tester.apply(buffer()); - } - - /** - * Marks this consumption as failed - */ - private void setFailed() { - failed = true; - } - - /** - * @return true if this consumption was marked as failed - */ - private boolean failed() { - return failed; - } - - /** - * adjusts internal buffer pointers so that the number of consumed after ignored prefix characters appears to be equal to the given number - * @param size the new size for consumption buffer - */ - private void trim(int size) { - end = ignored.advance(buffer().subSequence(0, size)); - } - - /** - * reinitializes internal buffer pointers - */ - private void clear() { - end = ignored = start; - } - - /** - * performs lookahead on consumption's token - */ - private void lookahead() { - token.lookahead(buffer, ignored.position()); - token.log("Lookahead complete"); - token.onFail(); - } - - } -} - diff --git a/src/main/java/com/onkiup/linker/parser/token/EnumToken.java b/src/main/java/com/onkiup/linker/parser/token/EnumToken.java index e0ad28b..2aaadb7 100644 --- a/src/main/java/com/onkiup/linker/parser/token/EnumToken.java +++ b/src/main/java/com/onkiup/linker/parser/token/EnumToken.java @@ -4,10 +4,16 @@ import java.lang.reflect.Field; import java.util.ArrayList; import java.util.HashMap; +import java.util.Iterator; +import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Optional; +import java.util.Set; +import javax.rmi.CORBA.Util; + +import com.google.common.annotations.VisibleForTesting; import 
com.onkiup.linker.parser.ParserLocation; import com.onkiup.linker.parser.PatternMatcher; import com.onkiup.linker.parser.Rule; @@ -16,7 +22,9 @@ import com.onkiup.linker.parser.TokenMatcher; import com.onkiup.linker.parser.TokenTestResult; import com.onkiup.linker.parser.annotation.CapturePattern; +import com.onkiup.linker.parser.annotation.MatchTerminal; import com.onkiup.linker.parser.util.ParserError; +import com.onkiup.linker.parser.util.Utils; /** * Partial token used to populate Enum fields @@ -26,21 +34,25 @@ public class EnumToken extends AbstractToken implements ConsumingToken, Serializable { private Class enumType; - private transient int nextVariant = 0; private transient Map variants = new HashMap<>(); private X token; - private boolean failed, populated; - private String ignoreCharacters; + private List variantKeys; + private int currentKeyIndex = 0; - public EnumToken(CompoundToken parent, Field field, Class enumType, ParserLocation location) { - super(parent, field, location); + public EnumToken(CompoundToken parent, int position, Field field, Class enumType, ParserLocation location) { + super(parent, position, field, location); this.enumType = enumType; + boolean ignoreCaseFromTarget = Utils.ignoreCase(field); for (X variant : enumType.getEnumConstants()) { try { Field variantField = enumType.getDeclaredField(variant.name()); - CapturePattern annotation = variantField.getAnnotation(CapturePattern.class); - TokenMatcher matcher = annotation == null ? new TerminalMatcher(variant.toString()) : new PatternMatcher(annotation); + CapturePattern pattern = variantField.getAnnotation(CapturePattern.class); + MatchTerminal terminal = variantField.getAnnotation(MatchTerminal.class); + boolean ignoreCase = ignoreCaseFromTarget || Utils.ignoreCase(variantField); + TokenMatcher matcher = pattern != null ? new PatternMatcher(pattern, ignoreCase) : + terminal != null ? 
new TerminalMatcher(terminal.value(), ignoreCase) : + new TerminalMatcher(variant.toString(), ignoreCase); variants.put(variant, matcher); } catch (ParserError pe) { throw pe; @@ -49,31 +61,35 @@ public EnumToken(CompoundToken parent, Field field, Class enumType, ParserLoc } } + variantKeys = new ArrayList<>(variants.keySet()); + setTokenMatcher(buffer -> { - if (variants.size() == 0) { + if (variantKeys.size() == 0) { return TestResult.fail(); } - List failed = new ArrayList<>(); - for (Map.Entry entry : variants.entrySet()) { - TokenTestResult result = entry.getValue().apply(buffer); - if (result.isMatch()) { - return TestResult.match(result.getTokenLength(), entry.getKey()); - } else if (result.isFailed()) { - failed.add(entry.getKey()); + TokenTestResult result; + do { + TokenMatcher variantMatcher = variants.get(variantKeys.get(currentKeyIndex)); + result = variantMatcher.apply(buffer); + if (result.isFailed()) { + if (++currentKeyIndex < variantKeys.size()) { + result = null; + } else { + return result; + } } - } - - failed.forEach(variants::remove); - - if (variants.size() == 0) { - return TestResult.fail(); - } + } while (result == null); - return TestResult.continueNoMatch(); + return result; }); } + @VisibleForTesting + void reset() { + currentKeyIndex = 0; + } + @Override public Optional token() { return Optional.ofNullable(token); @@ -91,9 +107,27 @@ public void atEnd() { @Override public void onConsumeSuccess(Object value) { - token = (X) value; - this.populated = true; + token = variantKeys.get(currentKeyIndex); } + /** + * Handler that will be invoked upon token matching failure + */ + @Override + public void onFail() { + super.onFail(); + reset(); + } + + /** + * Handler for token population event + * + * @param end location after the last character matched with this token + */ + @Override + public void onPopulated(ParserLocation end) { + super.onPopulated(end); + reset(); + } } diff --git 
a/src/main/java/com/onkiup/linker/parser/token/NumberToken.java b/src/main/java/com/onkiup/linker/parser/token/NumberToken.java new file mode 100644 index 0000000..ae6cd2a --- /dev/null +++ b/src/main/java/com/onkiup/linker/parser/token/NumberToken.java @@ -0,0 +1,51 @@ +package com.onkiup.linker.parser.token; + +import java.lang.reflect.Field; +import java.util.Optional; + +import com.onkiup.linker.parser.NumberMatcher; +import com.onkiup.linker.parser.ParserLocation; + +public class NumberToken extends AbstractToken implements ConsumingToken { + + private X token; + private Class tokenType; + + public NumberToken(CompoundToken parent, int position, Field targetField, ParserLocation location) { + super(parent, position, targetField, location); + this.tokenType = (Class)targetField.getType(); + + setTokenMatcher(new NumberMatcher(tokenType)); + } + + @Override + public void onConsumeSuccess(Object token) { + this.token = (X) token; + } + + @Override + public Optional token() { + return Optional.ofNullable(token); + } + + /** + * @return the type of resulting java token + */ + @Override + public Class tokenType() { + return tokenType; + } + + /** + * A callback that is invoked when token matching hits end of parser input + * An invocation should result in either token failure or population + */ + @Override + public void atEnd() { + if (token == null) { + onFail(); + } + onPopulated(end()); + ConsumingToken.super.atEnd(); + } +} diff --git a/src/main/java/com/onkiup/linker/parser/token/PartialToken.java b/src/main/java/com/onkiup/linker/parser/token/PartialToken.java deleted file mode 100644 index ae208c1..0000000 --- a/src/main/java/com/onkiup/linker/parser/token/PartialToken.java +++ /dev/null @@ -1,516 +0,0 @@ -package com.onkiup.linker.parser.token; - -import java.io.IOException; -import java.io.InputStream; -import java.io.ObjectInputStream; -import java.io.ObjectOutputStream; -import java.io.OutputStream; -import java.io.Serializable; -import 
java.lang.reflect.Field; -import java.util.LinkedList; -import java.util.List; -import java.util.Objects; -import java.util.Optional; -import java.util.function.Consumer; -import java.util.function.Function; -import java.util.function.Predicate; - -import org.slf4j.Logger; - -import com.onkiup.linker.parser.ParserLocation; -import com.onkiup.linker.parser.Rule; -import com.onkiup.linker.parser.TokenGrammar; -import com.onkiup.linker.parser.annotation.AdjustPriority; -import com.onkiup.linker.parser.annotation.MetaToken; -import com.onkiup.linker.parser.annotation.OptionalToken; -import com.onkiup.linker.parser.annotation.SkipIfFollowedBy; -import com.onkiup.linker.parser.util.LoggerLayout; -import com.onkiup.linker.parser.util.ParserError; -import com.onkiup.linker.parser.util.TextUtils; -import com.sun.istack.internal.NotNull; -import com.sun.istack.internal.Nullable; - -/** - * Generic interface for structures used to populate tokens - * @param - */ -public interface PartialToken extends Serializable { - - /** - * Creates a new PartialToken for provided field - * @param parent parent token - * @param field the field for which a new PartialToken should be created - * @param position token position in parser's buffer - * @return created PartialToken - */ - static PartialToken forField(@Nullable CompoundToken parent, @NotNull Field field, ParserLocation position) { - - if (position == null) { - throw new ParserError("Child token position cannot be null", parent); - } - - Class fieldType = field.getType(); - return forField(parent, field, fieldType, position); - } - - /** - * Creates a new PartialToken of given type for given field - * @param parent parent token - * @param field field for which a new PartialToken will be created - * @param tokenType the type of the resulting token - * @param position token position in parser's buffer - * @param - * @return created PartialToken - */ - static PartialToken forField(CompoundToken parent, Field field, Class tokenType, 
ParserLocation position) { - if (tokenType.isArray()) { - return new CollectionToken(parent, field, tokenType, position); - } else if (Rule.class.isAssignableFrom(tokenType)) { - if (!TokenGrammar.isConcrete(tokenType)) { - return new VariantToken(parent, field, tokenType, position); - } else { - return new RuleToken(parent, field, tokenType, position); - } - } else if (tokenType == String.class) { - return (PartialToken) new TerminalToken(parent, field, tokenType, position); - } else if (tokenType.isEnum()) { - return (PartialToken) new EnumToken(parent, field, tokenType, position); - } - throw new IllegalArgumentException("Unsupported field type: " + tokenType); - } - - /** - * Reads optionality condition for the field - * @param field field to read optionality condition for - * @return optionality condition or empty - */ - static Optional getOptionalCondition(Field field) { - if (field == null) { - return Optional.empty(); - } - CharSequence result = null; - if (field.isAnnotationPresent(OptionalToken.class)) { - result = field.getAnnotation(OptionalToken.class).whenFollowedBy(); - } else if (field.isAnnotationPresent(SkipIfFollowedBy.class)) { - result = field.getAnnotation(SkipIfFollowedBy.class).value(); - } - - return Optional.ofNullable(result == null || result.length() == 0 ? 
null : result); - } - - /** - * @param field field to check for presence of OptionalToken or SkipIfFollowedBy annotations - * @return true if the field is annotated with either {@link OptionalToken} or {@link SkipIfFollowedBy} - */ - static boolean hasOptionalAnnotation(Field field) { - return field != null && (field.isAnnotationPresent(OptionalToken.class) || field.isAnnotationPresent(SkipIfFollowedBy.class)); - } - - /** - * Context-aware field optionality checks - * @param owner Context to check - * @param field Field to check - * @return true if the field should be optional in this context - */ - static boolean isOptional(CompoundToken owner, Field field) { - try { - if (field.isAnnotationPresent(OptionalToken.class)) { - owner.log("Performing context-aware optionality check for field ${}", field); - OptionalToken optionalToken = field.getAnnotation(OptionalToken.class); - boolean result; - if (optionalToken.whenFieldIsNull().length() != 0) { - final String fieldName = optionalToken.whenFieldIsNull(); - result = testContextField(owner, fieldName, Objects::isNull); - owner.log("whenFieldIsNull({}) == {}", fieldName, result); - } else if (optionalToken.whenFieldNotNull().length() != 0) { - final String fieldName = optionalToken.whenFieldNotNull(); - result = testContextField(owner, fieldName, Objects::nonNull); - owner.log("whenFieldNotNull({}) == {}", fieldName, result); - } else { - result = optionalToken.whenFollowedBy().length() == 0; - owner.log("No context-aware conditions found; isOptional = {}", result); - } - return result; - } - - return false; - } catch (Exception e) { - throw new ParserError("Failed to determine if field " + field.getName() + " should be optional", owner); - } - } - - /** - * Tests if given field has context-aware optionality condition and should be optional in the current context - * @param owner the token that contains the field - * @param fieldName the name of the field - * @param tester Predicate to use in the test - * @return 
test result - * @throws NoSuchFieldException - * @throws IllegalAccessException - */ - static boolean testContextField(CompoundToken owner, String fieldName, Predicate tester) - throws NoSuchFieldException, IllegalAccessException { - Field targetField = owner.tokenType().getField(fieldName); - targetField.setAccessible(true); - boolean result = tester.test(targetField.get(owner.token())); - return result; - } - - /** - * Loads previously serialized PartialToken from provided InputStream - * @param is the InputStream to read a PartialToken from - * @return deserialized PartialToken - * @throws IOException - * @throws ClassNotFoundException - */ - static PartialToken load(InputStream is) throws IOException, ClassNotFoundException { - ObjectInputStream ois = new ObjectInputStream(is); - Object result = ois.readObject(); - if (result instanceof PartialToken) { - return (PartialToken)result; - } - String resultType = result == null ? "null" : result.getClass().getName(); - throw new IllegalArgumentException(resultType + " is not a PartialToken"); - } - - /** - * @return Java representation of populated token - */ - Optional token(); - - /** - * @return the type of resulting java token - */ - Class tokenType(); - - /** - * Called by parser to detect if this token is populated - * The result of this method should always be calculated - */ - boolean isPopulated(); - - /** - * Resets population flag for this token - * (Usually invoked on populated tokens with untested variants after one of the following tokens fails) - */ - void dropPopulated(); - - /** - * @return true if this token failed to match parser input - */ - boolean isFailed(); - - /** - * @return true if this token was marked as optional - */ - boolean isOptional(); - - /** - * @return parent token or empty if this token is the root AST token - */ - Optional> parent(); - - /** - * @return the field for which this PartialToken was created - */ - Optional targetField(); - - /** - * @return Token's location in 
parser input - */ - ParserLocation location(); - - /** - * @return the next position in parser input immediately after the last character that matched this token - */ - ParserLocation end(); - - /** - * Marks this token as optional - */ - void markOptional(); - - /** - * Callback method invoked upon token population - * @param end the next position in parser input immediately after the last character that matched this token - */ - void onPopulated(ParserLocation end); - - /** - * @return String representation of the token used for logging - */ - String tag(); - - /** - * A callback that is invoked when token matching hits end of parser input - * An invocation should result in either token failure or population - */ - void atEnd(); - - /** - * Using reversed breadth-first search algorithm, traces back from this token to the next token with untested alternatives - */ - default void traceback() { - onFail(); - } - - /** - * @return the list of metatokens for this token - */ - List metaTokens(); - - /** - * Stores giben object as a metatoken to this token - * @param metatoken - */ - void addMetaToken(Object metatoken); - - /** - * @return true if this token was marked as {@link MetaToken} - */ - default boolean isMetaToken() { - return tokenType().isAnnotationPresent(MetaToken.class); - } - - /** - * @return all characters consumed by the token and its children - */ - default CharSequence source() { - PartialToken root = root(); - return ConsumingToken.ConsumptionState.rootBuffer(root) - .map(buffer -> buffer.subSequence(position(), end().position())) - .orElse("?!"); - } - - /** - * @return a logger associated with this token - */ - Logger logger(); - - /** - * Logs a DEBUG-level message from this token - * @see String#format(String, Object...) - * @param message template for the message - * @param arguments template arguments - */ - default void log(CharSequence message, Object... 
arguments) { - logger().debug(message.toString(), arguments); - } - /** - * Logs an ERROR-level message from this token - * @param message the message to log - * @param error cause exception - */ - default void error(CharSequence message, Throwable error) { - logger().error(message.toString(), error); - } - - - /** - * Called upon token failures - */ - default void onFail() { - log("!!! FAILED !!!"); - invalidate(); - } - - /** - * Called on failed tokens - * @return true if the token should continue consumption, false otherwise - */ - default void lookahead(CharSequence source, int from) { - log("performing lookahead at position {}", from); - targetField() - .flatMap(PartialToken::getOptionalCondition) - .ifPresent(condition -> { - int start = TextUtils.firstNonIgnoredCharacter(this, source, from); - CharSequence buffer = source.subSequence(start, start + condition.length()); - log("Loookahead '{}' on '{}'", LoggerLayout.sanitize(condition), LoggerLayout.sanitize(buffer)); - if (!isOptional() && Objects.equals(condition, buffer)) { - log("Optional condition match: '{}' == '{}'", LoggerLayout.sanitize(condition), LoggerLayout.sanitize(buffer)); - markOptional(); - } - }); - - parent() - .filter(CompoundToken::onlyOneUnfilledChildLeft) - .filter(p -> p != this) - .ifPresent(p -> { - log("Delegating lookahead to parent {}", p.tag()); - p.lookahead(source, from); - }); - } - - /** - * Recursively passes this token and its parent tokens to provided predicate until the AST root and returnes the first token that matched the predicate - * @param comparator the predicate to use on path tokens - * @return first matched token or empty - */ - default Optional> findInPath(Predicate comparator) { - if (comparator.test(this)) { - return Optional.of(this); - } - - return parent() - .flatMap(parent -> parent.findInPath(comparator)); - } - - /** - * @return Token offset relative to the start of parser input - */ - default int position() { - ParserLocation location = location(); - 
if (location == null) { - return 0; - } - return location.position(); - } - - /** - * @return base priority for this token to be used by {@link VariantToken} - */ - default int basePriority() { - int result = 0; - Class tokenType = tokenType(); - if (tokenType.isAnnotationPresent(AdjustPriority.class)) { - AdjustPriority adjustment = tokenType.getAnnotation(AdjustPriority.class); - result += adjustment.value(); - } - return result; - } - - /** - * @return true if this token's priority should be added to parent token's priority - */ - default boolean propagatePriority() { - Class tokenType = tokenType(); - if (tokenType.isAnnotationPresent(AdjustPriority.class)) { - return tokenType.getAnnotation(AdjustPriority.class).propagate(); - } - - return false; - } - - /** - * A callback invoked on rotatable tokens after token population so that algebraic and similar tokens reorder themselves according to their priorities - */ - default void sortPriorities() { - - } - - /** - * Forcefully sets java representation for this token - * @param token new java representation for this token - */ - default void token(X token) { - throw new RuntimeException("Unsupported"); - } - - /** - * A callback invoked every time this token is detached from its AST - * This primarily intended for asynchronous token evaluation algoritms - */ - default void invalidate() { - } - - /** - * Using BFS algorithm, passes this token and its sub-tree tokens to the visitor - * @param visitor - */ - default void visit(Consumer> visitor) { - visitor.accept(this); - } - - /** - * @return String containing all characters to ignore for this token - */ - default String ignoredCharacters() { - return parent().map(CompoundToken::ignoredCharacters).orElse(""); - } - - /** - * @return true if this token has untested alternatives - */ - default boolean alternativesLeft() { - return false; - } - - /** - * @return root token of the AST to which this token belongs to - */ - default PartialToken root() { - PartialToken 
current = this; - while(true) { - PartialToken parent = current.parent().orElse(null); - if (parent == null) { - return current; - } - current = parent; - } - } - - /** - * @param length the number of characters to return - * @return last X characters matched this token - */ - default CharSequence tail(int length) { - return LoggerLayout.ralign(LoggerLayout.sanitize(source().toString()), length); - } - - /** - * @param length the number of characters to return - * @return first X characters matched this token - */ - default CharSequence head(int length) { - return LoggerLayout.head(LoggerLayout.sanitize(source()), 50); - } - - /** - * @return a list of tokens including this token and its parents up to the root token of the AST - */ - default LinkedList> path() { - LinkedList path = parent() - .map(PartialToken::path) - .orElseGet(LinkedList::new); - path.add(this); - return path; - } - - /** - * @return String representation of the AST with this token as the AST root - */ - default CharSequence dumpTree() { - return dumpTree(PartialToken::tag); - } - - /** - * @param formatter formatter function to use on tree nodes - * @return String representation of the AST with this token as AST root - */ - default CharSequence dumpTree(Function, CharSequence> formatter) { - return dumpTree(0, "", "", formatter); - } - - /** - * Dumps AST represented by this token into a String - * @param offset tabulation offset in the tree - * @param prefix prefix to use when rendering this token - * @param childPrefix prefix to use when rendering this token's children - * @param formatter formatter function - * @return String representation of the AST - */ - default CharSequence dumpTree(int offset, CharSequence prefix, CharSequence childPrefix, Function, CharSequence> formatter) { - return String.format("%s%s\n", prefix, formatter.apply(this)); - } - - /** - * Serializes this token into an output stream - * @param os OuputStream to write this token into - * @throws IOException - */ - 
default void store(OutputStream os) throws IOException { - ObjectOutputStream oos = new ObjectOutputStream(os); - oos.writeObject(this); - } -} - diff --git a/src/main/java/com/onkiup/linker/parser/token/Rotatable.java b/src/main/java/com/onkiup/linker/parser/token/Rotatable.java deleted file mode 100644 index dfede75..0000000 --- a/src/main/java/com/onkiup/linker/parser/token/Rotatable.java +++ /dev/null @@ -1,23 +0,0 @@ -package com.onkiup.linker.parser.token; - -/** - * An interface for rotatable ASTs - */ -public interface Rotatable { - - /** - * @return true if this token's AST can be rotated - */ - boolean rotatable(); - - /** - * Performs a clockwise rotation on AST represented by this token - */ - void rotateForth(); - - /** - * Performs a counter clockwise rotation on AST represented by this token - */ - void rotateBack(); -} - diff --git a/src/main/java/com/onkiup/linker/parser/token/RuleToken.java b/src/main/java/com/onkiup/linker/parser/token/RuleToken.java index 1c7d58b..0f0a7f7 100644 --- a/src/main/java/com/onkiup/linker/parser/token/RuleToken.java +++ b/src/main/java/com/onkiup/linker/parser/token/RuleToken.java @@ -6,7 +6,6 @@ import java.lang.reflect.Field; import java.lang.reflect.Method; import java.lang.reflect.Modifier; -import java.util.Arrays; import java.util.Optional; import java.util.function.Function; @@ -14,6 +13,7 @@ import com.onkiup.linker.parser.Rule; import com.onkiup.linker.parser.annotation.IgnoreCharacters; import com.onkiup.linker.parser.util.LoggerLayout; +import com.onkiup.linker.parser.util.Utils; /** * PartialToken used to populate concrete Rule instances @@ -29,8 +29,8 @@ public class RuleToken extends AbstractToken implements Compo private boolean rotated = false; private transient ParserLocation lastTokenEnd; - public RuleToken(CompoundToken parent, Field field, Class type, ParserLocation location) { - super(parent, field, location); + public RuleToken(CompoundToken parent, int position, Field field, Class type, 
ParserLocation location) { + super(parent, position, field, location); this.tokenType = type; this.lastTokenEnd = location; @@ -41,9 +41,8 @@ public RuleToken(CompoundToken parent, Field field, Class type, ParserLocatio throw new IllegalArgumentException("Failed to instantiate rule token " + type, e); } - fields = Arrays.stream(type.getDeclaredFields()) - .filter(childField -> !Modifier.isTransient(childField.getModifiers())) - .toArray(Field[]::new); + // 0.9: token inheritance + fields = Utils.getTokenFields(type); values = new PartialToken[fields.length]; @@ -102,7 +101,7 @@ public Optional> nextChild() { if (values[nextChild] == null || values[nextChild].isFailed() || values[nextChild].isPopulated()) { Field childField = fields[nextChild]; log("Creating partial token for child#{} at position {}", nextChild, lastTokenEnd.position()); - values[nextChild] = PartialToken.forField(this, childField, lastTokenEnd); + values[nextChild] = PartialToken.forField(this, nextChild , childField, lastTokenEnd); } log("nextChild#{} = {}", nextChild, values[nextChild].tag()); return Optional.of(values[nextChild++]); @@ -306,7 +305,7 @@ public boolean rotatable() { public void rotateForth() { log("Rotating"); token.invalidate(); - RuleToken wrap = new RuleToken(this, fields[0], fields[0].getType(), location()); + RuleToken wrap = new RuleToken(this, 0, fields[0], fields[0].getType(), location()); wrap.nextChild = nextChild; nextChild = 1; PartialToken[] wrapValues = wrap.values; @@ -430,5 +429,18 @@ public CharSequence dumpTree(int offset, CharSequence prefix, CharSequence child } return result; } + + @Override + public int childCount() { + return values.length; + } + + @Override + public Optional> child(int position) { + if (position < 0 || position >= values.length) { + throw new ArrayIndexOutOfBoundsException(); + } + return Optional.ofNullable(values[position]); + } } diff --git a/src/main/java/com/onkiup/linker/parser/token/TerminalToken.java 
b/src/main/java/com/onkiup/linker/parser/token/TerminalToken.java index bf9fe4b..ae959df 100644 --- a/src/main/java/com/onkiup/linker/parser/token/TerminalToken.java +++ b/src/main/java/com/onkiup/linker/parser/token/TerminalToken.java @@ -25,8 +25,8 @@ public class TerminalToken extends AbstractToken implements ConsumingTok private transient TokenMatcher matcher; private CharSequence token; - public TerminalToken(CompoundToken parent, Field field, Class tokenType, ParserLocation location) { - super(parent, field, location); + public TerminalToken(CompoundToken parent, int position, Field field, Class tokenType, ParserLocation location) { + super(parent, position, field, location); this.matcher = TokenMatcher.forField(parent, field, tokenType); this.setTokenMatcher(matcher); diff --git a/src/main/java/com/onkiup/linker/parser/token/VariantToken.java b/src/main/java/com/onkiup/linker/parser/token/VariantToken.java index 5c4b06b..176d28f 100644 --- a/src/main/java/com/onkiup/linker/parser/token/VariantToken.java +++ b/src/main/java/com/onkiup/linker/parser/token/VariantToken.java @@ -10,11 +10,7 @@ import java.util.function.Function; import java.util.stream.Collectors; -import org.reflections.Reflections; -import org.reflections.scanners.SubTypesScanner; -import org.reflections.util.ClasspathHelper; -import org.reflections.util.ConfigurationBuilder; - +import com.onkiup.linker.parser.ParserContext; import com.onkiup.linker.parser.ParserLocation; import com.onkiup.linker.parser.Rule; import com.onkiup.linker.parser.TokenGrammar; @@ -35,18 +31,13 @@ * -- it performs basic lexing on parser input by tagging positions in parser buffer as compatible/incompatible. 
This information becomes crucial * to parser performance by allowing it to skip previously tested and failed grammar paths after following a non-matching grammar "dead end" paths * - * This class can be additionally optimized by testing grammar junctions concurrently + * This class can be additionally optimized by implementing concurrent grammar junction testing * @param the grammar junction class to be resolved */ public class VariantToken extends AbstractToken implements CompoundToken, Serializable { private static boolean excludeMatchingParents = true; - private static final Reflections reflections = new Reflections(new ConfigurationBuilder() - .setUrls(ClasspathHelper.forClassLoader(TokenGrammar.class.getClassLoader())) - .setScanners(new SubTypesScanner(true)) - ); - /** * Dynamic priorities registry */ @@ -62,8 +53,8 @@ public class VariantToken extends AbstractToken implements Co private String ignoreCharacters = ""; private transient List> tried = new LinkedList<>(); - public VariantToken(CompoundToken parent, Field field, Class tokenType, ParserLocation location) { - super(parent, field, location); + public VariantToken(CompoundToken parent, int position, Field field, Class tokenType, ParserLocation location) { + super(parent, position, field, location); this.tokenType = tokenType; if (TokenGrammar.isConcrete(tokenType)) { @@ -74,13 +65,17 @@ public VariantToken(CompoundToken parent, Field field, Class tokenType, Parse variants = tokenType.getAnnotation(Alternatives.class).value(); } else { final ConcurrentHashMap typePriorities = new ConcurrentHashMap<>(); - variants = (reflections.getSubTypesOf(tokenType).stream() + variants = ParserContext.get().implementations(tokenType) .filter(TokenGrammar::isConcrete) .filter(type -> { if (type.isAnnotationPresent(IgnoreVariant.class)) { log("Ignoring variant {} -- marked with @IgnoreVariant", type.getSimpleName()); return false; } + if (type instanceof NonParseable) { + log("Ignoring non-parseable variant {}", 
type.getSimpleName()); + return false; + } if (isLeftRecursive(type)) { log("Ignoring variant {} -- left recursive", type.getSimpleName()); return false; @@ -116,7 +111,7 @@ public VariantToken(CompoundToken parent, Field field, Class tokenType, Parse } return result; }) - .toArray(Class[]::new)); + .toArray(Class[]::new); } values = new PartialToken[variants.length]; @@ -160,7 +155,7 @@ public Optional> nextChild() { log("Creating partial token for nextChild#{}", nextVariant); updateDynPriority(variants[nextVariant], 10); tried.add(variants[nextVariant]); - values[nextVariant] = PartialToken.forField(this, targetField().orElse(null), variants[nextVariant], location()); + values[nextVariant] = PartialToken.forField(this, nextVariant, targetField().orElse(null), variants[nextVariant], location()); } log("nextChild#{} = {}", nextVariant, values[nextVariant].tag()); @@ -490,5 +485,15 @@ public CharSequence dumpTree(int offset, CharSequence prefix, CharSequence child } return result; } + + @Override + public int childCount() { + return 1; + } + + @Override + public Optional> child(int i) { + return Optional.of(values[currentChild()]); + } } diff --git a/src/main/java/com/onkiup/linker/parser/util/ParserError.java b/src/main/java/com/onkiup/linker/parser/util/ParserError.java deleted file mode 100644 index 6ab2542..0000000 --- a/src/main/java/com/onkiup/linker/parser/util/ParserError.java +++ /dev/null @@ -1,48 +0,0 @@ -package com.onkiup.linker.parser.util; - -import java.util.Optional; - -import com.onkiup.linker.parser.token.PartialToken; - -public class ParserError extends RuntimeException { - - private PartialToken source; - - public ParserError(String msg, Optional> source) { - this(msg, source.orElse(null)); - } - - public ParserError(String msg, PartialToken source) { - super(msg); - this.source = source; - } - - public ParserError(String msg, Optional> source, Throwable cause) { - this(msg, source.orElse(null), cause); - } - - public ParserError(String msg, 
PartialToken source, Throwable cause) { - super(msg, cause); - this.source = source; - } - - @Override - public String toString() { - StringBuilder result = new StringBuilder("Parser error at position "); - result.append(source == null ? "" : source.position()) - .append(": ") - .append(getMessage()) - .append("\n"); - - PartialToken parent = source; - while(parent != null) { - result.append("\t") - .append(parent.toString()) - .append("\n"); - parent = (PartialToken) parent.parent().orElse(null); - } - return result.toString(); - } - -} - diff --git a/src/main/java/com/onkiup/linker/parser/util/Utils.java b/src/main/java/com/onkiup/linker/parser/util/Utils.java new file mode 100644 index 0000000..b951523 --- /dev/null +++ b/src/main/java/com/onkiup/linker/parser/util/Utils.java @@ -0,0 +1,36 @@ +package com.onkiup.linker.parser.util; + +import java.lang.reflect.Field; +import java.lang.reflect.Modifier; +import java.util.Arrays; +import java.util.Optional; + +import com.onkiup.linker.parser.Rule; +import com.onkiup.linker.parser.TokenGrammar; +import com.onkiup.linker.parser.annotation.IgnoreCase; + +public final class Utils { + private Utils() { + + } + + public static Field[] getTokenFields(Class source) { + // first, we need to iterate back to the "base" rule class + Class type = source; + do { + type = type.getSuperclass(); + } while (Rule.class.isAssignableFrom(type) && TokenGrammar.isConcrete(type)); + + return Arrays.stream(type.getDeclaredFields()) + .filter(childField -> !Modifier.isTransient(childField.getModifiers())) + .toArray(Field[]::new); + } + + public static boolean ignoreCase(Field forField) { + return Optional.ofNullable(forField) + .map(field -> field.isAnnotationPresent(IgnoreCase.class) ? field.getAnnotation(IgnoreCase.class) : + field.getType().isAnnotationPresent(IgnoreCase.class) ? 
field.getDeclaringClass().getAnnotation(IgnoreCase.class) : + null + ).map(IgnoreCase::value).orElse(false); + } +} diff --git a/src/test/java/com/onkiup/linker/parser/token/AbstractTokenTest.java b/src/test/java/com/onkiup/linker/parser/token/AbstractTokenTest.java index 3b7b890..6941cfe 100644 --- a/src/test/java/com/onkiup/linker/parser/token/AbstractTokenTest.java +++ b/src/test/java/com/onkiup/linker/parser/token/AbstractTokenTest.java @@ -1,94 +1,107 @@ package com.onkiup.linker.parser.token; -import static junit.framework.TestCase.assertTrue; -import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; - -import java.lang.reflect.Field; -import java.util.Optional; +import static org.junit.Assert.assertTrue; import org.junit.Test; -import org.junit.runner.RunWith; +import org.mockito.Mock; import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; import com.onkiup.linker.parser.ParserLocation; -@PrepareForTest({PartialToken.class, LoggerFactory.class}) -@RunWith(PowerMockRunner.class) public class AbstractTokenTest { @Test - public void testReadFlags() { - PowerMockito.mockStatic(PartialToken.class); - PowerMockito.when(PartialToken.getOptionalCondition(Mockito.any())).thenReturn(Optional.of("lalalalala")); - PowerMockito.when(PartialToken.hasOptionalAnnotation(Mockito.any())).thenReturn(true); + public void markOptional() { + AbstractToken abstractToken = Mockito.mock(AbstractToken.class); + Mockito.doCallRealMethod().when(abstractToken).markOptional(); + Mockito.when(abstractToken.isOptional()).thenCallRealMethod(); + abstractToken.markOptional(); + assertTrue(abstractToken.isOptional()); + } - AbstractToken token = Mockito.mock(AbstractToken.class); - Mockito.when(token.isOptional()).thenCallRealMethod(); - 
Mockito.when(token.optionalCondition()).thenCallRealMethod(); - Mockito.doCallRealMethod().when(token).readFlags(Mockito.any()); + @Test + public void dropPopulated() { + AbstractToken abstractToken = Mockito.mock(AbstractToken.class); + ParserLocation location = Mockito.mock(ParserLocation.class); + Mockito.doCallRealMethod().when(abstractToken).onPopulated(location); + Mockito.when(abstractToken.isPopulated()).thenCallRealMethod(); + Mockito.doCallRealMethod().when(abstractToken).dropPopulated(); + + abstractToken.onPopulated(location); + assertTrue(abstractToken.isPopulated()); + abstractToken.dropPopulated(); + assertFalse(abstractToken.isPopulated()); + } - token.readFlags(null); + @Test + public void isFailed() { + AbstractToken abstractToken = Mockito.mock(AbstractToken.class); + Mockito.doCallRealMethod().when(abstractToken).onFail(); + Mockito.when(abstractToken.isFailed()).thenCallRealMethod(); + abstractToken.onFail(); + assertTrue(abstractToken.isFailed()); + } - assertFalse(token.isOptional()); - assertEquals("lalalalala", token.optionalCondition().get()); + @Test + public void location() { + } - Mockito.when(PartialToken.getOptionalCondition(Mockito.any())).thenReturn(Optional.empty()); - token.readFlags(null); - assertTrue(token.isOptional()); - assertFalse(token.optionalCondition().isPresent()); + @Test + public void testLocation() { + } - Mockito.when(PartialToken.hasOptionalAnnotation(Mockito.any())).thenReturn(false); - token.readFlags(null); - assertFalse(token.isOptional()); - assertFalse(token.optionalCondition().isPresent()); + @Test + public void end() { } @Test - public void testToString() throws Exception { - Field field = AbstractToken.class.getDeclaredField("field"); - AbstractToken token = Mockito.mock(AbstractToken.class); - Mockito.when(token.toString()).thenCallRealMethod(); + public void parent() { + } - // target field present - Mockito.when(token.targetField()).thenReturn(Optional.of(field)); - 
assertEquals(AbstractToken.class.getName() + "$" + "field", token.toString()); - // target field not present - Mockito.when(token.targetField()).thenReturn(Optional.empty()); - assertTrue(token.toString().startsWith(AbstractToken.class.getName() + "$MockitoMock$")); + @Test + public void targetField() { } @Test - public void testOnPopulated() { - ParserLocation end = Mockito.mock(ParserLocation.class); - AbstractToken token = Mockito.mock(AbstractToken.class); - Mockito.doCallRealMethod().when(token).onPopulated(Mockito.any()); - Mockito.when(token.end()).thenCallRealMethod(); + public void onPopulated() { + ParserLocation location = Mockito.mock(ParserLocation.class); + AbstractToken abstractToken = Mockito.mock(AbstractToken.class); + Mockito.doCallRealMethod().when(abstractToken).onPopulated(location); + Mockito.when(abstractToken.isPopulated()).thenCallRealMethod(); + abstractToken.onPopulated(location); + assertTrue(abstractToken.isPopulated()); + } - token.onPopulated(end); - assertEquals(end, token.end()); + @Test + public void logger() { } @Test - public void testLogging() { - Logger logger = Mockito.mock(Logger.class); - PowerMockito.mockStatic(LoggerFactory.class); - Mockito.when(LoggerFactory.getLogger(Mockito.anyString())).thenReturn(logger); - AbstractToken token = Mockito.mock(AbstractToken.class); - Mockito.when(token.logger()).thenReturn(logger); - Mockito.doCallRealMethod().when(token).log(Mockito.any(), Mockito.any()); - Mockito.doCallRealMethod().when(token).error(Mockito.any(), Mockito.any()); + public void tag() { + } - Object[] vararg = new Object[0]; - token.log("", vararg); - Mockito.verify(logger, Mockito.times(1)).debug("", vararg); - token.error("", null); - Mockito.verify(logger, Mockito.times(1)).error("", (Throwable)null); + @Test + public void testToString() { + } + + @Test + public void readFlags() { + } + + @Test + public void onFail() { + } + + @Test + public void optionalCondition() { + } + + @Test + public void addMetaToken() { } 
-} + @Test + public void metaTokens() { + } +} diff --git a/src/test/java/com/onkiup/linker/parser/token/CollectionTokenTest.java b/src/test/java/com/onkiup/linker/parser/token/CollectionTokenTest.java deleted file mode 100644 index 4dbb5f8..0000000 --- a/src/test/java/com/onkiup/linker/parser/token/CollectionTokenTest.java +++ /dev/null @@ -1,109 +0,0 @@ -package com.onkiup.linker.parser.token; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertSame; -import static org.junit.Assert.assertTrue; -import static org.junit.Assert.fail; - -import java.lang.reflect.Field; - -import org.junit.Test; - -import com.onkiup.linker.parser.ParserLocation; -import com.onkiup.linker.parser.annotation.CaptureLimit; -import com.onkiup.linker.parser.annotation.CapturePattern; - -public class CollectionTokenTest { - @CapturePattern(".*") - private String[] stringField; - private Field arrayField = CollectionTokenTest.class.getDeclaredField("stringField"); - @CapturePattern(".*") - @CaptureLimit(min=1) - private String[] minLimitArray; - private Field minLimitField = CollectionTokenTest.class.getDeclaredField("minLimitArray"); - @CapturePattern(".*") - @CaptureLimit(max=2) - private String[] maxLimitArray; - private Field maxLimitField = CollectionTokenTest.class.getDeclaredField("maxLimitArray"); - - public CollectionTokenTest() throws NoSuchFieldException { - } - - @Test - public void onChildPopulated() { - CollectionToken token = new CollectionToken(null, arrayField, String[].class, null); - try { - token.onChildPopulated(); - fail(); - } catch (IllegalStateException ise) { - // this is expected - } - token = new CollectionToken<>(null, arrayField, String[].class, null); - PartialToken child = token.nextChild().get(); - token.onChildPopulated(); - PartialToken[] children = token.children(); - assertEquals(1, children.length); - assertSame(child, children[0]); - } - - @Test - public void onChildFailed() { 
- CollectionToken token = new CollectionToken<>(null, arrayField, String[].class, null); - try { - token.onChildFailed(); - fail(); - } catch (IllegalStateException ise) { - // this is expected - } - token = new CollectionToken<>(null, arrayField, String[].class, null); - PartialToken child = token.nextChild().get(); - token.onChildFailed(); - PartialToken[] children = token.children(); - assertEquals(0, children.length); - assertFalse(token.isFailed()); - assertTrue(token.isPopulated()); - - token = new CollectionToken<>(null, minLimitField, String[].class, null); - child = token.nextChild().get(); - token.onChildFailed(); - assertTrue(token.isFailed()); - assertFalse(token.isPopulated()); - } - - @Test - public void source() { - CollectionToken token = new CollectionToken<>(null, maxLimitField, String[].class, null); - TerminalToken child = (TerminalToken)token.nextChild().get(); - ConsumingToken.ConsumptionState.inject(child, new ConsumingToken.ConsumptionState("token1", "token1|")); - child.onConsumeSuccess("token1"); - child.onPopulated(new ParserLocation("", 6, 0, 6)); - token.onChildPopulated(); - child = (TerminalToken)token.nextChild().get(); - ConsumingToken.ConsumptionState.inject(child, new ConsumingToken.ConsumptionState("token2", "token2")); - child.onConsumeSuccess("token2"); - child.onPopulated(new ParserLocation("", 12, 0, 12)); - token.onChildPopulated(); - assertEquals("token1|token2", token.source().toString()); - } - - @Test - public void unfilledChildren() { - CollectionToken token = new CollectionToken<>(null, maxLimitField, String[].class, null); - assertEquals(2, token.unfilledChildren()); - token.nextChild(); - assertEquals(2, token.unfilledChildren()); - token.onChildPopulated(); - assertEquals(1, token.unfilledChildren()); - token.nextChild(); - assertEquals(1, token.unfilledChildren()); - token.onChildPopulated(); - assertEquals(0, token.unfilledChildren()); - assertTrue(token.isPopulated()); - } - - @Test - public void 
alternativesLeft() { - // TODO - } -} diff --git a/src/test/java/com/onkiup/linker/parser/token/CompoundTokenTest.java b/src/test/java/com/onkiup/linker/parser/token/CompoundTokenTest.java deleted file mode 100644 index 51a588f..0000000 --- a/src/test/java/com/onkiup/linker/parser/token/CompoundTokenTest.java +++ /dev/null @@ -1,120 +0,0 @@ -package com.onkiup.linker.parser.token; - -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - -import java.util.LinkedList; -import java.util.Optional; -import java.util.function.Consumer; - -import org.junit.Test; -import org.mockito.Mockito; - -import com.onkiup.linker.parser.Rule; -import com.onkiup.linker.parser.annotation.AdjustPriority; - -public class CompoundTokenTest { - - public static interface CttJunction extends Rule { - - } - - @AdjustPriority(100) - public static class CttConcrete implements CttJunction { - - } - - @Test - public void forClass() { - assertTrue(CompoundToken.forClass(CttJunction.class, null) instanceof VariantToken); - assertTrue(CompoundToken.forClass(CttConcrete.class, null) instanceof RuleToken); - } - - @Test - public void traceback() { - CompoundToken token = Mockito.mock(CompoundToken.class); - CompoundToken compoundChild = Mockito.mock(CompoundToken.class); - PartialToken sourceChild = Mockito.mock(PartialToken.class); - - Mockito.when(token.traceback()).thenCallRealMethod(); - Mockito.when(token.children()).thenReturn(new PartialToken[] {compoundChild, sourceChild}); - Mockito.when(compoundChild.traceback()).thenReturn(Optional.of("/COMPOUND_CHILD/")); - Mockito.when(sourceChild.source()).thenReturn("/SOURCE_CHILD/"); - Mockito.when(compoundChild.alternativesLeft()).thenReturn(1); - - assertEquals("/SOURCE_CHILD//COMPOUND_CHILD/", token.traceback().get().toString()); - Mockito.verify(token, Mockito.times(0)).onFail(); - Mockito.verify(compoundChild, Mockito.times(1)).invalidate(); - Mockito.verify(compoundChild, Mockito.times(0)).onFail(); - 
Mockito.verify(sourceChild, Mockito.times(1)).invalidate(); - Mockito.verify(sourceChild, Mockito.times(1)).onFail(); - - Mockito.when(token.children()).thenReturn(new PartialToken[] {sourceChild}); - assertEquals("/SOURCE_CHILD/", token.traceback().get().toString()); - Mockito.verify(token, Mockito.times(1)).onFail(); - Mockito.verify(token, Mockito.times(1)).invalidate(); - Mockito.verify(sourceChild, Mockito.times(2)).invalidate(); - Mockito.verify(sourceChild, Mockito.times(2)).onFail(); - } - - @Test - public void alternativesLeft() { - CompoundToken token = Mockito.mock(CompoundToken.class); - PartialToken child1 = Mockito.mock(PartialToken.class); - PartialToken child2 = Mockito.mock(PartialToken.class); - - Mockito.when(token.alternativesLeft()).thenCallRealMethod(); - Mockito.when(token.children()).thenReturn(new PartialToken[]{child1, child2}); - Mockito.when(child1.alternativesLeft()).thenReturn(3); - Mockito.when(child2.alternativesLeft()).thenReturn(5); - - assertEquals(8, token.alternativesLeft()); - } - - @Test - public void basePriority() { - CompoundToken token = Mockito.mock(CompoundToken.class); - PartialToken child = Mockito.mock(PartialToken.class); - - Mockito.when(token.basePriority()).thenCallRealMethod(); - Mockito.when(token.tokenType()).thenReturn(CttConcrete.class); - Mockito.when(token.children()).thenReturn(new PartialToken[]{child}); - Mockito.when(child.basePriority()).thenReturn(900); - Mockito.when(child.propagatePriority()).thenReturn(true); - - assertEquals(1000, token.basePriority()); - } - - @Test - public void source() { - CompoundToken token = Mockito.mock(CompoundToken.class); - CompoundToken compoundChild = Mockito.mock(CompoundToken.class); - PartialToken sourceChild = Mockito.mock(PartialToken.class); - - Mockito.when(token.traceback()).thenCallRealMethod(); - Mockito.when(token.children()).thenReturn(new PartialToken[] {compoundChild, sourceChild}); - 
Mockito.when(compoundChild.traceback()).thenReturn(Optional.of("/COMPOUND_CHILD/")); - Mockito.when(sourceChild.source()).thenReturn("/SOURCE_CHILD/"); - - assertEquals("/SOURCE_CHILD//COMPOUND_CHILD/", token.traceback().get().toString()); - } - - @Test - public void visit() { - CompoundToken token = Mockito.mock(CompoundToken.class); - PartialToken child1 = Mockito.mock(PartialToken.class); - PartialToken child2 = Mockito.mock(PartialToken.class); - final LinkedList> visited = new LinkedList<>(); - Consumer visitor = visited::add; - - Mockito.doCallRealMethod().when(token).visit(visitor); - Mockito.when(token.children()).thenReturn(new PartialToken[]{child1, child2}); - - token.visit(visitor); - - assertEquals(1, visited.size()); - assertEquals(token, visited.get(0)); - Mockito.verify(child1, Mockito.times(1)).visit(visitor); - Mockito.verify(child2, Mockito.times(1)).visit(visitor); - } -} diff --git a/src/test/java/com/onkiup/linker/parser/token/EnumTokenTest.java b/src/test/java/com/onkiup/linker/parser/token/EnumTokenTest.java index 172d9b4..7838254 100644 --- a/src/test/java/com/onkiup/linker/parser/token/EnumTokenTest.java +++ b/src/test/java/com/onkiup/linker/parser/token/EnumTokenTest.java @@ -1,51 +1,57 @@ package com.onkiup.linker.parser.token; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertSame; +import static junit.framework.TestCase.assertTrue; -import java.lang.reflect.Field; +import java.util.function.Function; import org.junit.Test; -import org.junit.validator.ValidateWith; import org.mockito.Mockito; import com.onkiup.linker.parser.ParserLocation; import com.onkiup.linker.parser.Rule; +import com.onkiup.linker.parser.TokenTestResult; import com.onkiup.linker.parser.annotation.CapturePattern; +import com.onkiup.linker.parser.annotation.IgnoreCase; public class 
EnumTokenTest { - public EnumTokenTest() throws NoSuchFieldException { + @IgnoreCase + public enum TestEnum implements Rule { + ONE, TWO, THREE, + @CapturePattern("hello") FOUR; } - public static enum EttEnum implements Rule { - ONE, - TWO, - THREE; - } - - public static class EttWrapper { - private EttEnum enumValue; - } - - private Field enumField = EttWrapper.class.getDeclaredField("enumValue"); + private TestEnum targetField; @Test - public void testParsing() { + public void baseCases() throws Exception { CompoundToken parent = Mockito.mock(CompoundToken.class); - EnumToken token = new EnumToken(parent, enumField, EttEnum.class, new ParserLocation(null, 0, 0, 0)); - String source = "TWO"; - - for (int i = 0; i < source.length(); i++) { - CharSequence returned = token.consume(source.charAt(i)).orElse(null); - if (returned != null && returned.length() > 0) { - fail("Unexpected buffer return at character#" + i + ": '" + returned); - } - } - - assertEquals(EttEnum.TWO, token.token().get()); - - assertEquals("z", token.consume('z').get()); + ConsumingToken.ConsumptionState.rootBuffer(parent, ""); + EnumToken subject = new EnumToken(parent, getClass().getDeclaredField("targetField"), TestEnum.class , + ParserLocation.ZERO); + + Function matcher = subject.tokenMatcher(); + + assertTrue(matcher.apply("ON").isContinue()); + assertEquals("ONE", matcher.apply("ONE").getToken()); + + subject.onConsumeSuccess("ONE"); + assertSame(TestEnum.ONE, subject.token().get()); + subject.reset(); + + assertTrue(matcher.apply("T").isContinue()); + subject.reset(); + assertEquals("two", matcher.apply("two").getToken()); + subject.onConsumeSuccess("two"); + assertSame(TestEnum.TWO, subject.token().get()); + subject.reset(); + + assertTrue(matcher.apply("HELLO").isMatch()); + subject.onConsumeSuccess("HELLO"); + assertEquals("HELLO", matcher.apply("HELLO").getToken()); + assertSame(TestEnum.FOUR, subject.token().get()); + subject.reset(); } } diff --git 
a/src/test/java/com/onkiup/linker/parser/token/NumberMatcherTest.java b/src/test/java/com/onkiup/linker/parser/token/NumberMatcherTest.java new file mode 100644 index 0000000..63b52d5 --- /dev/null +++ b/src/test/java/com/onkiup/linker/parser/token/NumberMatcherTest.java @@ -0,0 +1,99 @@ +package com.onkiup.linker.parser.token; + +import static junit.framework.TestCase.assertEquals; +import static junit.framework.TestCase.assertFalse; +import static junit.framework.TestCase.assertTrue; + +import java.math.BigDecimal; +import java.math.BigInteger; + +import org.junit.Test; + +import com.onkiup.linker.parser.NumberMatcher; + +public class NumberMatcherTest { + + @Test + public void testBytes() { + NumberMatcher subject = new NumberMatcher(Byte.class); + assertTrue(subject.apply("1").isMatchContinue()); + assertTrue(subject.apply("128").isFailed()); + + assertTrue(subject.apply("125 ").isMatch()); + assertEquals((byte)125, subject.apply("125i").getToken()); + assertEquals((byte)-125, subject.apply("-125i").getToken()); + assertEquals(3, subject.apply("125i").getTokenLength()); + } + + @Test + public void testIntegers() { + NumberMatcher subject = new NumberMatcher(Integer.class); + assertTrue(subject.apply("1").isMatchContinue()); + assertTrue(subject.apply("999999999999999999999999999").isFailed()); + + assertTrue(subject.apply("199 ").isMatch()); + assertEquals(199, subject.apply("199i").getToken()); + assertEquals(-199, subject.apply("-199i").getToken()); + assertEquals(3, subject.apply("199i").getTokenLength()); + } + + @Test + public void testLongs() { + NumberMatcher subject = new NumberMatcher(Long.class); + assertTrue(subject.apply("1").isMatchContinue()); + assertTrue(subject.apply("999999999999999999999999999").isFailed()); + + assertTrue(subject.apply("199 ").isMatch()); + assertEquals(199L, subject.apply("199i").getToken()); + assertEquals(-199L, subject.apply("-199i").getToken()); + assertEquals(3, subject.apply("199i").getTokenLength()); + } + + @Test + 
public void testFloats() { + NumberMatcher subject = new NumberMatcher(Float.class); + assertTrue(subject.apply("1").isMatchContinue()); + + assertTrue(subject.apply("19.9 ").isMatch()); + assertEquals(Float.POSITIVE_INFINITY, subject.apply("99999999999999999999999999999999999999999999999999999999999999999999999999999").getToken()); + assertEquals(Float.NEGATIVE_INFINITY, subject.apply("-99999999999999999999999999999999999999999999999999999999999999999999999999999").getToken()); + assertEquals(19.9F, subject.apply("19.9i").getToken()); + assertEquals(-19.9F, subject.apply("-19.9i").getToken()); + assertEquals(4, subject.apply("19.9i").getTokenLength()); + } + + @Test + public void testDoubles() { + NumberMatcher subject = new NumberMatcher(Double.class); + assertTrue(subject.apply("1").isMatchContinue()); + + assertTrue(subject.apply("19.9 ").isMatch()); + assertEquals(Double.POSITIVE_INFINITY, subject.apply("999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999").getToken()); + assertEquals(Double.NEGATIVE_INFINITY, subject.apply("-999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999").getToken()); + assertEquals(19.9D, subject.apply("19.9i").getToken()); + 
assertEquals(-19.9D, subject.apply("-19.9i").getToken()); + assertEquals(4, subject.apply("19.9i").getTokenLength()); + } + + @Test + public void testBigDecimals() { + NumberMatcher subject = new NumberMatcher(BigDecimal.class); + assertTrue(subject.apply("1").isMatchContinue()); + + assertTrue(subject.apply("19.9 ").isMatch()); + assertEquals(new BigDecimal("19.9"), subject.apply("19.9i").getToken()); + assertEquals(new BigDecimal("-19.9"), subject.apply("-19.9i").getToken()); + assertEquals(4, subject.apply("19.9i").getTokenLength()); + } + + @Test + public void testBigIntegers() { + NumberMatcher subject = new NumberMatcher(BigInteger.class); + assertTrue(subject.apply("1").isMatchContinue()); + + assertTrue(subject.apply("199 ").isMatch()); + assertEquals(new BigInteger("199"), subject.apply("199i").getToken()); + assertEquals(new BigInteger("-199"), subject.apply("-199i").getToken()); + assertEquals(3, subject.apply("199i").getTokenLength()); + } +} diff --git a/src/test/java/com/onkiup/linker/parser/token/PartialTokenTest.java b/src/test/java/com/onkiup/linker/parser/token/PartialTokenTest.java deleted file mode 100644 index fa38237..0000000 --- a/src/test/java/com/onkiup/linker/parser/token/PartialTokenTest.java +++ /dev/null @@ -1,214 +0,0 @@ -package com.onkiup.linker.parser.token; - -import static junit.framework.TestCase.assertEquals; -import static junit.framework.TestCase.assertFalse; -import static junit.framework.TestCase.assertSame; -import static junit.framework.TestCase.assertTrue; - -import java.lang.reflect.Field; -import java.util.LinkedList; -import java.util.Optional; - -import org.junit.Test; -import org.junit.runner.RunWith; -import org.mockito.Mockito; -import org.powermock.api.mockito.PowerMockito; -import org.powermock.core.classloader.annotations.PrepareForTest; -import org.powermock.modules.junit4.PowerMockRunner; - -import com.onkiup.linker.parser.ParserLocation; -import com.onkiup.linker.parser.Rule; -import 
com.onkiup.linker.parser.annotation.AdjustPriority; -import com.onkiup.linker.parser.annotation.OptionalToken; -import com.onkiup.linker.parser.annotation.SkipIfFollowedBy; - -@RunWith(PowerMockRunner.class) -@PrepareForTest(PartialToken.class) -public class PartialTokenTest { - - private Object[] array; - - public PartialTokenTest() throws NoSuchFieldException { - } - - @AdjustPriority(value = 9000, propagate = true) - private static class TestRule implements Rule {} - @SkipIfFollowedBy("test") - private TestRule testRule; - @OptionalToken(whenFollowedBy = "hi") - private Rule rule; - @OptionalToken - private String string; - private enum TestEnum {}; - private TestEnum enumnumnum; - - private Field arrayField = PartialTokenTest.class.getDeclaredField("array"); - private Field testRuleField = PartialTokenTest.class.getDeclaredField("testRule"); - private Field junctionField = PartialTokenTest.class.getDeclaredField("rule"); - private Field stringField = PartialTokenTest.class.getDeclaredField("string"); - private Field enumField = PartialTokenTest.class.getDeclaredField("enumnumnum"); - - @Test - public void forField() throws Exception { - - Class[] constructorArguments = new Class[] { - CompoundToken.class, Field.class, Class.class, ParserLocation.class - }; - - PowerMockito.whenNew(CollectionToken.class).withAnyArguments().thenReturn(Mockito.mock(CollectionToken.class)); - PowerMockito.whenNew(VariantToken.class).withAnyArguments().thenReturn(Mockito.mock(VariantToken.class)); - PowerMockito.whenNew(RuleToken.class).withAnyArguments().thenReturn(Mockito.mock(RuleToken.class)); - PowerMockito.whenNew(TerminalToken.class).withAnyArguments().thenReturn(Mockito.mock(TerminalToken.class)); - PowerMockito.whenNew(EnumToken.class).withAnyArguments().thenReturn(Mockito.mock(EnumToken.class)); - - assertTrue(PartialToken.forField(null, arrayField, Object[].class, null) instanceof CollectionToken); - assertTrue(PartialToken.forField(null, testRuleField, TestRule.class, 
null) instanceof RuleToken); - assertTrue(PartialToken.forField(null, junctionField, Rule.class, null) instanceof VariantToken); - assertTrue(TerminalToken.class.isAssignableFrom(PartialToken.forField(null, stringField, String.class, null).getClass())); - assertTrue(PartialToken.forField(null, enumField, TestEnum.class, null) instanceof EnumToken); - - } - - @Test - public void getOptionalCondition() { - assertFalse(PartialToken.getOptionalCondition(enumField).isPresent()); - assertFalse(PartialToken.getOptionalCondition(stringField).isPresent()); - assertEquals("test", PartialToken.getOptionalCondition(testRuleField).get()); - assertEquals("hi", PartialToken.getOptionalCondition(junctionField).get()); - } - - @Test - public void isOptional() { - assertTrue(PartialToken.hasOptionalAnnotation(testRuleField)); - assertTrue(PartialToken.hasOptionalAnnotation(junctionField)); - assertTrue(PartialToken.hasOptionalAnnotation(stringField)); - assertFalse(PartialToken.hasOptionalAnnotation(enumField)); - } - - @Test - public void lookahead() { - PartialToken token = Mockito.mock(PartialToken.class); - CompoundToken parent = Mockito.mock(CompoundToken.class); - - Mockito.when(token.parent()).thenReturn(Optional.of(parent)); - Mockito.when(parent.parent()).thenReturn(Optional.empty()); - Mockito.when(token.lookahead(Mockito.any())).thenCallRealMethod(); - Mockito.when(parent.lookahead(Mockito.any())).thenCallRealMethod(); - Mockito.when(token.targetField()).thenReturn(Optional.of(testRuleField)); - Mockito.when(parent.targetField()).thenReturn(Optional.of(junctionField)); - - assertFalse(token.lookahead("m")); - assertTrue(token.lookahead("t")); - assertTrue(token.lookahead("h")); - assertTrue(token.lookahead("test")); - Mockito.verify(parent, Mockito.times(0)).markOptional(); - Mockito.verify(token, Mockito.times(1)).markOptional(); - assertFalse(token.lookahead("hi")); - Mockito.verify(parent, Mockito.times(1)).markOptional(); - assertFalse(token.lookahead("testt")); - 
Mockito.verify(token, Mockito.times(2)).markOptional(); - assertTrue(token.lookahead("testh")); - Mockito.verify(token, Mockito.times(3)).markOptional(); - Mockito.verify(parent, Mockito.times(1)).markOptional(); - assertFalse(token.lookahead("testhi")); - Mockito.verify(token, Mockito.times(4)).markOptional(); - Mockito.verify(parent, Mockito.times(2)).markOptional(); - } - - @Test - public void findInTree() { - PartialToken token = Mockito.mock(PartialToken.class); - CompoundToken parent = Mockito.mock(CompoundToken.class); - - Mockito.when(token.parent()).thenReturn(Optional.of(parent)); - Mockito.when(parent.parent()).thenReturn(Optional.empty()); - Mockito.when(token.findInPath(Mockito.any())).thenCallRealMethod(); - Mockito.when(parent.findInPath(Mockito.any())).thenCallRealMethod(); - - assertEquals(parent, token.findInPath(parent::equals).get()); - } - - @Test - public void position() { - PartialToken token = Mockito.mock(PartialToken.class); - Mockito.when(token.position()).thenCallRealMethod(); - - assertEquals(0, token.position()); - ParserLocation location = Mockito.mock(ParserLocation.class); - Mockito.when(location.position()).thenReturn(9000); - Mockito.when(token.location()).thenReturn(location); - assertEquals(9000, token.position()); - } - - @Test - public void basePriority() { - PartialToken token = Mockito.mock(PartialToken.class); - Mockito.when(token.tokenType()).thenReturn(TestRule.class); - Mockito.when(token.basePriority()).thenCallRealMethod(); - assertEquals(9000, token.basePriority()); - } - - @Test - public void propagatePriority() { - PartialToken token = Mockito.mock(PartialToken.class); - Mockito.when(token.tokenType()).thenReturn(TestRule.class); - Mockito.when(token.propagatePriority()).thenCallRealMethod(); - assertTrue(token.propagatePriority()); - } - - @Test - public void visit() { - PartialToken token = Mockito.mock(PartialToken.class); - Mockito.doCallRealMethod().when(token).visit(Mockito.any()); - PartialToken[] visited = 
new PartialToken[1]; - token.visit(t -> visited[0] = t); - assertEquals(token, visited[0]); - } - - @Test - public void alternativesLeft() { - PartialToken token = Mockito.mock(PartialToken.class); - Mockito.when(token.alternativesLeft()).thenCallRealMethod(); - assertEquals(0, token.alternativesLeft()); - } - - @Test - public void root() { - PartialToken parent = Mockito.mock(PartialToken.class); - PartialToken child = Mockito.mock(PartialToken.class); - - Mockito.when(child.parent()).thenReturn(Optional.of(parent)); - Mockito.when(parent.parent()).thenReturn(Optional.empty()); - Mockito.when(child.root()).thenCallRealMethod(); - Mockito.when(parent.root()).thenCallRealMethod(); - - assertEquals(parent, child.root()); - } - - @Test - public void tail() { - PartialToken token = Mockito.mock(PartialToken.class); - Mockito.when(token.tail(Mockito.anyInt())).thenCallRealMethod(); - Mockito.when(token.source()).thenReturn("source"); - - assertEquals("source", token.tail(6)); - assertEquals(" source", token.tail(7)); - assertEquals("rce", token.tail(3)); - } - - @Test - public void path() { - PartialToken token = Mockito.mock(PartialToken.class); - CompoundToken parent = Mockito.mock(CompoundToken.class); - - Mockito.when(token.parent()).thenReturn(Optional.of(parent)); - Mockito.when(parent.parent()).thenReturn(Optional.empty()); - Mockito.when(token.path()).thenCallRealMethod(); - Mockito.when(parent.path()).thenCallRealMethod(); - - LinkedList path = token.path(); - assertEquals(2, path.size()); - assertSame(parent, path.get(0)); - assertSame(token, path.get(1)); - } -} diff --git a/src/test/java/com/onkiup/linker/parser/token/RuleTokenTest.java b/src/test/java/com/onkiup/linker/parser/token/RuleTokenTest.java deleted file mode 100644 index 7ac4bc4..0000000 --- a/src/test/java/com/onkiup/linker/parser/token/RuleTokenTest.java +++ /dev/null @@ -1,7 +0,0 @@ -package com.onkiup.linker.parser.token; - -import static org.junit.Assert.*; - -public class RuleTokenTest { - -}