Helper function to split string into groups based on regexp. Possibility to retrieve...
[svjatoslav_commons.git] / src / main / java / eu / svjatoslav / commons / string / tokenizer / Tokenizer.java
index e92ccd7..14554be 100755 (executable)
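A minimal usage sketch of the regexp-based terminator API introduced by this change (illustrative only: the DROP strategy is assumed to still exist in Terminator.TerminationStrategy, and terminator regexps are assumed to match at the current read position):

    Tokenizer tokenizer = new Tokenizer("alpha beta; gamma");
    // DROP is assumed to still exist (only PRESERVE is referenced in this file):
    tokenizer.addTerminator(Terminator.TerminationStrategy.DROP, "\\s+");    // whitespace separates tokens and is discarded
    tokenizer.addTerminator(Terminator.TerminationStrategy.PRESERVE, ";");   // ";" becomes a token of its own

    TokenizerMatch match;
    while ((match = tokenizer.getNextToken()) != null)
        System.out.println(match.token);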
@@ -1,55 +1,68 @@
 /*
- * Svjatoslav Commons - shared library of common functionality.
- * Copyright ©2012-2017, Svjatoslav Agejenko, svjatoslav@svjatoslav.eu
- * 
- * This program is free software; you can redistribute it and/or
- * modify it under the terms of version 3 of the GNU Lesser General Public License
- * or later as published by the Free Software Foundation.
+ * Svjatoslav Commons - shared library of common functionality. Author: Svjatoslav Agejenko.
+ * This project is released under Creative Commons Zero (CC0) license.
  */
-
 package eu.svjatoslav.commons.string.tokenizer;
 
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Stack;
+import java.util.regex.Matcher;
 import java.util.stream.Stream;
 
-import static eu.svjatoslav.commons.string.tokenizer.Terminator.TerminationStrategy.DROP;
 import static eu.svjatoslav.commons.string.tokenizer.Terminator.TerminationStrategy.PRESERVE;
+import static java.lang.System.out;
 
 public class Tokenizer {
 
-    final Stack<Integer> tokenIndexes = new Stack<>();
+    /**
+     * Stack of token indexes. Allows walking back in history and un-consuming tokens (see {@link #unreadToken()}).
+     */
+    private final Stack<Integer> tokenIndexes = new Stack<>();
+
+    /**
+     * Terminators that this tokenizer will search for within the source string.
+     */
     private final List<Terminator> terminators = new ArrayList<>();
-    private String source;
+
+    private String source; // string to be tokenized
+
     private int currentIndex = 0;
 
     public Tokenizer(final String source) {
         this.source = source;
     }
 
-    public Tokenizer(){}
+    public Tokenizer() {
+    }
 
-    public Tokenizer setSource(String source){
+    public Tokenizer setSource(String source) {
         this.source = source;
         currentIndex = 0;
         tokenIndexes.clear();
         return this;
     }
 
-    public Tokenizer addTerminator(final String startSequence,
-                                   final Terminator.TerminationStrategy terminationStrategy) {
-        terminators.add(new Terminator(startSequence, terminationStrategy));
-        return this;
+    public Terminator addTerminator(final Terminator.TerminationStrategy terminationStrategy, String regexp) {
+        Terminator terminator = new Terminator(terminationStrategy, regexp, null);
+        terminators.add(terminator);
+        return terminator;
     }
 
-    public Tokenizer addTerminator(final String startSequence,
-                                   final String endSequence, final Terminator.TerminationStrategy terminationStrategy) {
-        terminators.add(new Terminator(startSequence, endSequence, terminationStrategy));
-        return this;
+    public Terminator addTerminator(final Terminator.TerminationStrategy terminationStrategy,
+                                    String regexp, String group) {
+        Terminator terminator = new Terminator(terminationStrategy, regexp, group);
+        terminators.add(terminator);
+        return terminator;
     }
 
-    public void expectAndConsumeNextToken(final String value)
+
+    public Terminator addTerminator(Terminator terminator) {
+        terminators.add(terminator);
+        return terminator;
+    }
+
+    public void expectAndConsumeNextStringToken(final String value)
             throws InvalidSyntaxException {
         final TokenizerMatch match = getNextToken();
         if (!value.equals(match.token))
@@ -57,55 +70,79 @@ public class Tokenizer {
                     + "\" but got \"" + match.token + "\" instead.");
     }
 
+    public TokenizerMatch expectAndConsumeNextTerminatorToken(Terminator terminator)
+            throws InvalidSyntaxException {
+        final TokenizerMatch match = getNextToken();
+
+        if (match == null || match.terminator != terminator)
+            throw new InvalidSyntaxException("Expected terminator \"" + terminator
+                    + "\" but got \"" + (match == null ? "end of input" : match.terminator) + "\" instead.");
+
+        return match;
+    }
+
+
+    /**
+     * @return next {@link TokenizerMatch} or <code>null</code> if end of input is reached.
+     */
     public TokenizerMatch getNextToken() {
         tokenIndexes.push(currentIndex);
 
-        StringBuilder token = new StringBuilder();
-
-        while (true){
-            if (isTokenTermination()){
-                Terminator tokenTerminator = findTokenTerminator();
-
-                if (tokenTerminator.termination == PRESERVE){
-                    if (hasAccumulatedToken(token)){
-                        // already assembled some token
-                        return new TokenizerMatch(token.toString(), "", tokenTerminator);
-                    } else {
-                        currentIndex++;
-                        return new TokenizerMatch(tokenTerminator.startSequence, "", tokenTerminator);
-                    }
-                } else if (tokenTerminator.termination == DROP){
-                    if (hasAccumulatedToken(token)){
-                        currentIndex++;
-                        return new TokenizerMatch(token.toString(), "", tokenTerminator);
-                    } else {
-                        currentIndex++;
-                    }
-                }
-            } else {
-                token.append(source.charAt(currentIndex));
+        StringBuilder tokenAccumulator = new StringBuilder();
+
+        while (true) {
+
+            if (currentIndex >= source.length()) { // reached end of input
+                if (hasAccumulatedToken(tokenAccumulator))
+                    return new TokenizerMatch(tokenAccumulator.toString(), null, null, this);
+                else
+                    return null;
+            }
+
+            TokenizerMatch matchResult = findTerminatorMatch();
+            if (matchResult == null) {
+                tokenAccumulator.append(source.charAt(currentIndex));
                 currentIndex++;
+                continue;
             }
-        }
 
-    }
+            if (matchResult.terminator.termination == PRESERVE) {
+                if (hasAccumulatedToken(tokenAccumulator))
+                    return new TokenizerMatch(tokenAccumulator.toString(), null, null, this);
 
-    private boolean hasAccumulatedToken(StringBuilder token) {
-        return token.length() > 0;
-    }
+                currentIndex = matchResult.matcher.end();
+                return matchResult;
+            } else {
+                currentIndex = matchResult.matcher.end();
 
-    private boolean isTokenTermination() {
-        return findTokenTerminator() != null;
+                if (hasAccumulatedToken(tokenAccumulator))
+                    return new TokenizerMatch(tokenAccumulator.toString(), null, null, this);
+            }
+        }
     }
 
-    public Terminator findTokenTerminator() {
+    /**
+     * @return match from the first active terminator that matches the remaining input, or <code>null</code> if none does.
+     */
+    public TokenizerMatch findTerminatorMatch() {
         for (Terminator terminator : terminators)
-            if (terminator.matches(source, currentIndex))
-                return terminator;
+            if (terminator.active) {
+                Matcher match = terminator.match(source, currentIndex);
+                if (match.find()) {
+                    String token = source.substring(match.start(), match.end());
+                    return new TokenizerMatch(token, terminator, match, this);
+                }
+            }
         return null;
     }
 
-    public boolean consumeIfNextToken(final String token) {
+    private boolean hasAccumulatedToken(StringBuilder tokenAccumulator) {
+        return tokenAccumulator.length() > 0;
+    }
+
+    public boolean hasMoreContent() {
+        if (source == null) return false;
+        return currentIndex < source.length();
+    }
+
+    public boolean consumeIfNextToken(final String token) throws InvalidSyntaxException {
         if (token.equals(getNextToken().token))
             return true;
 
@@ -113,52 +150,45 @@ public class Tokenizer {
         return false;
     }
 
-    public TokenizerMatch peekNextToken(){
+    public TokenizerMatch peekNextToken() throws InvalidSyntaxException {
         TokenizerMatch result = getNextToken();
         unreadToken();
         return result;
     }
 
-    public boolean peekIsOneOf(String ... possibilities){
+    public boolean peekIsOneOf(String... possibilities) throws InvalidSyntaxException {
         String nextToken = peekNextToken().token;
         return Stream.of(possibilities).anyMatch(possibility -> possibility.equals(nextToken));
     }
 
-    public void peekExpectNoneOf(String ... possibilities) throws InvalidSyntaxException {
+    public void peekExpectNoneOf(String... possibilities) throws InvalidSyntaxException {
         if (peekIsOneOf(possibilities))
             throw new InvalidSyntaxException("Not expected \"" + peekNextToken().token + "\" here.");
     }
 
+    public void unreadToken() {
+        currentIndex = tokenIndexes.pop();
+    }
 
-    public boolean sequenceMatches(final String sequence) {
-        if ((currentIndex + sequence.length()) > source.length())
-            return false;
+    /**
+     * Prints all remaining tokens to standard output, for debugging. The read position is restored afterwards.
+     */
+    public void enlistRemainingTokens() {
+        int readTokenCount = 0;
 
-        for (int i = 0; i < sequence.length(); i++)
-            if (sequence.charAt(i) != source.charAt(i + currentIndex))
-                return false;
+        while (hasMoreContent()) {
+            out.println(getNextToken());
+            readTokenCount++;
+        }
 
-        return true;
+        // restore pointer to original location
+        for (int i = 0; i < readTokenCount; i++) unreadToken();
     }
 
+
     public void skipUntilDataEnd() {
         tokenIndexes.push(currentIndex);
         currentIndex = source.length();
     }
 
-    public void skipUntilSequence(final String sequence) {
-        while (currentIndex < source.length()) {
-            if (sequenceMatches(sequence)) {
-                currentIndex += sequence.length();
-                return;
-            }
-
-            currentIndex++;
-        }
-    }
-
-    public void unreadToken() {
-        currentIndex = tokenIndexes.pop();
-    }
-
 }
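A second sketch, under the same assumptions as above, showing how the Terminator handle returned by addTerminator() can be used together with the new expectAndConsumeNextTerminatorToken():

    // Assuming terminator regexps match at the current read position:
    Tokenizer tokenizer = new Tokenizer("name=value");
    Terminator equalsSign = tokenizer.addTerminator(Terminator.TerminationStrategy.PRESERVE, "=");

    String key = tokenizer.getNextToken().token;                 // expected: "name"
    tokenizer.expectAndConsumeNextTerminatorToken(equalsSign);   // throws InvalidSyntaxException if "=" does not follow
    String value = tokenizer.getNextToken().token;               // expected: "value"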