Java 类org.antlr.runtime.TokenRewriteStream 实例源码

项目:JavaGraph    文件:CtrlLoader.java   
/**
 * Returns renamed versions of the stored control programs.
 * Every call of {@code oldCallName} found in a stored control tree is
 * rewritten to {@code newCallName} through the tree's token rewrite stream.
 * @param oldCallName the qualified call name to be replaced
 * @param newCallName the qualified name to substitute for each occurrence
 * @return a mapping from program names to changed programs; programs in
 *         which no call was rewritten are omitted from the result
 */
public Map<QualName,String> rename(QualName oldCallName, QualName newCallName) {
    Map<QualName,String> result = new HashMap<>();
    for (Map.Entry<QualName,CtrlTree> entry : this.controlTreeMap.entrySet()) {
        QualName name = entry.getKey();
        CtrlTree tree = entry.getValue();
        // Edits queued on the rewriter are only materialised by toString() below.
        TokenRewriteStream rewriter = getRewriter(tree);
        boolean changed = false;
        for (CtrlTree t : tree.getRuleIdTokens(oldCallName)) {
            // Replace the token span from the call token through its first
            // child's token with the new name.
            rewriter.replace(t.getToken(), t.getChild(0)
                .getToken(), newCallName);
            changed = true;
        }
        if (changed) {
            result.put(name, rewriter.toString());
        }
    }
    return result;
}
项目:hive-jq-udtf    文件:ObjectInspectors.java   
/**
 * Parses a Hive type declaration string into an AST.
 * Any lexing/parsing failure is rethrown as IllegalArgumentException with
 * the offending type string in the message.
 */
private static ASTNode parseHiveType(final String hiveType) {
    try {
        final ParseDriver driver = new ParseDriver();
        // Case-insensitive stream -> lexer -> rewrite stream -> parser.
        final TokenRewriteStream tokens = new TokenRewriteStream(
                new HiveLexer(driver.new ANTLRNoCaseStringStream(hiveType)));
        final HiveParser parser = new HiveParser(tokens);
        parser.setTreeAdaptor(ParseDriver.adaptor);

        final ASTNode node = (ASTNode) parser.type().getTree();
        node.setUnknownTokenBoundaries();
        return node;
    } catch (final Exception e) {
        throw new IllegalArgumentException("invalid type: " + hiveType, e);
    }
}
项目:Rapture    文件:ParseEntitlementPath.java   
/**
 * Resolves an entitlement path against the given entitlements context.
 * If the path cannot be parsed, the error is logged and the original
 * path is returned unchanged.
 */
public static String getEntPath(String path, IEntitlementsContext entCtx) {
    EntLexer entLexer = new EntLexer();
    entLexer.setCharStream(new ANTLRStringStream(path));
    EntParser entParser = new EntParser(new TokenRewriteStream(entLexer));
    entParser.setEctx(entCtx);

    try {
        entParser.entpath();
        return entParser.getPath();
    } catch (RecognitionException e) {
        // If we don't recognize it, ignore it and return the path
        log.error("Could not parse path " + path + ", error was " + e.getMessage());
        return path;
    }
}
项目:SecureDB    文件:ParseDriver.java   
/**
 * Parses a command into an AST.
 *
 * <p>NOTE(review): the previous Javadoc described a {@code ctx} parameter
 * ("context with which to associate this parser's token stream"), but this
 * method takes no such parameter — presumably copied from another overload.
 *
 * @param command command to parse
 * @return parsed AST
 * @throws ParseException if the lexer or the parser recorded any errors
 */
public ASTNode parse(String command) throws ParseException {
  LOG.info("Parsing command: " + command);

  // Case-insensitive character stream feeding the Hive lexer/parser chain.
  HiveLexerX lexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  TokenRewriteStream tokens = new TokenRewriteStream(lexer);
  HiveParser parser = new HiveParser(tokens);
  parser.setTreeAdaptor(adaptor);
  HiveParser.statement_return r = null;
  try {
    r = parser.statement();
  } catch (RecognitionException e) {
    throw new ParseException(parser.errors);
  }

  // Lexer errors take precedence over parser errors when reporting.
  if (lexer.getErrors().size() == 0 && parser.errors.size() == 0) {
    LOG.info("Parse Completed");
  } else if (lexer.getErrors().size() != 0) {
    throw new ParseException(lexer.getErrors());
  } else {
    throw new ParseException(parser.errors);
  }

  return (ASTNode) r.getTree();
}
项目:SecureDB    文件:ParseDriver.java   
/**
 * Parses a SELECT clause into an AST.
 *
 * @param command the select clause text to parse
 * @return parsed AST
 * @throws ParseException if the lexer or the parser recorded any errors
 */
public ASTNode parseSelect(String command) throws ParseException {
  LOG.info("Parsing command: " + command);

  // Case-insensitive character stream feeding the Hive lexer/parser chain.
  HiveLexerX hiveLexer = new HiveLexerX(new ANTLRNoCaseStringStream(command));
  HiveParser hiveParser = new HiveParser(new TokenRewriteStream(hiveLexer));
  hiveParser.setTreeAdaptor(adaptor);

  HiveParser_SelectClauseParser.selectClause_return clause;
  try {
    clause = hiveParser.selectClause();
  } catch (RecognitionException e) {
    e.printStackTrace();
    throw new ParseException(hiveParser.errors);
  }

  // Lexer errors are reported ahead of parser errors.
  if (hiveLexer.getErrors().size() != 0) {
    throw new ParseException(hiveLexer.getErrors());
  }
  if (hiveParser.errors.size() != 0) {
    throw new ParseException(hiveParser.errors);
  }
  LOG.info("Parse Completed");

  return (ASTNode) clause.getTree();
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Insert before and after the middle token; both inserts must appear. */
public void test2InsertBeforeAfterMiddleIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.insertBefore(1, "x");
    tokens.insertAfter(1, "x");
    String result = tokens.toString();
    String expecting = "axbxc";
    // Fixed: JUnit assertEquals takes (expected, actual); the swapped order
    // produced misleading failure messages.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** A second replace at the same index supersedes the first. */
public void test2ReplaceMiddleIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(1, "x");
    tokens.replace(1, "y");
    String result = tokens.toString();
    String expecting = "ayc";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Delete after replace at the same index removes the token entirely. */
public void testReplaceThenDeleteMiddleIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(1, "x");
    tokens.delete(1);
    String result = tokens.toString();
    String expecting = "ac";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Insert-before after a replace at index 0 places the insert ahead of it. */
public void testReplaceThenInsertSameIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(0, "x");
    tokens.insertBefore(0, "0");
    String result = tokens.toString();
    String expecting = "0xbc";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Two insert-befores after a replace stack in reverse issue order. */
public void testReplaceThen2InsertSameIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(0, "x");
    tokens.insertBefore(0, "y");
    tokens.insertBefore(0, "z");
    String result = tokens.toString();
    String expecting = "zyxbc";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Insert-before then replace at the same index keeps both edits. */
public void testInsertThenReplaceSameIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.insertBefore(0, "0");
    tokens.replace(0, "x");
    String result = tokens.toString();
    String expecting = "0xbc";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Two insert-befores at one index emit in reverse issue order. */
public void test2InsertMiddleIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.insertBefore(1, "x");
    tokens.insertBefore(1, "y");
    String result = tokens.toString();
    String expecting = "ayxbc";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Two inserts then a replace at index 0; all three edits survive. */
public void test2InsertThenReplaceIndex0() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.insertBefore(0, "x");
    tokens.insertBefore(0, "y");
    tokens.replace(0, "z");
    String result = tokens.toString();
    String expecting = "yxzbc";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Replace last token, then insert before it; insert precedes replacement. */
public void testReplaceThenInsertBeforeLastIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, "x");
    tokens.insertBefore(2, "y");
    String result = tokens.toString();
    String expecting = "abyx";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Insert before last token, then replace it; both edits appear. */
public void testInsertThenReplaceLastIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.insertBefore(2, "y");
    tokens.replace(2, "x");
    String result = tokens.toString();
    String expecting = "abyx";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** Replace last token then insert after it; insert follows replacement. */
public void testReplaceThenInsertAfterLastIndex() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abc");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, "x");
    tokens.insertAfter(2, "y");
    String result = tokens.toString();
    String expecting = "abxy";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** An insert strictly inside a replaced range is dropped. */
public void testReplaceRangeThenInsertInMiddle() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abcccba");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, 4, "x");
    tokens.insertBefore(3, "y"); // no effect; can't insert in middle of replaced region
    String result = tokens.toString();
    String expecting = "abxba";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** An insert at the left edge of a replaced range is kept, before it. */
public void testReplaceRangeThenInsertAtLeftEdge() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abcccba");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, 4, "x");
    tokens.insertBefore(2, "y");
    String result = tokens.toString();
    String expecting = "abyxba";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** An insert-before at the right edge inside a replaced range is dropped. */
public void testReplaceRangeThenInsertAtRightEdge() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abcccba");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, 4, "x");
    tokens.insertBefore(4, "y"); // no effect; within range of a replace
    String result = tokens.toString();
    String expecting = "abxba";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** An insert-after at the right edge of a replaced range is kept, after it. */
public void testReplaceRangeThenInsertAfterRightEdge() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abcccba");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, 4, "x");
    tokens.insertAfter(4, "y");
    String result = tokens.toString();
    String expecting = "abxyba";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** A replace covering a superset range kills the earlier nested replace. */
public void testReplaceThenReplaceSuperset() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abcccba");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, 4, "xyz");
    tokens.replace(2, 5, "foo"); // kills previous replace
    String result = tokens.toString();
    String expecting = "abfooa";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:mclab-core    文件:TestTokenRewriteStream.java   
/** An overlapping superset replace subsumes an earlier single-token replace. */
public void testReplaceSingleMiddleThenOverlappingSuperset() throws Exception {
    Grammar g = new Grammar(
        "lexer grammar t;\n"+
        "A : 'a';\n" +
        "B : 'b';\n" +
        "C : 'c';\n");
    CharStream input = new ANTLRStringStream("abcba");
    Interpreter lexEngine = new Interpreter(g, input);
    TokenRewriteStream tokens = new TokenRewriteStream(lexEngine);
    tokens.LT(1); // fill buffer
    tokens.replace(2, 2, "xyz");
    tokens.replace(0, 3, "foo");
    String result = tokens.toString();
    String expecting = "fooa";
    // Fixed: JUnit assertEquals takes (expected, actual); arguments were swapped.
    assertEquals(expecting, result);
}
项目:Mutation-Testing-Framework    文件:JavaFile.java   
/**
 * Scans the tokenized source for every token whose text equals
 * {@code original} and registers a mutant case replacing it with {@code by}.
 *
 * <p>It is acceptable for e.g. {@code int y = +10;} to become
 * {@code int y = /10;}: the mutant project then fails to compile and the
 * mutation is discarded.
 *
 * @param original the original token text to look for
 * @param by the replacement token text
 * @param operation the operation name (currently unused here —
 *            NOTE(review): confirm whether callers expect it recorded)
 * @return always {@code true}
 */
private boolean applyTokenTransformation(String original, String by,
        String operation) throws Exception {
    ANTLRInputStream input = new ANTLRInputStream(fileContents.toString());
    JavaLexer lexer = new JavaLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    JavaParser parser = new JavaParser(tokens);

    // Parsing forces the token stream to be fully buffered; the tree itself
    // is not used. (Removed unused TokenRewriteStream and ParseTreeWalker
    // locals and the boxed Integer loop counter.)
    parser.compilationUnit();

    for (int i = 0; i < tokens.size(); i++) {
        if (tokens.get(i).getText().equals(original)) {
            int startIndex = tokens.get(i).getStartIndex();
            int stopIndex = tokens.get(i).getStopIndex() + 1; // exclusive end
            mvc.add(startIndex, stopIndex, by);
        }
    }
    return true;
}
项目:Mutation-Testing-Framework    文件:JavaFile.java   
/**
 * Registers a mutant case for every numeric literal in the source: each
 * literal is replaced by itself with {@code required} appended.
 *
 * @param required suffix appended to each numeric literal to form the mutant
 */
private void constantOperations(String required) {
    ANTLRInputStream input = new ANTLRInputStream(fileContents.toString());
    JavaLexer lexer = new JavaLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);

    JavaParser parser = new JavaParser(tokens);

    // Parsing forces the token stream to be fully buffered; the tree itself
    // is not used. (Removed unused TokenRewriteStream and ParseTreeWalker
    // locals and the boxed Integer loop counter.)
    parser.compilationUnit();

    for (int i = 0; i < tokens.size(); i++) {
        if (NumberUtils.isNumber(tokens.get(i).getText())) {
            int startIndex = tokens.get(i).getStartIndex();
            int stopIndex = tokens.get(i).getStopIndex() + 1; // exclusive end
            String by = fileContents.substring(startIndex, stopIndex)
                    + required;
            mvc.add(startIndex, stopIndex, by);
        }
    }
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** Inserting before and after the middle token keeps both inserts. */
public void test2InsertBeforeAfterMiddleIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.insertBefore(1, "x");
    stream.insertAfter(1, "x");
    assertEquals("axbxc", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** A second replace at the same index supersedes the first. */
public void test2ReplaceMiddleIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.replace(1, "x");
    stream.replace(1, "y");
    assertEquals("ayc", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** An insert at index 0 survives two replaces at index 1. */
public void test2ReplaceMiddleIndex1InsertBefore() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.insertBefore(0, "_");
    stream.replace(1, "x");
    stream.replace(1, "y");
    assertEquals("_ayc", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** Delete after replace at the same index removes the token entirely. */
public void testReplaceThenDeleteMiddleIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.replace(1, "x");
    stream.delete(1);
    assertEquals("ac", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** A replace at the same index supersedes an earlier insert-before. */
public void testInsertThenReplaceSameIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.insertBefore(0, "0");
    stream.replace(0, "x"); // supercedes insert at 0
    assertEquals("xbc", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** Two insert-befores at one index emit in reverse issue order. */
public void test2InsertMiddleIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.insertBefore(1, "x");
    stream.insertBefore(1, "y");
    assertEquals("ayxbc", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** A replace at index 0 supersedes both earlier inserts at that index. */
public void test2InsertThenReplaceIndex0() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.insertBefore(0, "x");
    stream.insertBefore(0, "y");
    stream.replace(0, "z");
    assertEquals("zbc", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** Replace last token, then insert before it; insert precedes replacement. */
public void testReplaceThenInsertBeforeLastIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.replace(2, "x");
    stream.insertBefore(2, "y");
    assertEquals("abyx", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** A replace at the last index supersedes an earlier insert-before there. */
public void testInsertThenReplaceLastIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.insertBefore(2, "y");
    stream.replace(2, "x");
    assertEquals("abx", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** Replace last token then insert after it; insert follows replacement. */
public void testReplaceThenInsertAfterLastIndex() throws Exception {
    // Single-letter token grammar over input "abc".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abc"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.replace(2, "x");
    stream.insertAfter(2, "y");
    assertEquals("abxy", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** An insert at the left edge of a replaced range is kept, before it. */
public void testReplaceRangeThenInsertAtLeftEdge() throws Exception {
    // Single-letter token grammar over input "abcccba".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abcccba"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.replace(2, 4, "x");
    stream.insertBefore(2, "y");
    assertEquals("abyxba", stream.toString());
}
项目:jFuzzyLogic    文件:TestTokenRewriteStream.java   
/** An insert-after at the right edge of a replaced range is kept, after it. */
public void testReplaceRangeThenInsertAfterRightEdge() throws Exception {
    // Single-letter token grammar over input "abcccba".
    Grammar grammar = new Grammar(
        "lexer grammar t;\n"
            + "A : 'a';\n"
            + "B : 'b';\n"
            + "C : 'c';\n");
    Interpreter lexer = new Interpreter(grammar, new ANTLRStringStream("abcccba"));
    TokenRewriteStream stream = new TokenRewriteStream(lexer);
    stream.LT(1); // force the token buffer to load
    stream.replace(2, 4, "x");
    stream.insertAfter(4, "y");
    assertEquals("abxyba", stream.toString());
}