Java class org.yaml.snakeyaml.tokens.StreamEndToken: example source code
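The snippets below come from the SnakeYAML scanner and its tests: ScannerImpl.fetchStreamEnd() appends a StreamEndToken as the very last token once the input is exhausted, and the tests check for it explicitly. As a quick orientation, here is a minimal sketch, not taken from any of the projects below; the input string and the class name StreamEndDemo are illustrative, and the ScannerImpl(reader) constructor matches the SnakeYAML 1.x usage shown in the tests further down:

import org.yaml.snakeyaml.reader.StreamReader;
import org.yaml.snakeyaml.scanner.Scanner;
import org.yaml.snakeyaml.scanner.ScannerImpl;
import org.yaml.snakeyaml.tokens.StreamEndToken;
import org.yaml.snakeyaml.tokens.Token;

public class StreamEndDemo {
    public static void main(String[] args) {
        // Scan a tiny YAML document token by token (SnakeYAML 1.x constructor,
        // the same ScannerImpl(reader) call used in the tests below).
        Scanner scanner = new ScannerImpl(new StreamReader("key: value"));
        Token last = null;
        // checkToken() with no arguments reports whether any token is left.
        while (scanner.checkToken()) {
            last = scanner.getToken();
        }
        // fetchStreamEnd() guarantees the final token is STREAM-END.
        System.out.println(last instanceof StreamEndToken); // expected: true
    }
}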
Project: AndroidApktool
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: 5zig-TIMV-Plugin
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: snake-yaml
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: snake-yaml
File: ScannerImplTest.java
public void testGetToken() {
    String data = "string: abcd";
    StreamReader reader = new StreamReader(data);
    Scanner scanner = new ScannerImpl(reader);
    Mark dummy = new Mark("dummy", 0, 0, 0, "", 0);
    LinkedList<Token> etalonTokens = new LinkedList<Token>();
    etalonTokens.add(new StreamStartToken(dummy, dummy));
    etalonTokens.add(new BlockMappingStartToken(dummy, dummy));
    etalonTokens.add(new KeyToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("string", true, dummy, dummy, (char) 0));
    etalonTokens.add(new ValueToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("abcd", true, dummy, dummy, (char) 0));
    etalonTokens.add(new BlockEndToken(dummy, dummy));
    etalonTokens.add(new StreamEndToken(dummy, dummy));
    while (!etalonTokens.isEmpty() && scanner.checkToken(etalonTokens.get(0).getTokenId())) {
        assertEquals(etalonTokens.removeFirst(), scanner.getToken());
    }
    assertFalse("Must contain no more tokens: " + scanner.getToken(),
            scanner.checkToken(new Token.ID[0]));
}
Project: SubServers-2
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: snakeyaml
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: snakeyaml
File: ScannerImplTest.java
public void testGetToken() {
    String data = "string: abcd";
    StreamReader reader = new StreamReader(data);
    Scanner scanner = new ScannerImpl(reader);
    Mark dummy = new Mark("dummy", 0, 0, 0, "", 0);
    LinkedList<Token> etalonTokens = new LinkedList<Token>();
    etalonTokens.add(new StreamStartToken(dummy, dummy));
    etalonTokens.add(new BlockMappingStartToken(dummy, dummy));
    etalonTokens.add(new KeyToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("string", true, dummy, dummy, (char) 0));
    etalonTokens.add(new ValueToken(dummy, dummy));
    etalonTokens.add(new ScalarToken("abcd", true, dummy, dummy, (char) 0));
    etalonTokens.add(new BlockEndToken(dummy, dummy));
    etalonTokens.add(new StreamEndToken(dummy, dummy));
    while (!etalonTokens.isEmpty() && scanner.checkToken(etalonTokens.get(0).getTokenId())) {
        assertEquals(etalonTokens.removeFirst(), scanner.getToken());
    }
    assertFalse("Must contain no more tokens: " + scanner.getToken(),
            scanner.checkToken(new Token.ID[0]));
}
Project: TestTheTeacher
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: org.openntf.domino
File: ScannerImpl.java
private void fetchStreamEnd() {
    // Set the current indentation to -1.
    unwindIndent(-1);
    // Reset simple keys.
    removePossibleSimpleKey();
    this.allowSimpleKey = false;
    this.possibleSimpleKeys.clear();
    // Read the token.
    Mark mark = reader.getMark();
    // Add STREAM-END.
    Token token = new StreamEndToken(mark, mark);
    this.tokens.add(token);
    // The stream is finished.
    this.done = true;
}
Project: snake-yaml
File: CanonicalScanner.java
private void scan() {
    this.tokens.add(new StreamStartToken(mark, mark));
    boolean stop = false;
    while (!stop) {
        findToken();
        char ch = data.charAt(index);
        switch (ch) {
        case '\0':
            tokens.add(new StreamEndToken(mark, mark));
            stop = true;
            break;
        case '%':
            tokens.add(scanDirective());
            break;
        case '-':
            if ("---".equals(data.substring(index, index + 3))) {
                index += 3;
                tokens.add(new DocumentStartToken(mark, mark));
            }
            break;
        case '[':
            index++;
            tokens.add(new FlowSequenceStartToken(mark, mark));
            break;
        case '{':
            index++;
            tokens.add(new FlowMappingStartToken(mark, mark));
            break;
        case ']':
            index++;
            tokens.add(new FlowSequenceEndToken(mark, mark));
            break;
        case '}':
            index++;
            tokens.add(new FlowMappingEndToken(mark, mark));
            break;
        case '?':
            index++;
            tokens.add(new KeyToken(mark, mark));
            break;
        case ':':
            index++;
            tokens.add(new ValueToken(mark, mark));
            break;
        case ',':
            index++;
            tokens.add(new FlowEntryToken(mark, mark));
            break;
        case '*':
            tokens.add(scanAlias());
            break;
        case '&':
            tokens.add(scanAlias());
            break;
        case '!':
            tokens.add(scanTag());
            break;
        case '"':
            tokens.add(scanScalar());
            break;
        default:
            throw new CanonicalException("invalid token");
        }
    }
    scanned = true;
}
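CanonicalScanner lives in SnakeYAML's test sources and tokenizes documents written in canonical YAML form; the scan() loop above pushes a StreamEndToken when it reaches the '\0' character that terminates the buffered data. The rough usage sketch below is an assumption, not part of the listing above: the single-argument CanonicalScanner(String) constructor and the sample document are taken from memory of the test sources and may differ between versions.

import java.util.ArrayList;
import java.util.List;
import org.yaml.snakeyaml.tokens.StreamEndToken;
import org.yaml.snakeyaml.tokens.Token;

public class CanonicalScanDemo {
    public static void main(String[] args) {
        // A small document in canonical form: explicit tags, a flow mapping,
        // and an explicit "? key : value" entry.
        String canonical = "--- !!map { ? !!str \"name\" : !!str \"value\" }";
        // Assumed constructor; CanonicalScanner is a test helper, not public API.
        CanonicalScanner scanner = new CanonicalScanner(canonical);
        List<Token> tokens = new ArrayList<Token>();
        while (scanner.checkToken()) {
            tokens.add(scanner.getToken());
        }
        // scan() appends a StreamEndToken when it hits the trailing '\0'.
        Token last = tokens.get(tokens.size() - 1);
        System.out.println(last instanceof StreamEndToken); // expected: true
    }
}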
Project: snakeyaml
File: CanonicalScanner.java
private void scan() {
    this.tokens.add(new StreamStartToken(mark, mark));
    boolean stop = false;
    while (!stop) {
        findToken();
        char ch = data.charAt(index);
        switch (ch) {
        case '\0':
            tokens.add(new StreamEndToken(mark, mark));
            stop = true;
            break;
        case '%':
            tokens.add(scanDirective());
            break;
        case '-':
            if ("---".equals(data.substring(index, index + 3))) {
                index += 3;
                tokens.add(new DocumentStartToken(mark, mark));
            }
            break;
        case '[':
            index++;
            tokens.add(new FlowSequenceStartToken(mark, mark));
            break;
        case '{':
            index++;
            tokens.add(new FlowMappingStartToken(mark, mark));
            break;
        case ']':
            index++;
            tokens.add(new FlowSequenceEndToken(mark, mark));
            break;
        case '}':
            index++;
            tokens.add(new FlowMappingEndToken(mark, mark));
            break;
        case '?':
            index++;
            tokens.add(new KeyToken(mark, mark));
            break;
        case ':':
            index++;
            tokens.add(new ValueToken(mark, mark));
            break;
        case ',':
            index++;
            tokens.add(new FlowEntryToken(mark, mark));
            break;
        case '*':
            tokens.add(scanAlias());
            break;
        case '&':
            tokens.add(scanAlias());
            break;
        case '!':
            tokens.add(scanTag());
            break;
        case '"':
            tokens.add(scanScalar());
            break;
        default:
            throw new CanonicalException("invalid token");
        }
    }
    scanned = true;
}