File: CharScanner.java
Doc: API Doc
Category: Glassfish v2 API
Size: 12027
Date: Wed Aug 30 15:34:06 BST 2006
Package: persistence.antlr

CharScanner

public abstract class CharScanner extends Object implements TokenStream

Fields Summary
static final char
NO_CHAR
public static final char
EOF_CHAR
protected ANTLRStringBuffer
text
protected boolean
saveConsumedInput
protected Class
tokenObjectClass
protected boolean
caseSensitive
protected boolean
caseSensitiveLiterals
protected Hashtable
literals
protected int
tabsize
Tab chars are handled by tab() according to this value; override method to do anything weird with tabs.
protected Token
_returnToken
protected ANTLRHashString
hashString
protected LexerSharedInputState
inputState
protected boolean
commitToPath
Used during filter mode to indicate that path is desired. A subsequent scan error will report an error as usual if acceptPath=true;
protected int
traceDepth
Used to keep track of indent depth for traceIn/Out
Constructors Summary
public CharScanner()


      
        text = new ANTLRStringBuffer();
        hashString = new ANTLRHashString(this);
        setTokenObjectClass("persistence.antlr.CommonToken");
    
public CharScanner(InputBuffer cb)

 // SAS: use generic buffer
        this();
        inputState = new LexerSharedInputState(cb);
    
public CharScanner(LexerSharedInputState sharedState)

        this();
        inputState = sharedState;
    
Methods Summary
public char LA(int i)

        if (caseSensitive) {
            return inputState.input.LA(i);
        }
        else {
            return toLower(inputState.input.LA(i));
        }
    
public void append(char c)

        if (saveConsumedInput) {
            text.append(c);
        }
    
public void append(java.lang.String s)

        if (saveConsumedInput) {
            text.append(s);
        }
    
public void commit()

        inputState.input.commit();
    
public void consume()

        if (inputState.guessing == 0) {
            char c = LA(1);
            if (caseSensitive) {
                append(c);
            }
            else {
                // use input.LA(), not LA(), to get original case
                // CharScanner.LA() would toLower it.
                append(inputState.input.LA(1));
            }
            if (c == '\t") {
                tab();
            }
            else {
                inputState.column++;
            }
        }
        inputState.input.consume();
    
public void consumeUntil(int c)
Consume chars until one matches the given char

        while (LA(1) != EOF_CHAR && LA(1) != c) {
            consume();
        }
    
public void consumeUntil(persistence.antlr.collections.impl.BitSet set)
Consume chars until one matches the given set

        while (LA(1) != EOF_CHAR && !set.member(LA(1))) {
            consume();
        }
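A minimal sketch of how the consumeUntil() methods are typically used from a hand-written rule: skip the rest of a "//" comment. The subclass name is hypothetical, and the throws clauses assume the standard ANTLR 2 signatures, which this listing omits.

    import persistence.antlr.CharScanner;
    import persistence.antlr.CharStreamException;
    import persistence.antlr.RecognitionException;

    abstract class CommentSkippingScanner extends CharScanner {
        // Consume a "//" comment up to, but not including, the newline
        // (consumeUntil also stops at EOF_CHAR if the file ends first).
        void skipLineComment() throws RecognitionException, CharStreamException {
            match("//");
            consumeUntil('\n');
        }
    }

The newline itself is left in the stream so the calling rule can match it and invoke newline() to keep line/column tracking accurate.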
    
public boolean getCaseSensitive()

        return caseSensitive;
    
public final boolean getCaseSensitiveLiterals()

        return caseSensitiveLiterals;
    
public int getColumn()

        return inputState.column;
    
public boolean getCommitToPath()

        return commitToPath;
    
public java.lang.String getFilename()

        return inputState.filename;
    
public persistence.antlr.InputBuffer getInputBuffer()

        return inputState.input;
    
public persistence.antlr.LexerSharedInputState getInputState()

        return inputState;
    
public int getLine()

        return inputState.line;
    
public int getTabSize()

        return tabsize;
    
public java.lang.String getText()
return a copy of the current text buffer

        return text.toString();
    
public persistence.antlr.Token getTokenObject()

        return _returnToken;
    
protected persistence.antlr.Token makeToken(int t)

        try {
            Token tok = (Token)tokenObjectClass.newInstance();
            tok.setType(t);
            tok.setColumn(inputState.tokenStartColumn);
            tok.setLine(inputState.tokenStartLine);
            // tracking real start line now: tok.setLine(inputState.line);
            return tok;
        }
        catch (InstantiationException ie) {
            panic("can't instantiate token: " + tokenObjectClass);
        }
        catch (IllegalAccessException iae) {
            panic("Token class is not accessible" + tokenObjectClass);
        }
        return Token.badToken;
    
public int mark()

        return inputState.input.mark();
    
public void match(char c)

        if (LA(1) != c) {
            throw new MismatchedCharException(LA(1), c, false, this);
        }
        consume();
    
public void match(persistence.antlr.collections.impl.BitSet b)

        if (!b.member(LA(1))) {
            throw new MismatchedCharException(LA(1), b, false, this);
        }
        else {
            consume();
        }
    
public void match(java.lang.String s)

        int len = s.length();
        for (int i = 0; i < len; i++) {
            if (LA(1) != s.charAt(i)) {
                throw new MismatchedCharException(LA(1), s.charAt(i), false, this);
            }
            consume();
        }
    
public void matchNot(char c)

        if (LA(1) == c) {
            throw new MismatchedCharException(LA(1), c, true, this);
        }
        consume();
    
public void matchRange(char c1, char c2)

        if (LA(1) < c1 || LA(1) > c2) throw new MismatchedCharException(LA(1), c1, c2, false, this);
        consume();
    
public void newline()

        inputState.line++;
        inputState.column = 1;
    
public void panic()

see #panic(String)

        System.err.println("CharScanner: panic");
        System.exit(1);
    
public void panic(java.lang.String s)
This method is executed by ANTLR internally when it detects an illegal state that cannot be recovered from. The default implementation of this method calls {@link java.lang.System.exit(int)} and writes directly to {@link java.lang.System.err}, which is usually not appropriate when a translator is embedded into a larger application. It is highly recommended that this method be overridden to handle the error in a way appropriate for your application (e.g. throw an unchecked exception).

        System.err.println("CharScanner; panic: " + s);
        System.exit(1);
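Because the default panic(String) terminates the JVM, embedded applications usually override it as the note above recommends. A minimal sketch, assuming a hand-written subclass; PanicSafeScanner and LexerPanicException are hypothetical names, not part of this API:

    import persistence.antlr.CharScanner;

    class LexerPanicException extends RuntimeException {
        LexerPanicException(String msg) { super(msg); }
    }

    abstract class PanicSafeScanner extends CharScanner {
        // Surface the illegal state as an unchecked exception instead of
        // calling System.exit(1), so the host application can recover.
        public void panic(String s) {
            throw new LexerPanicException("CharScanner panic: " + s);
        }
    }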
    
public void reportError(persistence.antlr.RecognitionException ex)
Parser error-reporting function can be overridden in subclass

        System.err.println(ex);
    
public void reportError(java.lang.String s)
Parser error-reporting function can be overridden in subclass

        if (getFilename() == null) {
            System.err.println("error: " + s);
        }
        else {
            System.err.println(getFilename() + ": error: " + s);
        }
    
public void reportWarning(java.lang.String s)
Parser warning-reporting function can be overridden in subclass

        if (getFilename() == null) {
            System.err.println("warning: " + s);
        }
        else {
            System.err.println(getFilename() + ": warning: " + s);
        }
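Since the default report methods just print to System.err, a host application that wants to collect diagnostics can override them as noted above. A minimal sketch; CollectingScanner is a hypothetical name:

    import java.util.ArrayList;
    import java.util.List;
    import persistence.antlr.CharScanner;
    import persistence.antlr.RecognitionException;

    abstract class CollectingScanner extends CharScanner {
        final List<String> errors = new ArrayList<String>();
        final List<String> warnings = new ArrayList<String>();

        // Record diagnostics instead of writing them to System.err.
        public void reportError(RecognitionException ex) {
            errors.add(ex.toString());
        }

        public void reportError(String s) {
            errors.add(getFilename() == null ? s : getFilename() + ": " + s);
        }

        public void reportWarning(String s) {
            warnings.add(getFilename() == null ? s : getFilename() + ": " + s);
        }
    }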
    
public void resetText()

        text.setLength(0);
        inputState.tokenStartColumn = inputState.column;
        inputState.tokenStartLine = inputState.line;
    
public void rewind(int pos)

		 inputState.input.rewind(pos);
		 // RK: should not be here, it is messing up column calculation
		 // setColumn(inputState.tokenStartColumn);
    
public void setCaseSensitive(boolean t)

        caseSensitive = t;
    
public void setColumn(int c)

        inputState.column = c;
    
public void setCommitToPath(boolean commit)

        commitToPath = commit;
    
public void setFilename(java.lang.String f)

        inputState.filename = f;
    
public void setInputState(persistence.antlr.LexerSharedInputState state)

        inputState = state;
    
public void setLine(int line)

        inputState.line = line;
    
public void setTabSize(int size)

	  	tabsize = size;
	
public void setText(java.lang.String s)

        resetText();
        text.append(s);
    
public void setTokenObjectClass(java.lang.String cl)

        try {
            tokenObjectClass = Class.forName(cl);
        }
        catch (ClassNotFoundException ce) {
            panic("ClassNotFoundException: " + cl);
        }
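setTokenObjectClass() works together with makeToken(), which instantiates the class reflectively, so the registered class needs a public no-argument constructor. A minimal sketch of a custom token type; MyToken is a hypothetical name and would normally live in your own package:

    import persistence.antlr.CommonToken;

    public class MyToken extends CommonToken {
        public int endColumn;      // extra field filled in by your lexer rules

        public MyToken() {         // required by Class.newInstance() in makeToken()
            super();
        }
    }

    // lexer setup; the argument must be the fully qualified name of the class:
    //     lexer.setTokenObjectClass("MyToken");

Note that makeToken() only sets the type, line, and column, so any extra fields have to be populated by the lexer rules or an overridden makeToken().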
    
public void tab()
advance the current column number by an appropriate amount according to tab size. This method is called from consume().

        int c = getColumn();
		int nc = ( ((c-1)/tabsize) + 1) * tabsize + 1;  // calculate tab stop
		setColumn( nc );
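For example, with the usual tab size of 8, a tab at column 5 advances to column 9 (((5-1)/8 + 1) * 8 + 1 = 9) and a tab at column 9 advances to column 17. A minimal sketch of the kind of override the tabsize field comment mentions; FixedTabScanner is a hypothetical name:

    abstract class FixedTabScanner extends persistence.antlr.CharScanner {
        // Advance by exactly tabsize columns rather than snapping to the
        // next tab stop.
        public void tab() {
            setColumn(getColumn() + getTabSize());
        }
    }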
    
public int testLiteralsTable(int ttype)

        hashString.setBuffer(text.getBuffer(), text.length());
        Integer literalsIndex = (Integer)literals.get(hashString);
        if (literalsIndex != null) {
            ttype = literalsIndex.intValue();
        }
        return ttype;
    
public int testLiteralsTable(java.lang.String text, int ttype)
Test the text passed in against the literals table. Override this method to perform a different literals test. This is used primarily when you want to test a portion of a token.

        ANTLRHashString s = new ANTLRHashString(text, this);
        Integer literalsIndex = (Integer)literals.get(s);
        if (literalsIndex != null) {
            ttype = literalsIndex.intValue();
        }
        return ttype;
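A minimal sketch of the kind of override described above: check only the part of the token before a trailing ':' against the literals table, e.g. for label-like tokens. LabelAwareScanner is a hypothetical name, and the literals Hashtable is assumed to have been populated, as a generated lexer normally does:

    abstract class LabelAwareScanner extends persistence.antlr.CharScanner {
        public int testLiteralsTable(String text, int ttype) {
            int colon = text.indexOf(':');
            if (colon > 0) {
                text = text.substring(0, colon);   // test only the leading portion
            }
            return super.testLiteralsTable(text, ttype);
        }
    }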
    
public char toLower(char c)

        return Character.toLowerCase(c);
    
public void traceIn(java.lang.String rname)

        traceDepth += 1;
        traceIndent();
        System.out.println("> lexer " + rname + "; c==" + LA(1));
    
public void traceIndent()

        for (int i = 0; i < traceDepth; i++)
            System.out.print(" ");
    
public void traceOut(java.lang.String rname)

        traceIndent();
        System.out.println("< lexer " + rname + "; c==" + LA(1));
        traceDepth -= 1;
    
public void uponEOF()
This method is called by YourLexer.nextToken() when the lexer has hit EOF condition. EOF is NOT a character. This method is not called if EOF is reached during syntactic predicate evaluation or during evaluation of normal lexical rules, which presumably would be an IOException. This traps the "normal" EOF condition. uponEOF() is called after the complete evaluation of the previous token and only if your parser asks for another token beyond that last non-EOF token. You might want to throw token or char stream exceptions like: "Heh, premature eof" or a retry stream exception ("I found the end of this file, go back to referencing file").
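A minimal sketch of overriding uponEOF() to reject input that ends inside an unterminated construct, along the lines suggested above. StrictScanner and the openComments counter are hypothetical, and the throws clause assumes the signature from the standard ANTLR 2 sources, which this listing omits:

    import persistence.antlr.CharScanner;
    import persistence.antlr.CharStreamException;
    import persistence.antlr.TokenStreamException;

    abstract class StrictScanner extends CharScanner {
        protected int openComments = 0;   // maintained by your comment rules

        public void uponEOF() throws TokenStreamException, CharStreamException {
            if (openComments > 0) {
                throw new TokenStreamException("premature EOF: unterminated comment");
            }
        }
    }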