/**
 * Moves the scanner to the given offset and returns the id of the next token found there.
 * @param offset Position in the source to begin scanning at.
 * @param ignoreComments If set, comments will be overread.
 * @return Returns the token id.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int readNext(int offset, boolean ignoreComments) throws CoreException {
	setOffset(offset);
	return readNext(ignoreComments);
}
/**
 * Positions the scanner at the given offset, then consumes tokens until the
 * requested token has been read.
 * @param tok Id of the token to stop at.
 * @param offset Position in the source to begin scanning at.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public void readToToken(int tok, int offset) throws CoreException {
	setOffset(offset);
	readToToken(tok);
}
/**
 * Moves the scanner to the given offset and returns the id of the next token found there.
 * @param offset Position in the source to begin scanning at.
 * @param ignoreComments If set, comments will be overread.
 * @return Returns the token id.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int readNext(int offset, boolean ignoreComments) throws CoreException {
	setOffset(offset);
	return readNext(ignoreComments);
}
/**
 * Positions the scanner at the given offset, then consumes tokens until the
 * requested token has been read.
 * @param tok Id of the token to stop at.
 * @param offset Position in the source to begin scanning at.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public void readToToken(int tok, int offset) throws CoreException {
	setOffset(offset);
	readToToken(tok);
}
/**
 * Moves the scanner to the given offset and returns the id of the next token found there.
 * @param offset Position in the source to begin scanning at.
 * @param ignoreComments If set, comments will be overread.
 * @return Returns the token id.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int readNext(int offset, boolean ignoreComments) throws CoreException {
	setOffset(offset);
	return readNext(ignoreComments);
}
/**
 * Positions the scanner at the given offset, then consumes tokens until the
 * requested token has been read.
 * @param tok Id of the token to stop at.
 * @param offset Position in the source to begin scanning at.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public void readToToken(int tok, int offset) throws CoreException {
	setOffset(offset);
	readToToken(tok);
}
/**
 * Moves the scanner to the given offset and returns the id of the next token found there.
 * @param offset Position in the source to begin scanning at.
 * @param ignoreComments If set, comments will be overread.
 * @return Returns the token id.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int readNext(int offset, boolean ignoreComments) throws CoreException {
	setOffset(offset);
	return readNext(ignoreComments);
}
/**
 * Positions the scanner at the given offset, then consumes tokens until the
 * requested token has been read.
 * @param tok Id of the token to stop at.
 * @param offset Position in the source to begin scanning at.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public void readToToken(int tok, int offset) throws CoreException {
	setOffset(offset);
	readToToken(tok);
}
/**
 * Positions the scanner at the given offset, then consumes tokens until the
 * requested token has been read.
 * @param tok Id of the token to stop at.
 * @param offset Position in the source to begin scanning at.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public void readToToken(int tok, int offset) throws CoreException {
	setOffset(offset);
	readToToken(tok);
}
/**
 * Moves the scanner to the given offset and returns the id of the next token found there.
 * @param offset Position in the source to begin scanning at.
 * @param ignoreComments If set, comments will be overread.
 * @return Returns the token id.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int readNext(int offset, boolean ignoreComments) throws CoreException {
	setOffset(offset);
	return readNext(ignoreComments);
}
/**
 * Positions the scanner at the given offset, then consumes tokens until the
 * requested token has been read.
 * @param tok Id of the token to stop at.
 * @param offset Position in the source to begin scanning at.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public void readToToken(int tok, int offset) throws CoreException {
	setOffset(offset);
	readToToken(tok);
}
/**
 * Moves the scanner to the given offset and returns the id of the next token found there.
 * @param offset Position in the source to begin scanning at.
 * @param ignoreComments If set, comments will be overread.
 * @return Returns the token id.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int readNext(int offset, boolean ignoreComments) throws CoreException {
	setOffset(offset);
	return readNext(ignoreComments);
}
/**
 * Scans forward from the given offset until the requested token is found and
 * answers the end offset of the token read immediately before it.
 * @param token Id of the token to search for.
 * @param startOffset Position in the source to begin scanning at.
 * @return Returns the end offset of the token directly preceding the found token,
 * or {@code startOffset} when the searched token is the first one read.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int getPreviousTokenEndOffset(int token, int startOffset) throws CoreException {
	setOffset(startOffset);
	int previousEnd= startOffset;
	// keep reading until the searched token shows up; remember where the last token ended
	for (int current= readNext(false); current != token; current= readNext(false)) {
		previousEnd= getCurrentEndOffset();
	}
	return previousEnd;
}
/**
 * Scans forward from the given offset until the requested token is found and
 * answers the end offset of the token read immediately before it.
 * @param token Id of the token to search for.
 * @param startOffset Position in the source to begin scanning at.
 * @return Returns the end offset of the token directly preceding the found token,
 * or {@code startOffset} when the searched token is the first one read.
 * @exception CoreException Thrown when the end of the file has been reached (code END_OF_FILE)
 * or a lexical error was detected while scanning (code LEXICAL_ERROR)
 */
public int getPreviousTokenEndOffset(int token, int startOffset) throws CoreException {
	setOffset(startOffset);
	int previousEnd= startOffset;
	// keep reading until the searched token shows up; remember where the last token ended
	for (int current= readNext(false); current != token; current= readNext(false)) {
		previousEnd= getCurrentEndOffset();
	}
	return previousEnd;
}
private void rewriteExtraDimensions(int oldDim, int newDim, int pos, TextEditGroup editGroup) { if (oldDim < newDim) { for (int i= oldDim; i < newDim; i++) { doTextInsert(pos, "[]", editGroup); //$NON-NLS-1$ } } else if (newDim < oldDim) { try { getScanner().setOffset(pos); for (int i= newDim; i < oldDim; i++) { getScanner().readToToken(TerminalTokens.TokenNameRBRACKET); } doTextRemove(pos, getScanner().getCurrentEndOffset() - pos, editGroup); } catch (CoreException e) { handleException(e); } } }
private void rewriteExtraDimensions(int oldDim, int newDim, int pos, TextEditGroup editGroup) { if (oldDim < newDim) { for (int i= oldDim; i < newDim; i++) { doTextInsert(pos, "[]", editGroup); //$NON-NLS-1$ } } else if (newDim < oldDim) { try { getScanner().setOffset(pos); for (int i= newDim; i < oldDim; i++) { getScanner().readToToken(TerminalTokens.TokenNameRBRACKET); } doTextRemove(pos, getScanner().getCurrentEndOffset() - pos, editGroup); } catch (CoreException e) { handleException(e); } } }
private void rewriteExtraDimensions(int oldDim, int newDim, int pos, TextEditGroup editGroup) { if (oldDim < newDim) { for (int i= oldDim; i < newDim; i++) { doTextInsert(pos, "[]", editGroup); //$NON-NLS-1$ } } else if (newDim < oldDim) { try { getScanner().setOffset(pos); for (int i= newDim; i < oldDim; i++) { getScanner().readToToken(TerminalTokens.TokenNameRBRACKET); } doTextRemove(pos, getScanner().getCurrentEndOffset() - pos, editGroup); } catch (CoreException e) { handleException(e); } } }
private void rewriteExtraDimensions(int oldDim, int newDim, int pos, TextEditGroup editGroup) { if (oldDim < newDim) { for (int i= oldDim; i < newDim; i++) { doTextInsert(pos, "[]", editGroup); //$NON-NLS-1$ } } else if (newDim < oldDim) { try { getScanner().setOffset(pos); for (int i= newDim; i < oldDim; i++) { getScanner().readToToken(TerminalTokens.TokenNameRBRACKET); } doTextRemove(pos, getScanner().getCurrentEndOffset() - pos, editGroup); } catch (CoreException e) { handleException(e); } } }
private void rewriteExtraDimensions(int oldDim, int newDim, int pos, TextEditGroup editGroup) { if (oldDim < newDim) { for (int i= oldDim; i < newDim; i++) { doTextInsert(pos, "[]", editGroup); //$NON-NLS-1$ } } else if (newDim < oldDim) { try { getScanner().setOffset(pos); for (int i= newDim; i < oldDim; i++) { getScanner().readToToken(TerminalTokens.TokenNameRBRACKET); } doTextRemove(pos, getScanner().getCurrentEndOffset() - pos, editGroup); } catch (CoreException e) { handleException(e); } } }
private void rewriteExtraDimensions(int oldDim, int newDim, int pos, TextEditGroup editGroup) { if (oldDim < newDim) { for (int i= oldDim; i < newDim; i++) { doTextInsert(pos, "[]", editGroup); //$NON-NLS-1$ } } else if (newDim < oldDim) { try { getScanner().setOffset(pos); for (int i= newDim; i < oldDim; i++) { getScanner().readToToken(TerminalTokens.TokenNameRBRACKET); } doTextRemove(pos, getScanner().getCurrentEndOffset() - pos, editGroup); } catch (CoreException e) { handleException(e); } } }