From 26ad9ae426647b6cb60fc7862c059dedfca769d8 Mon Sep 17 00:00:00 2001
From: jmramosr
Date: Wed, 9 Jan 2019 17:21:21 +0100
Subject: [PATCH] Modified files for CDT 9.5.0 to use with Pro*C plugin for Eclipse

---
 README.md                                     |  81 +------
 org.eclipse.cdt.core/META-INF/MANIFEST.MF     |   2 +-
 .../parser/scanner/ASTInclusionStatement.java | 219 ++++++++++++++++++
 .../parser/scanner/ASTPreprocessorNode.java   | 200 ----------------
 .../core/parser/scanner/CPreprocessor.java    |  30 +--
 .../internal/core/parser/scanner/Lexer.java   |  46 ++--
 .../core/parser/scanner/ScannerContext.java   |   4 +-
 7 files changed, 263 insertions(+), 319 deletions(-)
 create mode 100644 org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTInclusionStatement.java

diff --git a/README.md b/README.md
index c0db642eb..0a93fbd63 100644
--- a/README.md
+++ b/README.md
@@ -1,82 +1,7 @@
 # Pro-C-for-Eclipse: Pro*C plugin for Eclipse.
-This repository contains a complete Eclipse workspace to successfully compile the Pro*C plugin.
-I don't code in Java, but you can send pull requests to make this plugin better.
+## Branch for CDT 9.5.0
-Original work by https://github.com/buntatsu/cdt-proc. The first commits are, indeed, from the original repo.
+This repository branch contains the code used to develop the Pro*C plugin for Eclipse using CDT 9.5.0.
-## How to compile the plugin
-
-🔘 You will need a **computer** with **JDK** installed. I suggest a JDK version I have tested and know works; you can simply ignore the suggestion and use the most recent **JDK**. I tested on Windows, but you can use macOS or any Linux flavor you want.
-
-🔘 Download from https://www.eclipse.org/downloads/packages/release the package you want. Ensure you choose the **Eclipse IDE for Eclipse Committers** version.
-
-🔘 Open your **Eclipse IDE for Eclipse Committers**, create an empty workspace and install the version of CDT you want to use. You must install at least the main features. Restart the program.
-
-🔘 Go here and select the branch whose name contains your CDT version.
-
-🔘 Download the branch. It contains a complete Eclipse workspace.
-
-🔘 Open your **Eclipse IDE for Eclipse Committers** and open the workspace. Wait until the workspace finishes building; it probably does not have automatic build enabled.
-
-🔘 In any project, select the export option -> Deployable plugins and fragments -> select all -> Directory -> select the directory -> click Finish.
-
-🔘 Wait until it completes, and your plugin is ready in the folder you provided!
-
-## How to use the compiled plugin
-
-🔘 Download from https://www.eclipse.org/downloads/packages/release the package you want and install it. You will need the **Eclipse IDE for C/C++ Developers** version.
-
-🔘 When you finish the installation, go to the *plugins* folder and find a file whose name starts like this: _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_. X.Y.Z stands for the version. Note down the date.
-
-🔘 Go here and download the release with that version, if I have it.
-
-🔘 If I don't have it, submit an issue. Submit the name of your _org.eclipse.cdt_X.Y.Z.YYYYMMDDhhmm.jar_ file, not the _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_ one!
-
-🔘 Replace the _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_ and copy the other ones. Back up the _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_ file first if you don't feel confident.
-
-## How to upgrade the plugin with Eclipse CDT
-
-🔘 Update CDT to the release version you want. This step assumes you have the **Eclipse IDE for C/C++ Developers** version of your package.
If you have installed a previous Pro*C plugin for Eclipse in this version, restore the original _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_.
-
-🔘 Search your plugins folder for the latest _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_. You will have at least 2 versions of the _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_ file.
-
-🔘 Search the releases list for the latest CDT version you have and download it.
-
-🔘 Replace the _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_ and copy the other ones. Back up the _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_ file first if you don't feel confident.
-
-## Version List
-
-| Package   | Compiled package name                  |
-|-----------|----------------------------------------|
-| CDT_9_6_0 | org.eclipse.cdt_9.6.0.201811241055.jar |
-| CDT_9_5_5 | org.eclipse.cdt_9.5.5.201811180605.jar |
-| CDT_9_5_4 | org.eclipse.cdt_9.5.4.201810050005.jar |
-| CDT_9_5_3 | org.eclipse.cdt_9.5.3.201809121146.jar |
-| CDT_9_5_2 | org.eclipse.cdt_9.5.2.201807181141.jar |
-| CDT_9_5_1 | org.eclipse.cdt_9.5.1.201807051742.jar |
-| CDT_9_5_0 | org.eclipse.cdt_9.5.0.201806170908.jar |
-| CDT_9_4_3 | org.eclipse.cdt_9.4.3.201802261533.jar |
-| CDT_9_4_2 | org.eclipse.cdt_9.4.2.201802122019.jar |
-| CDT_9_4_1 | org.eclipse.cdt_9.3.0.201801130900.jar |
-| CDT_9_4_0 | org.eclipse.cdt_9.3.0.201712020452.jar |
-
-## Warnings
-
-⬜️ Be careful with CDT_9_4_1 and CDT_9_4_0! They use the same names with different dates. That's why you should note down the release date; it serves as a reference number for me.
-
-## TODO
-
-✔️ Get the version numbers of each package and make the version list.
-
-✔️ Rename the original plugin.
-
-❌️ Upload every version.
-
-❌️ Bug list from everyday use.
-
-❌️ Use this plugin without modifying the original _org.eclipse.cdt.core_X.Y.Z.YYYYMMDDhhmm.jar_.
-
-## JDK Used:
-
-All CDT versions use JDK 1.8 to compile, as you can see in the MANIFEST.MF files of each version.
+For more info, see the master branch.
\ No newline at end of file
diff --git a/org.eclipse.cdt.core/META-INF/MANIFEST.MF b/org.eclipse.cdt.core/META-INF/MANIFEST.MF
index b03657253..2d5c7f743 100644
--- a/org.eclipse.cdt.core/META-INF/MANIFEST.MF
+++ b/org.eclipse.cdt.core/META-INF/MANIFEST.MF
@@ -2,7 +2,7 @@ Manifest-Version: 1.0
 Bundle-ManifestVersion: 2
 Bundle-Name: %pluginName
 Bundle-SymbolicName: org.eclipse.cdt.core; singleton:=true
-Bundle-Version: 6.5.0.qualifier
+Bundle-Version: 6.5.0.201806170908
 Bundle-Activator: org.eclipse.cdt.core.CCorePlugin
 Bundle-Vendor: %providerName
 Bundle-Localization: plugin
diff --git a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTInclusionStatement.java b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTInclusionStatement.java
new file mode 100644
index 000000000..8e122d965
--- /dev/null
+++ b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTInclusionStatement.java
@@ -0,0 +1,219 @@
+/*******************************************************************************
+ * Copyright (c) 2007, 2015 Wind River Systems, Inc. and others.
+ * All rights reserved.
This program and the accompanying materials + * are made available under the terms of the Eclipse Public License v1.0 + * which accompanies this distribution, and is available at + * http://www.eclipse.org/legal/epl-v10.html + * + * Contributors: + * Markus Schorn - initial API and implementation + * Sergey Prigogin (Google) + *******************************************************************************/ +package org.eclipse.cdt.internal.core.parser.scanner; + +import org.eclipse.cdt.core.dom.ast.IASTName; +import org.eclipse.cdt.core.dom.ast.IASTPreprocessorIncludeStatement; +import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit; +import org.eclipse.cdt.core.dom.ast.IFileNomination; +import org.eclipse.cdt.core.index.IIndexFile; +import org.eclipse.cdt.core.parser.ISignificantMacros; +import org.eclipse.cdt.internal.core.dom.parser.ASTNodeSpecification; +import org.eclipse.core.runtime.CoreException; + + +public class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreprocessorIncludeStatement { + private static final ISignificantMacros[] NO_VERSIONS = {}; + + private final ASTPreprocessorName fName; + private final String fPath; + private final boolean fIsResolved; + private final boolean fIsSystemInclude; + private final boolean fFoundByHeuristics; + private final boolean fIncludedFileExported; + private final IFileNomination fNominationDelegate; + private boolean fPragmaOnce; + private boolean fCreatesAST; + private ISignificantMacros fSignificantMacros; + private ISignificantMacros[] fLoadedVersions = NO_VERSIONS; + private long fIncludedFileContentsHash; + private long fIncludedFileTimestamp = -1; + private long fIncludedFileSize; + private long fIncludedFileReadTime; + private boolean fErrorInIncludedFile; + + public ASTInclusionStatement(IASTTranslationUnit parent, + int startNumber, int nameStartNumber, int nameEndNumber, int endNumber, + char[] headerName, String filePath, boolean userInclude, boolean active, boolean heuristic, + boolean exportedFile, IFileNomination nominationDelegate) { + super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber); + fName= new ASTPreprocessorName(this, IASTPreprocessorIncludeStatement.INCLUDE_NAME, + nameStartNumber, nameEndNumber, headerName, null); + fPath= filePath == null ? 
"" : filePath; //$NON-NLS-1$ + fIsResolved= filePath != null; + fIsSystemInclude= !userInclude; + fFoundByHeuristics= heuristic; + fSignificantMacros= ISignificantMacros.NONE; + fNominationDelegate= nominationDelegate; + fIncludedFileExported= exportedFile; + if (!active) { + setInactive(); + } + } + + @Override + public IASTName getName() { + return fName; + } + + @Override + public String getPath() { + return fPath; + } + + @Override + public boolean isResolved() { + return fIsResolved; + } + + @Override + public boolean isSystemInclude() { + return fIsSystemInclude; + } + + @Override + void findNode(ASTNodeSpecification nodeSpec) { + super.findNode(nodeSpec); + nodeSpec.visit(fName); + } + + @Override + public boolean isResolvedByHeuristics() { + return fFoundByHeuristics; + } + + @Override + public boolean hasPragmaOnceSemantics() { + if (fNominationDelegate != null) { + try { + return fNominationDelegate.hasPragmaOnceSemantics(); + } catch (CoreException e) { + } + } + return fPragmaOnce; + } + + public void setPragamOnceSemantics(boolean value) { + assert fNominationDelegate == null; + fPragmaOnce= value; + } + + @Override + public ISignificantMacros getSignificantMacros() { + if (fNominationDelegate != null) { + try { + return fNominationDelegate.getSignificantMacros(); + } catch (CoreException e) { + } + } + return fSignificantMacros; + } + + public void setSignificantMacros(ISignificantMacros sig) { + assert sig != null; + assert fNominationDelegate == null; + fSignificantMacros= sig; + } + + public void setLoadedVersions(ISignificantMacros[] versions) { + fLoadedVersions= versions; + } + + @Override + public ISignificantMacros[] getLoadedVersions() { + return fLoadedVersions; + } + + @Override + public long getIncludedFileTimestamp() { + if (fNominationDelegate != null) { + return 0; + } + return fIncludedFileTimestamp; + } + + public void setIncludedFileTimestamp(long timestamp) { + assert fNominationDelegate == null; + fIncludedFileTimestamp= timestamp; + } + + @Override + public long getIncludedFileReadTime() { + if (fNominationDelegate != null) { + return 0; + } + return fIncludedFileReadTime; + } + + public void setIncludedFileReadTime(long time) { + assert fNominationDelegate == null; + fIncludedFileReadTime= time; + } + + @Override + public long getIncludedFileSize() { + if (fNominationDelegate != null) { + return 0; + } + return fIncludedFileSize; + } + + public void setIncludedFileSize(long size) { + assert fNominationDelegate == null; + fIncludedFileSize= size; + } + + @Override + public long getIncludedFileContentsHash() { + if (fNominationDelegate != null) { + return 0; + } + return fIncludedFileContentsHash; + } + + public void setIncludedFileContentsHash(long hash) { + assert fNominationDelegate == null; + fCreatesAST= true; + fIncludedFileContentsHash= hash; + } + + @Override + public boolean isErrorInIncludedFile() { + if (fNominationDelegate != null) { + return false; + } + return fErrorInIncludedFile; + } + + public void setErrorInIncludedFile(boolean error) { + assert fNominationDelegate == null; + fErrorInIncludedFile= error; + } + + @Override + public boolean isIncludedFileExported() { + return fIncludedFileExported; + } + + @Override + public boolean createsAST() { + return fCreatesAST; + } + + @Override + public IIndexFile getImportedIndexFile() { + if (fNominationDelegate instanceof IIndexFile) + return (IIndexFile) fNominationDelegate; + + return null; + } +} diff --git 
a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTPreprocessorNode.java b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTPreprocessorNode.java index 21a503d54..00efa914b 100644 --- a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTPreprocessorNode.java +++ b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ASTPreprocessorNode.java @@ -43,15 +43,11 @@ import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit.IDependencyTree; import org.eclipse.cdt.core.dom.ast.IASTTranslationUnit.IDependencyTree.IASTInclusionNode; import org.eclipse.cdt.core.dom.ast.IBinding; -import org.eclipse.cdt.core.dom.ast.IFileNomination; import org.eclipse.cdt.core.dom.ast.IMacroBinding; -import org.eclipse.cdt.core.index.IIndexFile; -import org.eclipse.cdt.core.parser.ISignificantMacros; import org.eclipse.cdt.core.parser.IToken; import org.eclipse.cdt.core.parser.util.CharArrayUtils; import org.eclipse.cdt.internal.core.dom.parser.ASTNode; import org.eclipse.cdt.internal.core.dom.parser.ASTNodeSpecification; -import org.eclipse.core.runtime.CoreException; /** * Models various AST-constructs obtained from the preprocessor. @@ -282,202 +278,6 @@ public boolean isPragmaOperator() { } } -class ASTInclusionStatement extends ASTPreprocessorNode implements IASTPreprocessorIncludeStatement { - private static final ISignificantMacros[] NO_VERSIONS = {}; - - private final ASTPreprocessorName fName; - private final String fPath; - private final boolean fIsResolved; - private final boolean fIsSystemInclude; - private final boolean fFoundByHeuristics; - private final boolean fIncludedFileExported; - private final IFileNomination fNominationDelegate; - private boolean fPragmaOnce; - private boolean fCreatesAST; - private ISignificantMacros fSignificantMacros; - private ISignificantMacros[] fLoadedVersions = NO_VERSIONS; - private long fIncludedFileContentsHash; - private long fIncludedFileTimestamp = -1; - private long fIncludedFileSize; - private long fIncludedFileReadTime; - private boolean fErrorInIncludedFile; - - public ASTInclusionStatement(IASTTranslationUnit parent, - int startNumber, int nameStartNumber, int nameEndNumber, int endNumber, - char[] headerName, String filePath, boolean userInclude, boolean active, boolean heuristic, - boolean exportedFile, IFileNomination nominationDelegate) { - super(parent, IASTTranslationUnit.PREPROCESSOR_STATEMENT, startNumber, endNumber); - fName= new ASTPreprocessorName(this, IASTPreprocessorIncludeStatement.INCLUDE_NAME, - nameStartNumber, nameEndNumber, headerName, null); - fPath= filePath == null ? 
"" : filePath; //$NON-NLS-1$ - fIsResolved= filePath != null; - fIsSystemInclude= !userInclude; - fFoundByHeuristics= heuristic; - fSignificantMacros= ISignificantMacros.NONE; - fNominationDelegate= nominationDelegate; - fIncludedFileExported= exportedFile; - if (!active) { - setInactive(); - } - } - - @Override - public IASTName getName() { - return fName; - } - - @Override - public String getPath() { - return fPath; - } - - @Override - public boolean isResolved() { - return fIsResolved; - } - - @Override - public boolean isSystemInclude() { - return fIsSystemInclude; - } - - @Override - void findNode(ASTNodeSpecification nodeSpec) { - super.findNode(nodeSpec); - nodeSpec.visit(fName); - } - - @Override - public boolean isResolvedByHeuristics() { - return fFoundByHeuristics; - } - - @Override - public boolean hasPragmaOnceSemantics() { - if (fNominationDelegate != null) { - try { - return fNominationDelegate.hasPragmaOnceSemantics(); - } catch (CoreException e) { - } - } - return fPragmaOnce; - } - - public void setPragamOnceSemantics(boolean value) { - assert fNominationDelegate == null; - fPragmaOnce= value; - } - - @Override - public ISignificantMacros getSignificantMacros() { - if (fNominationDelegate != null) { - try { - return fNominationDelegate.getSignificantMacros(); - } catch (CoreException e) { - } - } - return fSignificantMacros; - } - - public void setSignificantMacros(ISignificantMacros sig) { - assert sig != null; - assert fNominationDelegate == null; - fSignificantMacros= sig; - } - - public void setLoadedVersions(ISignificantMacros[] versions) { - fLoadedVersions= versions; - } - - @Override - public ISignificantMacros[] getLoadedVersions() { - return fLoadedVersions; - } - - @Override - public long getIncludedFileTimestamp() { - if (fNominationDelegate != null) { - return 0; - } - return fIncludedFileTimestamp; - } - - public void setIncludedFileTimestamp(long timestamp) { - assert fNominationDelegate == null; - fIncludedFileTimestamp= timestamp; - } - - @Override - public long getIncludedFileReadTime() { - if (fNominationDelegate != null) { - return 0; - } - return fIncludedFileReadTime; - } - - public void setIncludedFileReadTime(long time) { - assert fNominationDelegate == null; - fIncludedFileReadTime= time; - } - - @Override - public long getIncludedFileSize() { - if (fNominationDelegate != null) { - return 0; - } - return fIncludedFileSize; - } - - public void setIncludedFileSize(long size) { - assert fNominationDelegate == null; - fIncludedFileSize= size; - } - - @Override - public long getIncludedFileContentsHash() { - if (fNominationDelegate != null) { - return 0; - } - return fIncludedFileContentsHash; - } - - public void setIncludedFileContentsHash(long hash) { - assert fNominationDelegate == null; - fCreatesAST= true; - fIncludedFileContentsHash= hash; - } - - @Override - public boolean isErrorInIncludedFile() { - if (fNominationDelegate != null) { - return false; - } - return fErrorInIncludedFile; - } - - public void setErrorInIncludedFile(boolean error) { - assert fNominationDelegate == null; - fErrorInIncludedFile= error; - } - - @Override - public boolean isIncludedFileExported() { - return fIncludedFileExported; - } - - @Override - public boolean createsAST() { - return fCreatesAST; - } - - @Override - public IIndexFile getImportedIndexFile() { - if (fNominationDelegate instanceof IIndexFile) - return (IIndexFile) fNominationDelegate; - - return null; - } -} class ASTMacroDefinition extends ASTPreprocessorNode implements 
IASTPreprocessorObjectStyleMacroDefinition { private final ASTPreprocessorName fName; diff --git a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/CPreprocessor.java b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/CPreprocessor.java index ab99ea551..0db5839f5 100644 --- a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/CPreprocessor.java +++ b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/CPreprocessor.java @@ -112,12 +112,12 @@ public class CPreprocessor implements ILexerLog, IScanner, IAdaptable { private static final char[] __COUNTER__ = "__COUNTER__".toCharArray(); //$NON-NLS-1$ private static final char[] ONCE = "once".toCharArray(); //$NON-NLS-1$ - static final int NO_EXPANSION = 0x01; + protected static final int NO_EXPANSION = 0x01; // Set in contexts where preprocessor intrinsics such as 'defined' // or '__has_feature' need to be recognized. static final int PROTECT_INTRINSICS = 0x02; - static final int STOP_AT_NL = 0x04; - static final int CHECK_NUMBERS = 0x08; + protected static final int STOP_AT_NL = 0x04; + protected static final int CHECK_NUMBERS = 0x08; static final int REPORT_SIGNIFICANT_MACROS = 0x10; static final int IGNORE_UNDEFINED_SIGNIFICANT_MACROS = 0x20; @@ -265,7 +265,7 @@ public Token currentToken() { // configuration final private LexerOptions fLexOptions= new LexerOptions(); final private char[] fAdditionalNumericLiteralSuffixes; - final private CharArrayIntMap fKeywords; + final protected CharArrayIntMap fKeywords; final private CharArrayIntMap fPPKeywords; private final IncludeSearchPath fIncludeSearchPath; private String[][] fPreIncludedFiles= null; @@ -277,11 +277,11 @@ public Token currentToken() { // State information private final CharArrayMap fMacroDictionary = new CharArrayMap<>(512); private final IMacroDictionary fMacroDictionaryFacade = new MacroDictionary(); - private final LocationMap fLocationMap; + protected final LocationMap fLocationMap; private CharArraySet fPreventInclusion; private CharArraySet fImports; - private final ScannerContext fRootContext; + protected final ScannerContext fRootContext; protected ScannerContext fCurrentContext; private boolean isCancelled; @@ -290,8 +290,8 @@ public Token currentToken() { private Token fPrefetchedTokens; private Token fLastToken; - private InternalFileContent fRootContent; - private boolean fHandledEndOfTranslationUnit; + protected InternalFileContent fRootContent; + protected boolean fHandledEndOfTranslationUnit; // Detection of include guards used around an include directive private char[] fExternIncludeGuard; @@ -913,7 +913,7 @@ private void appendStringContent(StringBuilder buf, Token t1) { } } - Token internalFetchToken(final ScannerContext uptoEndOfCtx, int options, boolean withinExpansion) + protected Token internalFetchToken(final ScannerContext uptoEndOfCtx, int options, boolean withinExpansion) throws OffsetLimitReachedException { @@ -1006,7 +1006,7 @@ Token internalFetchToken(final ScannerContext uptoEndOfCtx, int options, boolean } } - private void completeInclusion(ASTInclusionStatement inc) { + protected void completeInclusion(ASTInclusionStatement inc) { final ISignificantMacros sig; CharArrayObjectMap sigMacros= fCurrentContext.getSignificantMacros(); if (sigMacros == null || sigMacros.isEmpty()) { @@ -1020,7 +1020,7 @@ private void completeInclusion(ASTInclusionStatement inc) { } } - private void checkNumber(Token number, final boolean isFloat) { + protected void checkNumber(Token 
number, final boolean isFloat) { final char[] image= number.getCharImage(); boolean hasExponent = false; @@ -1356,7 +1356,7 @@ public void handleProblem(int id, char[] arg, int offset, int endOffset) { * Assumes that the pound token has not yet been consumed * @since 5.0 */ - private void executeDirective(final Lexer lexer, final int startOffset, boolean withinExpansion) + protected void executeDirective(final Lexer lexer, final int startOffset, boolean withinExpansion) throws OffsetLimitReachedException { final Token ident= lexer.nextToken(); switch (ident.getType()) { @@ -1483,7 +1483,7 @@ private void executeDirective(final Lexer lexer, final int startOffset, boolean } } - private void executeInclude(final Lexer lexer, int poundOffset, int includeType, + protected void executeInclude(final Lexer lexer, int poundOffset, int includeType, boolean active, boolean withinExpansion) throws OffsetLimitReachedException { // Make sure to clear the extern include guard. final char[] externGuard= fExternIncludeGuard; @@ -1709,7 +1709,7 @@ private void processInclusionFromIndex(int offset, InternalFileContent fi, boole fLocationMap.skippedFile(fLocationMap.getSequenceNumberForOffset(offset), fi); } - private char[] extractHeaderName(final char[] image, final char startDelim, final char endDelim, + protected char[] extractHeaderName(final char[] image, final char startDelim, final char endDelim, int[] offsets) { char[] headerName; int start= 0; @@ -2032,7 +2032,7 @@ private CodeState skipBranch(final Lexer lexer, boolean withinExpansion) * @param isPPCondition whether the expansion is inside of a preprocessor condition. This * implies a specific handling for the defined token. */ - private boolean expandMacro(final Token identifier, Lexer lexer, int options, + protected boolean expandMacro(final Token identifier, Lexer lexer, int options, boolean withinExpansion) throws OffsetLimitReachedException { final boolean reportSignificant = (options & REPORT_SIGNIFICANT_MACROS) != 0; final char[] name= identifier.getCharImage(); diff --git a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/Lexer.java b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/Lexer.java index 011f1df64..b081fb5c4 100644 --- a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/Lexer.java +++ b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/Lexer.java @@ -40,14 +40,14 @@ * The characters in string literals and char-literals are left as they are found, no conversion to * an execution character-set is performed. 
*/ -final public class Lexer implements ITokenSequence { +public class Lexer implements ITokenSequence { public static final int tBEFORE_INPUT = IToken.FIRST_RESERVED_SCANNER; public static final int tNEWLINE = IToken.FIRST_RESERVED_SCANNER + 1; public static final int tQUOTE_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 2; public static final int tSYSTEM_HEADER_NAME = IToken.FIRST_RESERVED_SCANNER + 3; public static final int tOTHER_CHARACTER = IToken.FIRST_RESERVED_SCANNER + 4; - private static final int END_OF_INPUT = -1; + protected static final int END_OF_INPUT = -1; private static final int ORIGIN_LEXER = OffsetLimitReachedException.ORIGIN_LEXER; public final static class LexerOptions implements Cloneable { @@ -73,22 +73,22 @@ public Object clone() { } // configuration - private final LexerOptions fOptions; - private boolean fSupportContentAssist= false; + protected final LexerOptions fOptions; + protected boolean fSupportContentAssist= false; private final ILexerLog fLog; private final Object fSource; // the input to the lexer - private final AbstractCharArray fInput; + protected final AbstractCharArray fInput; private final int fStart; private int fLimit; // after phase 3 (newline, trigraph, line-splice) - private int fOffset; + protected int fOffset; private int fEndOffset; - private int fCharPhase3; + protected int fCharPhase3; - private boolean fInsideIncludeDirective= false; + protected boolean fInsideIncludeDirective= false; private Token fToken; private Token fLastToken; @@ -260,7 +260,7 @@ public Token nextDirective() throws OffsetLimitReachedException { /** * Computes the next token. */ - private Token fetchToken() throws OffsetLimitReachedException { + protected Token fetchToken() throws OffsetLimitReachedException { while (true) { final int start= fOffset; final int c= fCharPhase3; @@ -650,15 +650,15 @@ private Token fetchToken() throws OffsetLimitReachedException { } } - private Token newToken(int kind, int offset) { + protected Token newToken(int kind, int offset) { return new Token(kind, fSource, offset, fOffset); } - private Token newDigraphToken(int kind, int offset) { + protected Token newDigraphToken(int kind, int offset) { return new TokenForDigraph(kind, fSource, offset, fOffset); } - private Token newToken(final int kind, final int offset, final int imageLength) { + protected Token newToken(final int kind, final int offset, final int imageLength) { final int endOffset= fOffset; final int sourceLen= endOffset - offset; char[] image; @@ -676,7 +676,7 @@ private void handleProblem(int problemID, char[] arg, int offset) { fLog.handleProblem(problemID, arg, offset, fOffset); } - private Token headerName(final int start, final boolean expectQuotes) throws OffsetLimitReachedException { + protected Token headerName(final int start, final boolean expectQuotes) throws OffsetLimitReachedException { int length= 1; boolean done = false; int c= fCharPhase3; @@ -705,7 +705,7 @@ private Token headerName(final int start, final boolean expectQuotes) throws Off return newToken((expectQuotes ? 
tQUOTE_HEADER_NAME : tSYSTEM_HEADER_NAME), start, length); } - private void blockComment(final int start, final char trigger) { + protected void blockComment(final int start, final char trigger) { // We can ignore line-splices, trigraphs and windows newlines when searching for the '*' int pos= fEndOffset; while (isValidOffset(pos)) { @@ -723,7 +723,7 @@ private void blockComment(final int start, final char trigger) { fLog.handleComment(true, start, pos, fInput); } - private void lineComment(final int start) { + protected void lineComment(final int start) { int c= fCharPhase3; while (true) { switch (c) { @@ -745,7 +745,7 @@ private boolean isIdentifierStart(int c) { c == '_'; } - private Token stringLiteral(final int start, int length, int tokenType) throws OffsetLimitReachedException { + protected Token stringLiteral(final int start, int length, int tokenType) throws OffsetLimitReachedException { boolean escaped = false; boolean done = false; @@ -806,7 +806,7 @@ private boolean isUDLSuffixStart(int c) { return c == '_'; } - private Token rawStringLiteral(final int start, int length, int tokenType) throws OffsetLimitReachedException { + protected Token rawStringLiteral(final int start, int length, int tokenType) throws OffsetLimitReachedException { final int delimOffset= fOffset; int delimEndOffset = delimOffset; int offset; @@ -860,7 +860,7 @@ private Token rawStringLiteral(final int start, int length, int tokenType) throw return newToken(tokenType, start, offset - start); } - private Token charLiteral(final int start, int tokenType) throws OffsetLimitReachedException { + protected Token charLiteral(final int start, int tokenType) throws OffsetLimitReachedException { boolean escaped = false; boolean done = false; int length= tokenType == IToken.tCHAR ? 1 : 2; @@ -902,7 +902,7 @@ private Token charLiteral(final int start, int tokenType) throws OffsetLimitReac return newToken(tokenType, start, length); } - private Token identifier(int start, int length) { + protected Token identifier(int start, int length) { int tokenKind= IToken.tIDENTIFIER; boolean isPartOfIdentifier= true; int c= fCharPhase3; @@ -971,7 +971,7 @@ private Token identifier(int start, int length) { return newToken(tokenKind, start, length); } - private Token number(final int start, int length, boolean isFloat) throws OffsetLimitReachedException { + protected Token number(final int start, int length, boolean isFloat) throws OffsetLimitReachedException { boolean isPartOfNumber= true; boolean isHex= false; int c= fCharPhase3; @@ -1070,7 +1070,7 @@ private Token number(final int start, int length, boolean isFloat) throws Offset * Saves the current state of phase3, necessary for '...', '%:%:', UNCs and string literals * with a long prefix. */ - private void markPhase3() { + protected void markPhase3() { fMarkPhase3Offset= fOffset; fMarkPhase3EndOffset= fEndOffset; fMarkPhase3PrefetchedChar= fCharPhase3; @@ -1079,7 +1079,7 @@ private void markPhase3() { /** * Restores a previously saved state of phase3. */ - private void restorePhase3() { + protected void restorePhase3() { fOffset= fMarkPhase3Offset; fEndOffset= fMarkPhase3EndOffset; fCharPhase3= fMarkPhase3PrefetchedChar; @@ -1089,7 +1089,7 @@ private void restorePhase3() { * Perform phase 1-3: Replace \r\n with \n, handle trigraphs, detect line-splicing. * Changes fOffset, fEndOffset and fCharPhase3, state-less otherwise. 
*/ - private int nextCharPhase3() { + protected int nextCharPhase3() { int pos= fEndOffset; do { if (!isValidOffset(pos + 1)) { diff --git a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ScannerContext.java b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ScannerContext.java index 08f07ae48..4e947b24e 100644 --- a/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ScannerContext.java +++ b/org.eclipse.cdt.core/parser/org/eclipse/cdt/internal/core/parser/scanner/ScannerContext.java @@ -23,9 +23,9 @@ * Represents part of the input to the preprocessor. This may be a file or the result of a macro expansion. * @since 5.0 */ -final class ScannerContext { +public final class ScannerContext { enum BranchKind { eIf, eElif, eElse, eEnd } - enum CodeState { eActive, eParseInactive, eSkipInactive } + public enum CodeState { eActive, eParseInactive, eSkipInactive } final static class Conditional { private final CodeState fInitialState;