/**
 * Parses the input and returns a list of lexer tokens. Asserts that the produced tokens are equal to the tokens
 * that the production parser produced.
 *
 * @return the tokens for the highlighting.
 */
public List<Token> getTokens(CharSequence input) {
    List<Token> result;
    IParseResult parseResult = parser.parse(new StringReader(input.toString()));
    if (!parseResult.hasSyntaxErrors()) {
        result = throwingHighlightingParser.getTokens(input);
    } else {
        result = highlightingParser.getTokens(input);
    }
    // assert equal tokens
    Iterator<Token> iter = result.iterator();
    for (ILeafNode leaf : parseResult.getRootNode().getLeafNodes()) {
        Assert.assertTrue("hasNext at index " + leaf.getTotalOffset() + " for leaf '" + leaf.getText() + "'",
                iter.hasNext());
        Token token = iter.next();
        // TODO: assert token type
        Assert.assertEquals(leaf.getText(), token.getText());
    }
    return result;
}
/**
 * Collects all resolutions under the cursor at the given offset.
 */
List<IssueResolution> collectAllResolutions(XtextResource resource, RegionWithCursor offset,
        Multimap<Integer, Issue> offset2issue) {
    EObject script = resource.getContents().get(0);
    ICompositeNode scriptNode = NodeModelUtils.getNode(script);
    ILeafNode offsetNode = NodeModelUtils.findLeafNodeAtOffset(scriptNode, offset.getGlobalCursorOffset());
    int offStartLine = offsetNode.getTotalStartLine();
    List<Issue> allIssues = QuickFixTestHelper.extractAllIssuesInLine(offStartLine, offset2issue);
    List<IssueResolution> resolutions = Lists.newArrayList();
    for (Issue issue : allIssues) {
        if (issue.getLineNumber() == offsetNode.getStartLine() && issue.getLineNumber() <= offsetNode.getEndLine()) {
            Display.getDefault().syncExec(() -> resolutions.addAll(quickfixProvider.getResolutions(issue)));
        }
    }
    return resolutions;
}
@Override
protected void handleLastCompleteNodeIsAtEndOfDatatypeNode() {
    String prefix = getPrefix(lastCompleteNode);
    INode previousNode = getLastCompleteNodeByOffset(rootNode, lastCompleteNode.getOffset());
    EObject previousModel = previousNode.getSemanticElement();
    INode currentDatatypeNode = getContainingDatatypeRuleNode(currentNode);
    Collection<FollowElement> followElements = getParser().getFollowElements(rootNode, 0,
            lastCompleteNode.getOffset(), true);
    int prevSize = contextBuilders.size();
    doCreateContexts(previousNode, currentDatatypeNode, prefix, previousModel, followElements);
    if (lastCompleteNode instanceof ILeafNode && lastCompleteNode.getGrammarElement() == null
            && contextBuilders.size() != prevSize) {
        handleLastCompleteNodeHasNoGrammarElement(contextBuilders.subList(prevSize, contextBuilders.size()),
                previousModel);
    }
}
/**
 * Check that not more than one access modifier is given. Access modifiers are those for which
 * {@link ModifierUtils#isAccessModifier(N4Modifier)} returns <code>true</code>.
 */
private boolean holdsNotMoreThanOneAccessModifier(ModifiableElement elem) {
    boolean hasIssue = false;
    boolean hasAccessModifier = false;
    for (int idx = 0; idx < elem.getDeclaredModifiers().size(); idx++) {
        final N4Modifier mod = elem.getDeclaredModifiers().get(idx);
        final boolean isAccessModifier = ModifierUtils.isAccessModifier(mod);
        if (hasAccessModifier && isAccessModifier) {
            final ILeafNode node = ModifierUtils.getNodeForModifier(elem, idx);
            addIssue(IssueCodes.getMessageForSYN_MODIFIER_ACCESS_SEVERAL(), elem, node.getOffset(), node.getLength(),
                    IssueCodes.SYN_MODIFIER_ACCESS_SEVERAL);
            hasIssue = true;
        }
        hasAccessModifier |= isAccessModifier;
    }
    return !hasIssue;
}
/**
 * Check for correct ordering of access modifiers.
 */
private boolean holdsCorrectOrder(ModifiableElement elem) {
    boolean isOrderMessedUp = false;
    int lastValue = -1;
    for (N4Modifier mod : elem.getDeclaredModifiers()) {
        final int currValue = mod.getValue();
        if (currValue < lastValue) {
            isOrderMessedUp = true;
            break;
        }
        lastValue = currValue;
    }
    if (isOrderMessedUp) {
        final List<N4Modifier> modifiers = ModifierUtils.getSortedModifiers(elem.getDeclaredModifiers());
        final String modifiersStr = Joiner.on(' ').join(modifiers.iterator());
        final ILeafNode nodeFirst = ModifierUtils.getNodeForModifier(elem, 0);
        final ILeafNode nodeLast = ModifierUtils.getNodeForModifier(elem, elem.getDeclaredModifiers().size() - 1);
        addIssue(IssueCodes.getMessageForSYN_MODIFIER_BAD_ORDER(modifiersStr), elem, nodeFirst.getOffset(),
                nodeLast.getOffset() - nodeFirst.getOffset() + nodeLast.getLength(),
                IssueCodes.SYN_MODIFIER_BAD_ORDER);
        return false;
    }
    return true;
}
@ValueConverter(rule = "FQN")
// CHECKSTYLE:OFF
public IValueConverter<String> FQN() { // NOPMD
    // CHECKSTYLE:ON
    return new AbstractNullSafeConverter<String>() {
        @Override
        protected String internalToValue(final String string, final INode node) {
            if (!string.equals(string.trim())) {
                throw new RuntimeException(); // NOPMD
            }
            StringBuffer b = new StringBuffer();
            for (ILeafNode l : node.getLeafNodes()) {
                if (!l.isHidden()) {
                    b.append(l.getText());
                }
            }
            return b.toString();
        }

        @Override
        protected String internalToString(final String value) {
            return value;
        }
    };
}
/** {@inheritDoc} */
@Override
public List<INode> getDocumentationNodes(final EObject object) {
    ICompositeNode node = NodeModelUtils.getNode(object);
    if (node == null) {
        return ImmutableList.of();
    }
    // get all single line comments before a non hidden leaf node
    List<INode> result = Lists.newArrayList();
    for (ILeafNode leaf : node.getLeafNodes()) {
        if (!leaf.isHidden()) {
            break;
        }
        EObject grammarElement = leaf.getGrammarElement();
        if (grammarElement instanceof AbstractRule && ruleName.equals(((AbstractRule) grammarElement).getName())) {
            String comment = leaf.getText();
            if (getCommentPattern().matcher(comment).matches() && !comment.matches(ignore)) {
                result.add(leaf);
            }
        }
    }
    return result;
}
/**
 * Highlights the non-hidden parts of {@code node} with the styles given by the {@code styleIds}
 */
protected void highlightNode(IHighlightedPositionAcceptor acceptor, INode node, String... styleIds) {
    if (node == null)
        return;
    if (node instanceof ILeafNode) {
        ITextRegion textRegion = node.getTextRegion();
        acceptor.addPosition(textRegion.getOffset(), textRegion.getLength(), styleIds);
    } else {
        for (ILeafNode leaf : node.getLeafNodes()) {
            if (!leaf.isHidden()) {
                ITextRegion leafRegion = leaf.getTextRegion();
                acceptor.addPosition(leafRegion.getOffset(), leafRegion.getLength(), styleIds);
            }
        }
    }
}
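// Hedged usage sketch (not part of the original snippet above): inside a semantic highlighting
// calculator, the helper could be applied to the node(s) of a particular feature. The method name,
// the MyDslPackage feature literal and the chosen style id are placeholders/assumptions.
protected void highlightNameFeature(EObject element, IHighlightedPositionAcceptor acceptor) {
    List<INode> nodes = NodeModelUtils.findNodesForFeature(element, MyDslPackage.Literals.NAMED_ELEMENT__NAME);
    for (INode node : nodes) {
        // delegates to highlightNode(...) above; hidden leaves are skipped there
        highlightNode(acceptor, node, DefaultHighlightingConfiguration.DEFAULT_ID);
    }
}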
@SuppressWarnings("deprecation")
private List<String> getNodeSequence(EObject model) {
    List<String> result = Lists.newArrayList();
    GrammarElementTitleSwitch titleSwitch = new GrammarElementTitleSwitch().showAssignments();
    org.eclipse.xtext.serializer.sequencer.EmitterNodeIterator ni =
            new org.eclipse.xtext.serializer.sequencer.EmitterNodeIterator(NodeModelUtils.findActualNodeFor(model));
    while (ni.hasNext()) {
        INode next = ni.next();
        EObject ele = next.getGrammarElement() instanceof CrossReference
                ? ((CrossReference) next.getGrammarElement()).getTerminal()
                : next.getGrammarElement();
        if (next instanceof ILeafNode || GrammarUtil.isDatatypeRuleCall(ele))
            result.add(titleSwitch.doSwitch(ele) + " -> " + next.getText().trim());
        else if (next instanceof ICompositeNode)
            result.add(titleSwitch.doSwitch(ele));
    }
    return result;
}
@Override
public String getLegacyImportSyntax(XImportDeclaration importDeclaration) {
    List<INode> list = NodeModelUtils.findNodesForFeature(importDeclaration,
            XtypePackage.Literals.XIMPORT_DECLARATION__IMPORTED_TYPE);
    if (list.isEmpty()) {
        return null;
    }
    INode singleNode = list.get(0);
    if (singleNode.getText().indexOf('$') < 0) {
        return null;
    }
    StringBuilder sb = new StringBuilder();
    for (ILeafNode node : singleNode.getLeafNodes()) {
        if (!node.isHidden()) {
            sb.append(node.getText().replace("^", ""));
        }
    }
    return sb.toString();
}
public HiddenLeafs getHiddenLeafsAfter(final INode node) {
    HiddenLeafs _xblockexpression = null;
    {
        final Function1<ILeafNode, Boolean> _function = (ILeafNode it) -> {
            boolean _isHidden = it.isHidden();
            return Boolean.valueOf((!_isHidden));
        };
        final ILeafNode start = this.findPreviousLeaf(node, _function);
        HiddenLeafs _xifexpression = null;
        if ((start != null)) {
            _xifexpression = this.newHiddenLeafs(start.getEndOffset(), this.findNextHiddenLeafs(start));
        } else {
            int _offset = 0;
            if (node != null) {
                _offset = node.getOffset();
            }
            _xifexpression = new HiddenLeafs(_offset);
        }
        _xblockexpression = _xifexpression;
    }
    return _xblockexpression;
}
public static String getTypeRefName(TypeRef typeRef) {
    if (typeRef.getClassifier() != null)
        return typeRef.getClassifier().getName();
    final ICompositeNode node = NodeModelUtils.getNode(typeRef);
    if (node != null) {
        final BidiIterator<INode> leafNodes = node.getAsTreeIterable().iterator();
        while (leafNodes.hasPrevious()) {
            INode previous = leafNodes.previous();
            if (previous instanceof ILeafNode && !((ILeafNode) previous).isHidden()) {
                String result = previous.getText();
                if (result != null && result.startsWith("^")) {
                    result = result.substring(1);
                }
                return result;
            }
        }
    }
    return null;
}
/**
 * Finds the trailing comment for a given context object, i.e. the comment after / on the same line as the context
 * object.
 *
 * @param context
 *            the object
 * @return the documentation string
 */
protected String findTrailingComment(final EObject context) {
    StringBuilder returnValue = new StringBuilder();
    ICompositeNode node = NodeModelUtils.getNode(context);
    if (node != null) {
        final int contextEndLine = node.getEndLine();
        // process all leaf nodes first
        for (ILeafNode leaf : node.getLeafNodes()) {
            addComment(returnValue, leaf, contextEndLine);
        }
        // we also need to process siblings (leaf nodes only) due to the fact that the last comment after
        // a given element is not a leaf node of that element anymore.
        INode sibling = node.getNextSibling();
        while (sibling instanceof ILeafNode) {
            addComment(returnValue, (ILeafNode) sibling, contextEndLine);
            sibling = sibling.getNextSibling();
        }
    }
    return returnValue.toString();
}
public ILeafNode findNextLeaf(final INode node, final Function1<? super ILeafNode, ? extends Boolean> matches) {
    Object _xifexpression = null;
    if ((node != null)) {
        if (((node instanceof ILeafNode) && (matches.apply(((ILeafNode) node))).booleanValue())) {
            return ((ILeafNode) node);
        }
        final NodeIterator ni = new NodeIterator(node);
        while (ni.hasNext()) {
            {
                final INode next = ni.next();
                if (((next instanceof ILeafNode) && (matches.apply(((ILeafNode) next))).booleanValue())) {
                    return ((ILeafNode) next);
                }
            }
        }
    }
    return ((ILeafNode) _xifexpression);
}
protected void add(NodeModelBasedRegionAccess access, INode node) {
    if (node instanceof ILeafNode && ((ILeafNode) node).isHidden()) {
        ILeafNode leafNode = (ILeafNode) node;
        lastHidden.addPart(createHidden(lastHidden, leafNode));
    } else if (node instanceof ICompositeNode || node.getLength() > 0) {
        NodeEObjectRegion eObjectTokens = stack.peek();
        NodeSemanticRegion newSemantic = createSemanticRegion(access, node);
        NodeHiddenRegion newHidden = createHiddenRegion(access);
        newSemantic.setTrailingHiddenRegion(newHidden);
        newHidden.setPrevious(newSemantic);
        newSemantic.setLeadingHiddenRegion(lastHidden);
        lastHidden.setNext(newSemantic);
        eObjectTokens.addChild(newSemantic);
        newSemantic.setEObjectTokens(eObjectTokens);
        lastHidden = newHidden;
    }
}
protected Triple<INode, AbstractElement, EObject> findNext(INode node, boolean prune) {
    INode current = next(node, prune);
    while (current != null) {
        if (current instanceof ILeafNode && ((ILeafNode) current).isHidden()) {
            current = next(current, true);
            continue;
        }
        EObject ge = current.getGrammarElement();
        if (ge instanceof AbstractElement && isEObjectNode(current))
            return Tuples.create(current, (AbstractElement) ge, getEObjectNodeEObject(current));
        else if (GrammarUtil.isAssigned(ge) && !GrammarUtil.isEObjectRuleCall(ge)) {
            if (ge instanceof CrossReference)
                return Tuples.create(current, ((CrossReference) ge).getTerminal(), null);
            else
                return Tuples.create(current, (AbstractElement) ge, null);
        } else
            current = next(current, false);
    }
    return null;
}
/**
 * Collects import declarations in XtextResource for the given range (selectedRegion)
 */
public void collectImports(final XtextResource state, final ITextRegion selectedRegion,
        final ImportsAcceptor acceptor) {
    ICompositeNode rootNode = state.getParseResult().getRootNode();
    final EObject selectedSemanticObj = this.findActualSemanticObjectFor(rootNode, selectedRegion);
    final Iterable<ILeafNode> contentsIterator = NodeModelUtils.findActualNodeFor(selectedSemanticObj).getLeafNodes();
    for (final ILeafNode node : contentsIterator) {
        {
            final ITextRegion nodeRegion = node.getTotalTextRegion();
            boolean _contains = selectedRegion.contains(nodeRegion);
            if (_contains) {
                final EObject semanticElement = node.getSemanticElement();
                if ((semanticElement != null)) {
                    this.visit(semanticElement, NodeModelUtils.findActualNodeFor(semanticElement), acceptor);
                }
            }
            if ((node.isHidden() && this.grammarAccess.getML_COMMENTRule().equals(node.getGrammarElement()))) {
                this.addJavaDocReferences(node, selectedRegion, acceptor);
            }
        }
    }
}
/**
 * This method converts a node to text.
 *
 * Leading and trailing text from hidden tokens (whitespace/comments) is removed. Text from hidden tokens that is
 * surrounded by text from non-hidden tokens is summarized to a single whitespace.
 *
 * The preferred use case of this method is to convert the {@link ICompositeNode} that has been created for a data
 * type rule to text.
 *
 * This is also the recommended way to convert a node to text if you want to invoke
 * {@link org.eclipse.xtext.conversion.IValueConverterService#toValue(String, String, INode)}
 */
public static String getTokenText(INode node) {
    if (node instanceof ILeafNode)
        return ((ILeafNode) node).getText();
    else {
        StringBuilder builder = new StringBuilder(Math.max(node.getTotalLength(), 1));
        boolean hiddenSeen = false;
        for (ILeafNode leaf : node.getLeafNodes()) {
            if (!leaf.isHidden()) {
                if (hiddenSeen && builder.length() > 0)
                    builder.append(' ');
                builder.append(leaf.getText());
                hiddenSeen = false;
            } else {
                hiddenSeen = true;
            }
        }
        return builder.toString();
    }
}
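// Hedged usage sketch (not part of the original snippet above; 'importDecl' is a placeholder
// object, the feature literal is borrowed from another snippet in this listing). For a feature
// parsed by a data type rule, getTokenText(...) yields the visible text: leading/trailing hidden
// tokens are dropped and inner runs of hidden tokens collapse to a single space.
List<INode> featureNodes = NodeModelUtils.findNodesForFeature(importDecl,
        XtypePackage.Literals.XIMPORT_DECLARATION__IMPORTED_TYPE);
if (!featureNodes.isEmpty()) {
    // e.g. "com .  /* hidden */  example . Foo" becomes "com . example . Foo"
    String text = getTokenText(featureNodes.get(0));
}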
public ILeafNode newLeafNode(int offset, int length, EObject grammarElement, boolean isHidden,
        /* @Nullable */ SyntaxErrorMessage errorMessage, ICompositeNode parent) {
    LeafNode result = null;
    if (errorMessage != null) {
        if (isHidden) {
            result = new HiddenLeafNodeWithSyntaxError();
            ((HiddenLeafNodeWithSyntaxError) result).basicSetSyntaxErrorMessage(errorMessage);
        } else {
            result = new LeafNodeWithSyntaxError();
            ((LeafNodeWithSyntaxError) result).basicSetSyntaxErrorMessage(errorMessage);
        }
    } else {
        if (isHidden) {
            result = new HiddenLeafNode();
        } else {
            result = new LeafNode();
        }
    }
    result.basicSetGrammarElement(grammarElement);
    result.basicSetTotalOffset(offset);
    result.basicSetTotalLength(length);
    addChild(parent, result);
    return result;
}
@Test
public void testPartialParseConcreteRuleFirstToken_01() throws Exception {
    with(PartialParserTestLanguageStandaloneSetup.class);
    String model = "container c1 {\n" + " children {\n" + " -> C ( ch1 )\n" + " }" + "}";
    XtextResource resource = getResourceFromString(model);
    assertTrue(resource.getErrors().isEmpty());
    ICompositeNode root = resource.getParseResult().getRootNode();
    ILeafNode children = findLeafNodeByText(root, model, "children");
    resource.update(model.indexOf("n {") + 2, 1, "{");
    resource.update(model.indexOf("n {") + 2, 1, "{");
    assertSame(root, resource.getParseResult().getRootNode());
    assertSame(children, findLeafNodeByText(root, model, "children"));
}
private List<EObject> getPackage(ReferencedMetamodel context, ILeafNode text) {
    String nsUri = getMetamodelNsURI(text);
    if (nsUri == null)
        return Collections.emptyList();
    Grammar grammar = GrammarUtil.getGrammar(context);
    Set<Grammar> visitedGrammars = new HashSet<Grammar>();
    for (Grammar usedGrammar : grammar.getUsedGrammars()) {
        List<EObject> result = getPackage(nsUri, usedGrammar, visitedGrammars);
        if (result != null)
            return result;
    }
    QualifiedName packageNsURI = QualifiedName.create(nsUri);
    EPackage pack = findPackageInScope(context, packageNsURI);
    if (pack == null) {
        pack = findPackageInAllDescriptions(context, packageNsURI);
        if (pack == null) {
            pack = loadEPackage(nsUri, context.eResource().getResourceSet());
        }
    }
    if (pack != null)
        return Collections.<EObject>singletonList(pack);
    return Collections.emptyList();
}
private List<EObject> getLinkedMetaModel(TypeRef context, EReference ref, ILeafNode text) throws IllegalNodeException {
    final ICompositeNode parentNode = text.getParent();
    BidiIterator<INode> iterator = parentNode.getChildren().iterator();
    while (iterator.hasPrevious()) {
        INode child = iterator.previous();
        if (child instanceof ILeafNode) {
            ILeafNode leaf = (ILeafNode) child;
            if (text == leaf)
                return super.getLinkedObjects(context, ref, text);
            if (!(leaf.getGrammarElement() instanceof Keyword) && !leaf.isHidden()) {
                IScope scope = getScope(context, ref);
                return XtextMetamodelReferenceHelper.findBestMetamodelForType(
                        context, text.getText(), leaf.getText(), scope);
            }
        }
    }
    return Collections.emptyList();
}
@ValueConverter(rule = "FQN")
public IValueConverter<String> FQN() {
    return new AbstractNullSafeConverter<String>() {
        @Override
        protected String internalToString(String value) {
            return value;
        }

        @Override
        protected String internalToValue(String string, INode node) {
            if (!string.equals(string.trim()))
                throw new RuntimeException();
            StringBuffer b = new StringBuffer();
            for (ILeafNode leaf : node.getLeafNodes()) {
                if (!leaf.isHidden()) {
                    b.append(leaf.getText());
                }
            }
            return b.toString();
        }
    };
}
@Test
public void testPartialParseConcreteRuleFirstInnerToken_01() throws Exception {
    with(PartialParserTestLanguageStandaloneSetup.class);
    String model = "container c1 {\n" + " children {\n" + " -> C ( ch1 )\n" + " }" + "}";
    XtextResource resource = getResourceFromString(model);
    assertTrue(resource.getErrors().isEmpty());
    ICompositeNode root = resource.getParseResult().getRootNode();
    ILeafNode childrenLeaf = findLeafNodeByText(root, model, "children");
    ILeafNode arrowLeaf = findLeafNodeByText(root, model, "->");
    resource.update(model.indexOf("->"), 2, "->");
    resource.update(model.indexOf("->"), 2, "->");
    assertSame(root, resource.getParseResult().getRootNode());
    assertSame(childrenLeaf, findLeafNodeByText(root, model, "children"));
    assertSame(arrowLeaf, findLeafNodeByText(root, model, "->"));
}
private INode findNext() {
    if (nextNodes.isEmpty()) {
        while (nodeIterator.hasNext()) {
            INode candidate = nodeIterator.next();
            if (tokenUtil.isToken(candidate)) {
                nodeIterator.prune();
                Pair<List<ILeafNode>, List<ILeafNode>> leadingAndTrailingHiddenTokens = tokenUtil
                        .getLeadingAndTrailingHiddenTokens(candidate);
                nextNodes.addAll(leadingAndTrailingHiddenTokens.getFirst());
                nextNodes.add(candidate);
                nextNodes.addAll(leadingAndTrailingHiddenTokens.getSecond());
                return nextNodes.poll();
            } else if (tokenUtil.isWhitespaceOrCommentNode(candidate)) {
                return candidate;
            }
        }
        return null;
    }
    return nextNodes.poll();
}
protected void assignComment(ILeafNode comment, Map<EObject, AbstractToken> eObject2Token,
        Map<ILeafNode, EObject> comments) {
    EObject container = comments.get(comment);
    if (container == null)
        return;
    AbstractToken token = eObject2Token.get(container);
    if (token != null) {
        for (int i = 0; i < token.getTokensForSemanticChildren().size(); i++) {
            AbstractToken t = token.getTokensForSemanticChildren().get(i);
            if ((t instanceof KeywordToken || t instanceof AssignmentToken) && t.getNode() == null) {
                token.getTokensForSemanticChildren().add(i, new CommentToken(comment));
                return;
            }
        }
        token.getTokensForSemanticChildren().add(new CommentToken(comment));
    }
}
@Override
public TreeConstructionReport serializeSubtree(EObject object, ITokenStream out) throws IOException {
    TreeConstructionReportImpl report = createReport(object);
    AbstractToken root = serialize(object, report);
    Set<ICompositeNode> roots = Sets.newHashSet();
    Map<EObject, AbstractToken> obj2token = Maps.newHashMap();
    collectRootsAndEObjects(root, obj2token, roots);
    // dump("", root);
    Map<ILeafNode, EObject> comments = commentAssociater.associateCommentsWithSemanticEObjects(object, roots);
    for (ICompositeNode r : roots)
        assignNodesByMatching(obj2token, r, comments);
    WsMergerStream wsout = new WsMergerStream(out);
    // dump("", root);
    // System.out.println(EmfFormatter.objToStr(roots.iterator().next(),
    // ParsetreePackage.Literals.ABSTRACT_NODE__TOTAL_LENGTH,
    // ParsetreePackage.Literals.ABSTRACT_NODE__TOTAL_OFFSET,
    // ParsetreePackage.Literals.ABSTRACT_NODE__TOTAL_LINE, ParsetreePackage.Literals.ABSTRACT_NODE__PARENT));
    ITextRegion previousLocation = ITextRegion.EMPTY_REGION;
    initStream(root, wsout);
    previousLocation = write(root, wsout, previousLocation);
    wsout.flush();
    report.setPreviousLocation(previousLocation);
    return report;
}
protected boolean shouldUseParent(ICompositeNode result, int offset, ILeafNode leaf) {
    if (leaf.getTotalEndOffset() == offset) {
        return true;
    }
    if (result.getGrammarElement() instanceof RuleCall) {
        RuleCall rc = (RuleCall) result.getGrammarElement();
        if (!rc.getArguments().isEmpty()) {
            return true;
        }
        Assignment assignment = GrammarUtil.containingAssignment(rc);
        if (assignment != null && (GrammarUtil.isMultipleCardinality(assignment)
                || (assignment.eContainer() instanceof AbstractElement
                        && GrammarUtil.isMultipleCardinality((AbstractElement) assignment.eContainer())))) {
            return true;
        }
    }
    return false;
}
@Override
protected void highlightSpecialIdentifiers(final IHighlightedPositionAcceptor acceptor, final ICompositeNode root) {
    TerminalRule idRule = grammarAccess.getIDRule();
    for (ILeafNode leaf : root.getLeafNodes()) {
        if (commentProvider.isJavaDocComment(leaf)) {
            // not really a special identifier, but we don't want to iterate over the leaf nodes twice, do we?
            acceptor.addPosition(leaf.getOffset(), leaf.getLength(), CheckHighlightingConfiguration.JAVADOC_ID);
        } else if (!leaf.isHidden()) {
            if (leaf.getGrammarElement() instanceof Keyword) {
                // Check if it is a keyword used as an identifier.
                ParserRule rule = GrammarUtil.containingParserRule(leaf.getGrammarElement());
                if (FEATURE_CALL_ID_RULE_NAME.equals(rule.getName())) {
                    acceptor.addPosition(leaf.getOffset(), leaf.getLength(),
                            DefaultHighlightingConfiguration.DEFAULT_ID);
                }
            } else {
                highlightSpecialIdentifiers(leaf, acceptor, idRule);
            }
        }
    }
}
/**
 * Gets the offset for the given text by analyzing the parse tree and looking for leaf nodes whose text attribute
 * matches the given value. Returns the offset of the first instance found, or an error value if no match is found.
 *
 * @param model
 *            the model
 * @param text
 *            the text
 * @return the offset for text
 */
protected int getOffsetForText(final EObject model, final String text) {
    Iterable<ILeafNode> parseTreeNodes = NodeModelUtils.getNode(model).getLeafNodes();
    try {
        ILeafNode result = Iterables.find(parseTreeNodes, new Predicate<ILeafNode>() {
            @Override
            public boolean apply(final ILeafNode input) {
                return text.equals(input.getText());
            }
        });
        return result.getOffset();
    } catch (NoSuchElementException e) {
        return LEAF_NOT_FOUND_VALUE;
    }
}
/**
 * Returns the INode for a given modifier. This is intended for computing error message regions, quick fixes, etc.
 */
public static final ILeafNode getNodeForModifier(ModifiableElement elem, int index) {
    final List<INode> nodes = NodeModelUtils.findNodesForFeature(elem,
            N4JSPackage.eINSTANCE.getModifiableElement_DeclaredModifiers());
    if (index >= 0 && index < nodes.size()) {
        final Iterable<ILeafNode> leafNodes = nodes.get(index).getLeafNodes();
        for (ILeafNode currLeaf : leafNodes) {
            if (!currLeaf.isHidden())
                return currLeaf;
        }
    }
    return null;
}
/**
 * Converts a leaf node to an ANTLR token type (int).
 */
public int getInternalTokenType(ILeafNode leafNode) {
    EObject grammarElement = leafNode.getGrammarElement();
    if (grammarElement != null) {
        return getInternalTokenType(grammarElement);
    }
    String text = leafNode.getText();
    Integer type = tokenTypes.get("'" + text + "'");
    if (type != null) {
        return type;
    }
    throw new IllegalArgumentException(text);
}
/**
 * Implementation of the {@link TokenSource} interface. Return new tokens as long as there are some, afterwards
 * return {@link Token#EOF_TOKEN}.
 */
@Override
public Token nextToken() {
    if (next != null) {
        Token result = next;
        next = null;
        return result;
    }
    if (!leafNodes.hasNext()) {
        return Token.EOF_TOKEN;
    }
    ILeafNode leaf = leafNodes.next();
    if (leaf.getTotalOffset() >= endOffset) {
        leafNodes = Iterators.emptyIterator();
        return Token.EOF_TOKEN;
    }
    if (leaf.getTotalEndOffset() <= startOffset) {
        return nextToken();
    }
    if (leaf.getTotalEndOffset() > endOffset) {
        return toPrefixToken(leaf);
    }
    SyntaxErrorMessage syntaxErrorMessage = leaf.getSyntaxErrorMessage();
    if (syntaxErrorMessage != null && SEMICOLON_INSERTED.equals(syntaxErrorMessage.getIssueCode())) {
        return toASIToken(leaf);
    }
    if (leaf.isHidden()) {
        return processHiddenToken(leaf);
    }
    int tokenType = tokenTypeMapper.getInternalTokenType(leaf);
    return new CommonToken(tokenType, leaf.getText());
}
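// Hedged usage sketch (not part of the original snippet above; 'source' is a placeholder for an
// instance of the enclosing TokenSource implementation). A client drains the source until the
// EOF sentinel is returned; hidden leaves are skipped or reported on the hidden channel as
// described in processHiddenToken(...) below.
for (Token t = source.nextToken(); t != Token.EOF_TOKEN; t = source.nextToken()) {
    System.out.println(t.getType() + ": '" + t.getText() + "'");
}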
/**
 * Skips the given leaf as it's hidden. If it was the last token to be returned, a hidden token may be synthesized
 * if it would affect the semicolon insertion.
 */
private Token processHiddenToken(ILeafNode leaf) {
    Token result = nextToken();
    if (result == Token.EOF_TOKEN && Strings.countLineBreaks(leaf.getText()) > 0) {
        next = result;
        CommonToken hidden = new CommonToken(tokenTypeMapper.getInternalTokenType(leaf), leaf.getText());
        hidden.setChannel(Token.HIDDEN_CHANNEL);
        return hidden;
    }
    return result;
}
/**
 * Produce an Antlr token for the prefix of the given leaf that overlaps the requested region
 *
 * @see #endOffset
 */
private Token toPrefixToken(ILeafNode leaf) {
    Lexer lexer = new InternalN4JSLexer();
    String text = leaf.getText();
    String prefix = text.substring(0, endOffset - leaf.getTotalOffset());
    ANTLRStringStream stream = new ANTLRStringStream(prefix);
    lexer.setCharStream(stream);
    Token nextToken = lexer.nextToken();
    // copy to get rid of the reference to the stream again
    return new CommonToken(nextToken.getType(), nextToken.getText());
}
/**
 * Returns the length of the node including all hidden leaf nodes, except the {@link LeafNodeWithSyntaxError} that
 * was created for the automatic semicolon insertion.
 */
private int getLengthWithoutAutomaticSemicolon(final INode node) {
    if (node instanceof ILeafNode) {
        return node.getLength();
    }
    int length = 0;
    for (final INode leafNode : ((ICompositeNode) node).getLeafNodes()) {
        if (!isIgnoredSyntaxErrorNode(leafNode, SEMICOLON_INSERTED)) {
            length += leafNode.getLength();
        }
    }
    return length;
}
private String getDocumentation(/* @NonNull */ EObject object) {
    if (object.eContainer() == null) {
        // if a comment is at the beginning of the file it will be returned for
        // the root element (e.g. Script in N4JS) as well -> avoid this!
        return null;
    }
    ICompositeNode node = NodeModelUtils.getNode(object);
    if (node != null) {
        // get the last multi line comment before a non hidden leaf node
        for (ILeafNode leafNode : node.getLeafNodes()) {
            if (!leafNode.isHidden())
                break;
            EObject grammarElem = leafNode.getGrammarElement();
            if (grammarElem instanceof TerminalRule
                    && "ML_COMMENT".equalsIgnoreCase(((TerminalRule) grammarElem).getName())) {
                String comment = leafNode.getText();
                if (commentStartTagRegex.matcher(comment).matches()) {
                    return leafNode.getText();
                }
            }
        }
    }
    return null;
}
/**
 * Checks that no "with" is used and that list of implemented interfaces is separated with commas and not with
 * keywords. These checks (with some warnings created instead of errors) should help the transition from roles to
 * interfaces. However, they may be useful later on as well, e.g., if an interface is manually refactored into a
 * class or vice versa.
 * <p>
 * Note that "with" is used in Dart for roles, so maybe it is useful to have a user-friendly message instead of a
 * parser error.
 */
@Check
public void checkClassDefinition(N4ClassDefinition n4ClassDefinition) {
    holdsNoKeywordInsteadOfComma(n4ClassDefinition);
    ICompositeNode node = NodeModelUtils.findActualNodeFor(n4ClassDefinition);
    ILeafNode keywordNode = findSecondLeafWithKeyword(n4ClassDefinition, "{", node, "extends", false);
    if (keywordNode != null) {
        TClass tclass = n4ClassDefinition.getDefinedTypeAsClass();
        if (tclass == null) {
            return; // avoid consequential errors
        }
        if (StreamSupport.stream(tclass.getImplementedInterfaceRefs().spliterator(), false)
                .allMatch(superTypeRef -> superTypeRef.getDeclaredType() instanceof TInterface)) {
            List<? extends IdentifiableElement> interfaces = StreamSupport
                    .stream(tclass.getImplementedInterfaceRefs().spliterator(), false)
                    .map(ref -> (TInterface) (ref.getDeclaredType()))
                    .collect(Collectors.toList());
            String message = getMessageForSYN_KW_EXTENDS_IMPLEMENTS_MIXED_UP(
                    validatorMessageHelper.description(tclass),
                    "extend",
                    "interface" + (interfaces.size() > 1 ? "s " : " ") + validatorMessageHelper.names(interfaces),
                    "implements");
            addIssue(message, n4ClassDefinition, keywordNode.getTotalOffset(), keywordNode.getLength(),
                    SYN_KW_EXTENDS_IMPLEMENTS_MIXED_UP);
        }
    }
}
private boolean holdsNoKeywordInsteadOfComma(EObject semanticElement) {
    ICompositeNode node = NodeModelUtils.findActualNodeFor(semanticElement);
    List<ILeafNode> commaAlternatives = filterLeafsWithKeywordInsteadOfComma(semanticElement, "{", node,
            "extends", "implements", "with");
    boolean result = true;
    for (ILeafNode n : commaAlternatives) {
        addIssue(getMessageForSYN_KW_INSTEAD_OF_COMMA_WARN(n.getText()), semanticElement, n.getTotalOffset(),
                n.getLength(), SYN_KW_INSTEAD_OF_COMMA_WARN);
        result = false;
    }
    return result;
}
/**
 * Returns the leaf nodes which represent one of the given keywords (where the keyword appears in the grammar as an
 * alternative to a comma), stopping at the first occurrence of {@code stopAtKeyword}.
 *
 * @param keywords
 *            the keywords, sorted in natural order (they are looked up via {@link Arrays#binarySearch})
 */
protected List<ILeafNode> filterLeafsWithKeywordInsteadOfComma(EObject semanticElement, String stopAtKeyword,
        ICompositeNode node, final String... keywords) {
    List<ILeafNode> filteredLeaves = null;
    for (BidiTreeIterator<INode> iter = node.getAsTreeIterable().iterator(); iter.hasNext();) {
        INode child = iter.next();
        EObject childSemElement = child.getSemanticElement();
        if (child != node && childSemElement != null && childSemElement != semanticElement) {
            iter.prune();
        } else if (child instanceof ILeafNode) {
            ILeafNode leaf = (ILeafNode) child;
            EObject grammarElement = leaf.getGrammarElement();
            if (grammarElement instanceof Keyword) {
                String value = ((Keyword) grammarElement).getValue();
                if (stopAtKeyword.equals(value)) {
                    break;
                }
                if (Arrays.binarySearch(keywords, value) >= 0) {
                    if (grammarElement.eContainer() instanceof Alternatives) {
                        AbstractElement first = ((Alternatives) (grammarElement.eContainer())).getElements().get(0);
                        boolean inCommaAlternative = (first instanceof Keyword
                                && ",".equals(((Keyword) first).getValue()));
                        if (inCommaAlternative) {
                            if (filteredLeaves == null) {
                                filteredLeaves = new ArrayList<>(5);
                            }
                            filteredLeaves.add(leaf);
                        }
                    }
                }
            }
        }
    }
    return filteredLeaves == null ? Collections.emptyList() : filteredLeaves;
}