Syntax highlighting for macro_rules!
bruno-medeiros committed Dec 16, 2016
1 parent fabe5df commit f3cbeb2
Showing 6 changed files with 94 additions and 75 deletions.
4 changes: 2 additions & 2 deletions documentation/ChangeLog.md
@@ -1,10 +1,10 @@
## release ChangeLog

### 0.8.0
* Added support for Cargo's JSON output, with `--message-format json`. (#138)
* Added support for Cargo's JSON output, with `--message-format=json`. (#138)
#TODO
* Invoking a build operation for a project now immediately cancels the pending build operation for that project, if any.
* Added syntax coloring support for `?` operator.
* Added syntax coloring support for `?` operator, and `macro_rules!`.
* More lenient with `rustfmt` exit code 3, don't treat those warnings as a failure. (#144)
* Added: Cargo `clean` Build Target.
* Build target `check` now defaults to `cargo check --message-format=json`.
@@ -10,14 +10,19 @@
*******************************************************************************/
package com.github.rustdt.ide.ui.text;

import static melnorme.utilbox.core.Assert.AssertNamespace.assertFail;

import org.eclipse.jface.text.rules.IRule;
import org.eclipse.jface.text.rules.IToken;
import org.eclipse.jface.text.rules.Token;

import com.github.rustdt.tooling.lexer.RustColoringTokens;
import com.github.rustdt.tooling.lexer.RustWordLexerRule;

import melnorme.lang.ide.ui.text.AbstractLangScanner;
import melnorme.lang.ide.ui.text.coloring.TokenRegistry;
import melnorme.lang.tooling.parser.lexer.ILexingRule2;
import melnorme.lang.utils.parse.ICharacterReader;
import melnorme.utilbox.collections.ArrayList2;

/**
@@ -34,18 +39,40 @@ protected void initRules(ArrayList2<IRule> rules) {
IToken defaultToken = getToken(RustColorPreferences.DEFAULT);
setDefaultReturnToken(defaultToken);

RustWordLexerRule<IToken> codeLexerRule = new RustWordLexerRule<>(
Token.WHITESPACE,
getToken(RustColorPreferences.KEYWORDS),
getToken(RustColorPreferences.KEYWORDS_BOOLEAN_LIT),
getToken(RustColorPreferences.KEYWORDS_SELF),
defaultToken,
getToken(RustColorPreferences.MACRO_CALL),
getToken(RustColorPreferences.NUMBERS),
getToken(RustColorPreferences.TRY_OPERATOR)
);
IToken tkWhitespace = Token.WHITESPACE;
IToken tkKeywords = getToken(RustColorPreferences.KEYWORDS);
IToken tkKeywordsBooleanLiteral = getToken(RustColorPreferences.KEYWORDS_BOOLEAN_LIT);
IToken tkKeywordsSelf = getToken(RustColorPreferences.KEYWORDS_SELF);
IToken tkMacroCall = getToken(RustColorPreferences.MACRO_CALL);
IToken tkNumbers = getToken(RustColorPreferences.NUMBERS);
IToken tkTryOperator = getToken(RustColorPreferences.TRY_OPERATOR);

RustWordLexerRule rustColorLexer = new RustWordLexerRule();

ILexingRule2<IToken> rule = new ILexingRule2<IToken>() {
@Override
public IToken doEvaluateToken(ICharacterReader reader) {
RustColoringTokens rustToken = rustColorLexer.evaluateToken(reader);
if(rustToken == null) {
return null;
}
switch (rustToken) {
case WS: return tkWhitespace;
case KEYWORD: return tkKeywords;
case KEYWORD_BOOL: return tkKeywordsBooleanLiteral;
case KEYWORD_SELF: return tkKeywordsSelf;
case WORD: return defaultToken;
case MACRO_CALL: return tkMacroCall;
case MACRO_RULES: return tkMacroCall; // TODO own coloring token
case NUMBER: return tkNumbers;
case TRY_OP: return tkTryOperator;
}
// switch must be complete
throw assertFail();
}
};

rules.add(new LexingRule_RuleAdapter(codeLexerRule));
rules.add(new LexingRule_RuleAdapter(rule));
}

}
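
The MACRO_RULES case above reuses the macro-call color, as noted by the TODO. A minimal sketch of how a dedicated color could be wired in later, assuming a hypothetical MACRO_RULES preference key is added to RustColorPreferences (not part of this commit):

// Hypothetical: requires adding a MACRO_RULES key to RustColorPreferences.
IToken tkMacroRules = getToken(RustColorPreferences.MACRO_RULES);
// ...and in the switch over RustColoringTokens:
// case MACRO_RULES: return tkMacroRules;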
@@ -11,14 +11,7 @@
*******************************************************************************/
package com.github.rustdt.tooling.lexer;

import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.KEYWORD;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.KEYWORD_BOOL;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.KEYWORD_SELF;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.MACRO_CALL;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.NUMBER;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.TRY_OP;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.WORD;
import static com.github.rustdt.tooling.lexer.RustWordLexer_Test.ColoringTokens.WS;
import static com.github.rustdt.tooling.lexer.RustColoringTokens.*;
import static melnorme.utilbox.core.Assert.AssertNamespace.assertTrue;

import org.junit.Test;
@@ -28,31 +21,11 @@

public class RustWordLexer_Test extends CommonToolingTest {

public enum ColoringTokens {
WS,
KEYWORD,
KEYWORD_BOOL,
KEYWORD_SELF,
WORD,
MACRO_CALL,
NUMBER,
TRY_OP,
}

public void testRule(String source, ColoringTokens expectedToken, int expectedLength) {
RustWordLexerRule<ColoringTokens> wordLexerRule = new RustWordLexerRule<>(
WS,
KEYWORD,
KEYWORD_BOOL,
KEYWORD_SELF,
WORD,
MACRO_CALL,
NUMBER,
TRY_OP
);
public void testRule(String source, RustColoringTokens expectedToken, int expectedLength) {
RustWordLexerRule wordLexerRule = new RustWordLexerRule();

StringCharSource reader = new StringCharSource(source);
ColoringTokens token = wordLexerRule.doEvaluateToken(reader);
RustColoringTokens token = wordLexerRule.doEvaluateToken(reader);

assertTrue(token == expectedToken);
assertTrue(reader.getReadPosition() == expectedLength);
@@ -79,9 +52,15 @@ public void testRule(String source, ColoringTokens expectedToken, int expectedLe
testRule("abc!(asdf)", MACRO_CALL, 4);
testRule("abc![a", MACRO_CALL, 4);
testRule("abc! [a", MACRO_CALL, 4);

testRule("macro_rules!", MACRO_RULES, 12);
// Test macros (negative cases)
testRule("blah!", WORD, 4);
testRule("macro_rules!()", MACRO_RULES, 12);
testRule("abc!=3", WORD, 3);
testRule("abc! asdf", WORD, 3);

testRule("macro!", KEYWORD, 5); // Is reserved keyword
}

}
@@ -20,6 +20,8 @@ public class WordLexerRule<TOKEN> implements ILexingRule2<TOKEN> {
protected final TOKEN defaultWordToken;
protected final HashMap2<String, TOKEN> tokenMap = new HashMap2<>();

protected String lastEvaluatedWord;

public WordLexerRule(TOKEN whitespaceToken, TOKEN defaultWordToken) {
this.whitespaceToken = whitespaceToken;
this.defaultWordToken = defaultWordToken;
@@ -41,12 +43,12 @@ public TOKEN doEvaluateToken(ICharacterReader reader) {
return whitespaceToken;
}

String word = LexingUtils.readJavaIdentifier(reader);
if(word.isEmpty()) {
lastEvaluatedWord = LexingUtils.readJavaIdentifier(reader);
if(lastEvaluatedWord.isEmpty()) {
return null;
}

TOKEN keywordToken = tokenMap.get(word);
TOKEN keywordToken = tokenMap.get(lastEvaluatedWord);
return (keywordToken == null) ? defaultWordToken : keywordToken;
}

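
The new lastEvaluatedWord field exposes the word the base rule just matched, so subclasses can refine the returned token after the fact; RustWordLexerRule below uses it this way to detect macro_rules. A minimal, hypothetical sketch of that pattern (simplified, not part of this commit):

import melnorme.lang.tooling.parser.lexer.WordLexerRule;
import melnorme.lang.utils.parse.ICharacterReader;

// Hypothetical subclass: refines the token based on the word the base rule just matched.
class MacroAwareWordLexerRule extends WordLexerRule<String> {
	public MacroAwareWordLexerRule() {
		super("WS", "WORD");
	}
	@Override
	public String doEvaluateToken(ICharacterReader reader) {
		String result = super.doEvaluateToken(reader);
		if(result != null && "macro_rules".equals(lastEvaluatedWord)) {
			return "MACRO_RULES"; // simplified: the real rule also requires a following '!'
		}
		return result;
	}
}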
@@ -0,0 +1,23 @@
/*******************************************************************************
* Copyright (c) 2016 Bruno Medeiros and other Contributors.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* Bruno Medeiros - initial API and implementation
*******************************************************************************/
package com.github.rustdt.tooling.lexer;

public enum RustColoringTokens {
WS,
KEYWORD,
KEYWORD_BOOL,
KEYWORD_SELF,
WORD,
MACRO_CALL,
MACRO_RULES,
NUMBER,
TRY_OP,
}
@@ -10,7 +10,7 @@
*******************************************************************************/
package com.github.rustdt.tooling.lexer;

import static melnorme.utilbox.core.Assert.AssertNamespace.assertNotNull;
import static melnorme.utilbox.core.CoreUtil.areEqual;

import melnorme.lang.tooling.parser.lexer.CharacterReader_SubReader;
import melnorme.lang.tooling.parser.lexer.WordLexerRule;
@@ -20,7 +20,7 @@
/**
* A lexer rule used for coloring purposes.
*/
public class RustWordLexerRule<TOKEN> extends WordLexerRule<TOKEN> {
public class RustWordLexerRule extends WordLexerRule<RustColoringTokens> {

public static final String[] keywords_control = {
"abstract", "alignof", "as", "become", "box", "break", "const", "continue", "crate",
@@ -40,41 +40,25 @@ public class RustWordLexerRule<TOKEN> extends WordLexerRule<TOKEN> {

protected final RustNumberLexingRule rustNumberLexingRule = new RustNumberLexingRule();

protected final TOKEN macroCall;
protected final TOKEN numberLiteral;
protected final TOKEN tryOperator;

public RustWordLexerRule(
TOKEN whitespaceToken,
TOKEN keywords,
TOKEN keywords_booleanLiteral,
TOKEN keywords_self,
TOKEN word,
TOKEN macroCall,
TOKEN numberLiteral,
TOKEN tryOperator
) {
super(whitespaceToken, word);
this.macroCall = assertNotNull(macroCall);
this.numberLiteral = assertNotNull(numberLiteral);
this.tryOperator = assertNotNull(tryOperator);
public RustWordLexerRule() {
super(RustColoringTokens.WS, RustColoringTokens.WORD);

addKeywords(keywords, RustWordLexerRule.keywords_control);
addKeywords(keywords_booleanLiteral, RustWordLexerRule.keywords_boolean_lit);
addKeywords(keywords_self, RustWordLexerRule.keywords_self);
addKeywords(RustColoringTokens.KEYWORD, RustWordLexerRule.keywords_control);
addKeywords(RustColoringTokens.KEYWORD_BOOL, RustWordLexerRule.keywords_boolean_lit);
addKeywords(RustColoringTokens.KEYWORD_SELF, RustWordLexerRule.keywords_self);
}

@Override
public TOKEN doEvaluateToken(ICharacterReader reader) {
public RustColoringTokens doEvaluateToken(ICharacterReader reader) {
if(reader.tryConsume('?')) {
return tryOperator;
return RustColoringTokens.TRY_OP;
}

TOKEN result = super.doEvaluateToken(reader);
RustColoringTokens result = super.doEvaluateToken(reader);

if(result == null) {
if(rustNumberLexingRule.tryMatch(reader)) {
return numberLiteral;
return RustColoringTokens.NUMBER;
}
}

@@ -89,9 +73,13 @@ public TOKEN doEvaluateToken(ICharacterReader reader) {

int afterWS = LexingUtils.countWhitespace(subReader);
int lookahead = subReader.lookahead(afterWS);
if(areEqual(lastEvaluatedWord, "macro_rules")) {
subReader.consumeInParentReader();
return RustColoringTokens.MACRO_RULES;
}
if(lookahead == '(' || lookahead == '[') {
subReader.consumeInParentReader();
return macroCall;
return RustColoringTokens.MACRO_CALL;
}
}
return result;
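
With the token type parameter removed, the rule can also be evaluated directly against a character source; a small usage sketch mirroring the macro_rules case from RustWordLexer_Test above (assumes the same StringCharSource helper used by the test):

StringCharSource reader = new StringCharSource("macro_rules!");
RustColoringTokens token = new RustWordLexerRule().doEvaluateToken(reader);
// Per the test expectations: token == RustColoringTokens.MACRO_RULES,
// and reader.getReadPosition() == 12, i.e. the whole "macro_rules!" was consumed.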
