Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Global markdown mode remake based on Scopes #4923

Draft
wants to merge 48 commits into
base: master
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
48 commits
Select commit Hold shift + click to select a range
5c4daf3
experiment: introduce scopes (for custom tokenizer)
mkslanc Sep 9, 2022
ab34bf6
lua mode with custom tokenizer
mkslanc Sep 9, 2022
f80aa38
fix scopes
mkslanc Feb 14, 2023
0f247a1
rework markdown mode based on new scopes
mkslanc Feb 14, 2023
83d1b1c
add markdown tests; fix test runner to support scope-based tokenizers
mkslanc Feb 14, 2023
82aa88a
Merge branch 'master' into scopes-markdown
mkslanc Feb 14, 2023
746ed4b
add auto-indent for markdown lists
mkslanc Feb 14, 2023
01404d2
Merge remote-tracking branch 'origin/scopes-markdown' into scopes-mar…
mkslanc Feb 14, 2023
4e7e49a
fix folding for markdown
mkslanc Feb 14, 2023
56ed432
MarkdownBehaviour now normally inherits all rules from CstyleBehaviour
mkslanc Feb 15, 2023
b9161f2
add tests for markdown behaviours
mkslanc Feb 20, 2023
caf23a5
add hr rule in blockquote
mkslanc Nov 13, 2023
ad15ed8
fix another few bugs
mkslanc Nov 18, 2023
03f2589
Add support for fenced code blocks in markdown
mkslanc Dec 7, 2023
3600da4
add extended www autolink recognition
mkslanc Dec 12, 2023
f6eb997
improve code blocks inside blockquotes
mkslanc Feb 7, 2024
23d9952
fix few more edge cases
mkslanc Feb 7, 2024
75e498f
fix code spans, code fences, rule for linkLabels end state
mkslanc Feb 15, 2024
ae3f72c
add thematic_break token name for hr's
mkslanc Feb 21, 2024
d94bd0f
moved headers inside paragraph state
mkslanc Feb 21, 2024
b65cdc3
add missing include rules
mkslanc Feb 26, 2024
13d24d4
add html tags highlight inside <style|script|pre>
mkslanc Mar 6, 2024
19ba118
fix wrong states
mkslanc Apr 5, 2024
0d8f0f5
experiment: introduce scopes (for custom tokenizer)
mkslanc Sep 9, 2022
68a86e4
fix scopes
mkslanc Feb 14, 2023
cc95250
rework markdown mode based on new scopes
mkslanc Feb 14, 2023
b51b75e
add markdown tests; fix test runner to support scope-based tokenizers
mkslanc Feb 14, 2023
bbc6fe3
add auto-indent for markdown lists
mkslanc Feb 14, 2023
5e304ab
lua mode with custom tokenizer
mkslanc Sep 9, 2022
08ea379
fix folding for markdown
mkslanc Feb 14, 2023
db2bc0e
MarkdownBehaviour now normally inherits all rules from CstyleBehaviour
mkslanc Feb 15, 2023
e91fdfa
add tests for markdown behaviours
mkslanc Feb 20, 2023
6d2b464
add hr rule in blockquote
mkslanc Nov 13, 2023
0ccf6f4
fix another few bugs
mkslanc Nov 18, 2023
ffaef51
Add support for fenced code blocks in markdown
mkslanc Dec 7, 2023
b87407c
add extended www autolink recognition
mkslanc Dec 12, 2023
8f84add
improve code blocks inside blockquotes
mkslanc Feb 7, 2024
b44fdd5
fix few more edge cases
mkslanc Feb 7, 2024
9952bdb
fix code spans, code fences, rule for linkLabels end state
mkslanc Feb 15, 2024
f2d4c5d
add thematic_break token name for hr's
mkslanc Feb 21, 2024
97e668d
moved headers inside paragraph state
mkslanc Feb 21, 2024
d1a7bb8
add missing include rules
mkslanc Feb 26, 2024
a6e39d3
add html tags highlight inside <style|script|pre>
mkslanc Mar 6, 2024
5278fb3
fix wrong states
mkslanc Apr 5, 2024
1a14040
fix after rebase
nightwing Apr 14, 2024
25f0fac
Merge remote-tracking branch 'origin/scopes-markdown' into scopes-mar…
mkslanc May 16, 2024
99d315f
fix: edge case with wrongly not-closed emphasis or strong after link …
mkslanc May 17, 2024
afbf99c
fix: autolink regexp
mkslanc May 23, 2024
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion demo/kitchen-sink/demo.js
Original file line number Diff line number Diff line change
Expand Up @@ -130,6 +130,7 @@ function loadLanguageProvider(editor) {
}
}, true);
function showOccurrenceMarkers(session, positions) {
if (!session.state) session.state = {}
if (!session.state.occurrenceMarkers) {
session.state.occurrenceMarkers = new MarkerGroup(session);
}
Expand Down Expand Up @@ -179,7 +180,7 @@ function loadLanguageProvider(editor) {
let docPos = e.getDocumentPosition();

languageProvider.doHover(session, docPos, function(hover) {
var errorMarker = session.state?.diagnosticMarkers.getMarkerAtPosition(docPos);
var errorMarker = session.state?.diagnosticMarkers?.getMarkerAtPosition(docPos);

if (!errorMarker && !hover?.content) return;

Expand Down
36 changes: 29 additions & 7 deletions demo/kitchen-sink/token_tooltip.js
Original file line number Diff line number Diff line change
Expand Up @@ -58,13 +58,23 @@ class TokenTooltip extends Tooltip {
return;
}

var tokenText = token.type;
if (token.state)
tokenText += "|" + token.state;
if (token.merge)
tokenText += "\n merge";
if (token.stateTransitions)
tokenText += "\n " + token.stateTransitions.join("\n ");
var tokenText = "";
var scope = token.type;
if (scope.name !== undefined) {
do {
tokenText += scope.name + "\n"
if (!scope.parent)
tokenText += "\ntoken count:" + count(scope);
} while(scope = scope.parent)
} else {
tokenText += token.type;
if (token.state)
tokenText += "|" + token.state;
if (token.merge)
tokenText += "\n merge";
if (token.stateTransitions)
tokenText += "\n " + token.stateTransitions.join("\n ");
}

if (this.tokenText != tokenText) {
this.setText(tokenText);
Expand Down Expand Up @@ -121,4 +131,16 @@ class TokenTooltip extends Tooltip {

}

/**
 * Recursively counts the nodes in a scope tree, including `root` itself.
 *
 * Fix: the original text declared this identical function twice in a row;
 * the hoisted second declaration silently shadowed the first, leaving dead
 * code. A single definition is kept.
 *
 * @param {Object} root - scope node whose `children` property is a plain
 *   object mapping child names to child scope nodes (assumed present on
 *   every node, as the recursive call relies on it)
 * @returns {number} total number of nodes in the subtree (always >= 1,
 *   since `root` itself is counted via the reduce seed of 1)
 */
function count(root) {
    return Object.keys(root.children).reduce(function (n, key) {
        return n + count(root.children[key]);
    }, 1);
}

exports.TokenTooltip = TokenTooltip;
7 changes: 5 additions & 2 deletions src/background_tokenizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -192,9 +192,12 @@ class BackgroundTokenizer {
var state = this.states[row - 1];
// @ts-expect-error TODO: potential wrong argument
var data = this.tokenizer.getLineTokens(line, state, row);
var lastToken = data.tokens[data.tokens.length - 1];
var newState = (lastToken !== undefined && lastToken.type !== undefined && lastToken.type.parent !== undefined)
? lastToken.type.parent : data.state;

if (this.states[row] + "" !== data.state + "") {
this.states[row] = data.state;
if (this.states[row] !== newState) {
this.states[row] = newState;
this.lines[row + 1] = null;
if (this.currentLine > row + 1)
this.currentLine = row + 1;
Expand Down
4 changes: 2 additions & 2 deletions src/edit_session/bracket_match.js
Original file line number Diff line number Diff line change
Expand Up @@ -178,7 +178,7 @@ function BracketMatch() {
// whose type matches typeRe
do {
token = iterator.stepBackward();
} while (token && !typeRe.test(token.type));
} while (token && !typeRe.test(token.type.toString()));

if (token == null)
break;
Expand Down Expand Up @@ -245,7 +245,7 @@ function BracketMatch() {
// whose type matches typeRe
do {
token = iterator.stepForward();
} while (token && !typeRe.test(token.type));
} while (token && !typeRe.test(token.type.toString()));

if (token == null)
break;
Expand Down
4 changes: 2 additions & 2 deletions src/edit_session/folding.js
Original file line number Diff line number Diff line change
Expand Up @@ -708,9 +708,9 @@ function Folding() {
var token = iterator.getCurrentToken();
var type = token && token.type;
if (token && /^comment|string/.test(type)) {
type = type.match(/comment|string/)[0];
type = /comment|string/.exec(type)[0];
if (type == "comment")
type += "|doc-start|\\.doc";
type += "|doc-start|\\.doc|empty";
var re = new RegExp(type);
var range = new Range();
if (dir != 1) {
Expand Down
4 changes: 2 additions & 2 deletions src/ext/beautify.js
Original file line number Diff line number Diff line change
Expand Up @@ -147,13 +147,13 @@ exports.beautify = function(session) {
breakBefore = true;

// trim value if not in a comment or string
if (!is(token, "comment") && !token.type.match(/^(comment|string)$/))
if (!is(token, "comment") && !/^(comment|string)$/.test(token.type))
value = value.trimLeft();
}

if (value) {
// whitespace
if (token.type === "keyword" && value.match(/^(if|else|elseif|for|foreach|while|switch)$/)) {
if (token.type === "keyword" && /^(if|else|elseif|for|foreach|while|switch)$/.test(value)) {
parents[depth] = value;

trimNext();
Expand Down
6 changes: 3 additions & 3 deletions src/layer/text.js
Original file line number Diff line number Diff line change
Expand Up @@ -413,10 +413,10 @@ class Text {

valueFragment.appendChild(this.dom.createTextNode(i ? value.slice(i) : value, this.element));

if (!isTextToken(token.type)) {
var classes = "ace_" + token.type.replace(/\./g, " ace_");
if (!isTextToken(token.type.toString())) {
var classes = "ace_" + token.type.toString().replace(/\./g, " ace_");
var span = this.dom.createElement("span");
if (token.type == "fold"){
if (token.type.toString() === "fold") {
span.style.width = (token.value.length * this.config.characterWidth) + "px";
span.setAttribute("title", nls("inline-fold.closed.title", "Unfold code"));
}
Expand Down
78 changes: 45 additions & 33 deletions src/mode/_test/highlight_rules_test.js
Original file line number Diff line number Diff line change
Expand Up @@ -50,11 +50,11 @@ function checkModes() {
testComments(m.lineCommentStart, testLineComment, tokenizer, modeName);
testComments(m.blockComment, testBlockComment, tokenizer, modeName);
testBrackets(m, modeName);

if (m.snippetFileId)
snippets[m.snippetFileId] = modeName;
});

jsFileList(cwd + "../../snippets").forEach(function(snippetFileName) {
if (/\.snippets$/.test(snippetFileName)) return;
if (!snippets["ace/snippets/" + snippetFileName])
Expand All @@ -65,7 +65,7 @@ function checkModes() {
console.error("Snippet files missing", snippets);
throw new Error("Snippet files missing");
}

function testNextState(tokenizer, modeName) {
let keys = Object.keys(tokenizer.states);
for (let i = 0; i < keys.length; i++) {
Expand Down Expand Up @@ -97,24 +97,28 @@ function checkModes() {
}
}
}

function testBlockComment(tokenizer, blockComment, modeName) {
if (blockComment.lineStartOnly)
return; // TODO test
return; // TODO test
var str = blockComment.start + " " + blockComment.end;
str = blockComment.start + str;
if (blockComment.nestable)
str += blockComment.end;
str += blockComment.end;
var data = tokenizer.getLineTokens(str, "start");

var type = data.tokens.length > 0 ? data.tokens[data.tokens.length - 1].type : "";
var state = typeof type === "string" ? data.state : type.parent;

var isBroken = data.tokens.some(function(t) { return !/comment/.test(t.type); });
if (isBroken) {
die("broken blockComment in " + modeName, data);
}
if (!/start/.test(data.state)) {
if (!/start/.test(state)) {
die("broken state after blockComment in " + modeName, data);
}
}

function testLineComment(tokenizer, commentStart, modeName) {
var tokens = tokenizer.getLineTokens(commentStart + " ", "start").tokens;
if (!/comment/.test(tokens[0].type)) {
Expand All @@ -124,11 +128,11 @@ function checkModes() {

function testBrackets(mode, modeName) {
if (/^(forth|mask)$/.test(modeName)) return;

var session = new EditSession("{ foo[ bar(baz) ] }", mode);
var isInvalid = session.getTokens(0).some(function(t) {
return /invalid|illegal|string/.test(t.type);

var isInvalid = session.getTokens(0).some(function(t) {
return /invalid|illegal|string/.test(t.type);
});
if (isInvalid) return;

Expand All @@ -141,7 +145,7 @@ function checkModes() {
position = session.findMatchingBracket({row:0, column:11});
if (!position || position.column != 14)
die("Matching bracket not found in " + modeName);

if (mode.$behaviour) {
session.setValue("");
editor.setSession(session);
Expand All @@ -161,7 +165,7 @@ function checkModes() {
}

function generateTestData(names, force) {
var docRoot = root + "/demo/kitchen-sink/docs";
var docRoot = root + "/ace/demo/kitchen-sink/docs";
var docs = fs.readdirSync(docRoot);
var specialDocs = fs.readdirSync(cwd);
var modes = modeList();
Expand All @@ -183,7 +187,7 @@ function generateTestData(names, force) {

if (names && names.length && names.indexOf(modeName) == -1)
return;

var outputPath = cwd + "tokens_" + modeName + ".json";
try {
var oldOutput = require(outputPath);
Expand All @@ -197,7 +201,7 @@ function generateTestData(names, force) {
}).join("");
}).join("\n");
}

var filePath = "text_" + modeName + ".txt";
if (specialDocs.indexOf(filePath) !== -1) {
filePath = cwd + filePath;
Expand All @@ -206,7 +210,7 @@ function generateTestData(names, force) {
// oldText = "";
}
var text = oldText ||fs.readFileSync(filePath, "utf8");

try {
var Mode = require("../" + modeName).Mode;
} catch(e) {
Expand All @@ -217,26 +221,33 @@ function generateTestData(names, force) {
var tokenizer = new Mode().getTokenizer();

var state = "start";
var data = text.split(/\r\n|\r|\n/).map(function(line) {
var data = text.split(/\r\n|\r|\n/).map(function (line) {
var data = tokenizer.getLineTokens(line, state);

var type = data.tokens.length > 0 ? data.tokens[data.tokens.length - 1].type : "";
var tmp = [];
tmp.push(JSON.stringify(data.state));
var stateString;
if (/^[\x00]/.test(line)) {
stateString = "start";
}
else {
stateString = typeof type === "string" ? data.state : type.getAllScopeNames();
}
tmp.push(JSON.stringify(stateString));
var tokenizedLine = "";
data.tokens.forEach(function(x) {
data.tokens.forEach(function (x) {
tokenizedLine += x.value;
tmp.push(JSON.stringify([x.type, x.value]));
tmp.push(JSON.stringify([x.type.toString(), x.value]));
});
if (tokenizedLine != line)
tmp.push(JSON.stringify(line));
state = data.state;
if (tokenizedLine !== line) tmp.push(JSON.stringify(line));
state = typeof type === "string" ? data.state : type.parent;
return tmp.join(",\n ");
});

var jsonStr = "[[\n " + data.join("\n],[\n ") + "\n]]";

if (oldOutput && JSON.stringify(JSON.parse(jsonStr)) == JSON.stringify(oldOutput))
return;


if (oldOutput && JSON.stringify(JSON.parse(jsonStr)) == JSON.stringify(oldOutput)) return;

fs.writeFileSync(outputPath, jsonStr, "utf8");
});
}
Expand Down Expand Up @@ -280,18 +291,19 @@ function testMode(modeName, i) {
var tokens = tokenizer.getLineTokens(line, state);
var values = tokens.tokens.map(function(x) {return x.value;});
var types = tokens.tokens.map(function(x) {return x.type;});

var type = tokens.tokens.length > 0 ? tokens.tokens[tokens.tokens.length - 1].type: "";
var stateString = /^[\x00]/.test(line) ? "start" : typeof type === "string" ? tokens.state : type.getAllScopeNames();
var err = testEqual([
JSON.stringify(lineData.state), JSON.stringify(tokens.state),
JSON.stringify(lineData.state), JSON.stringify(stateString),
lineData.types, types,
lineData.values, values]);

if (err) {
console.log(line);
throw "error";
}

state = tokens.state;
state = typeof type === "string" ? tokens.state : type.parent;
});
}
function testEqual(a) {
Expand Down
Binary file modified src/mode/_test/text_markdown.txt
Binary file not shown.
Loading
Loading