Skip to content

Commit

Permalink
Add CallStack #10
Browse files Browse the repository at this point in the history
  • Loading branch information
RedCMD committed Mar 7, 2024
1 parent c17dbf8 commit 97fbc42
Show file tree
Hide file tree
Showing 51 changed files with 14,338 additions and 3,687 deletions.
3 changes: 1 addition & 2 deletions .vscode/tasks.json
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,6 @@
"color": "terminal.ansiMagenta",
"id": "console",
},
"hide": true,
},
{
// Must remove "extensionDependencies" in `package.json` first
Expand All @@ -45,7 +44,7 @@
"label": "Launch-TextMate-Web-Extension",
"type": "npm",
"script": "open-in-browser",
"problemMatcher": "$ts-webpack-watch",
"problemMatcher": [],
"isBackground": true,
"presentation": {
"reveal": "never",
Expand Down
3 changes: 3 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1,6 +1,9 @@
# TmLanguage-Syntax-Highlighter
Syntax Highlighter for VS Code's JSON TextMate grammars

# NEW! CallStack feature
Feedback is welcome on [GitHub](https://github.com/RedCMD/TmLanguage-Syntax-Highlighter/issues/10)

## Features
* Syntax Highlighting
* Basic Intellisense
Expand Down
141 changes: 141 additions & 0 deletions out/TextMate.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,141 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.tokenizeFile = exports.tokenizeLine = exports.initTextMate = exports.whileRuleId = exports.endRuleId = void 0;
const vscode = require("vscode");
const vscodeTextmate = require("./textmate/main");
// import * as vscodeTextmate from "vscode-textmate";
const vscodeOniguruma = require("vscode-oniguruma");
const extension_1 = require("./extension");
exports.endRuleId = -1;
exports.whileRuleId = -2;
/**
 * Builds the onig-lib adapter that vscode-textmate's Registry consumes,
 * backed by the vscode-oniguruma regex engine.
 * @returns {Promise<{createOnigScanner: Function, createOnigString: Function}>}
 *   factory pair for Oniguruma scanners and strings.
 */
async function onigLibInterface() {
    const createOnigScanner = (sources) => new vscodeOniguruma.OnigScanner(sources);
    const createOnigString = (str) => new vscodeOniguruma.OnigString(str);
    return { createOnigScanner, createOnigString };
}
/**
 * Resolves a TextMate scopeName to a parsed raw grammar by scanning the
 * `contributes.grammars` section of every installed extension.
 * Reports to the user and returns null when no extension contributes it.
 * @param {string} scopeName - TextMate scope name (e.g. "source.js").
 * @returns {Promise<object|null>} the parsed raw grammar, or null when not found.
 */
async function loadGrammar(scopeName) {
    for (const extension of vscode.extensions.all) {
        const grammars = extension.packageJSON.contributes?.grammars;
        if (!grammars) {
            continue;
        }
        for (const grammar of grammars) {
            // Skip non-matching entries and entries without a grammar file path.
            if (grammar.scopeName != scopeName || !grammar.path) {
                continue;
            }
            const uri = vscode.Uri.joinPath(extension.extensionUri, grammar.path);
            if (uri.scheme == 'untitled') {
                continue;
            }
            const document = await vscode.workspace.openTextDocument(uri);
            return vscodeTextmate.parseRawGrammar(document.getText(), uri.path);
        }
    }
    vscode.window.showInformationMessage(`Unknown scopeName: ${scopeName}`);
    console.log(`TextMate: Unknown scope name: ${scopeName}`);
    return null;
}
// Module-level vscode-textmate registry; created once by initTextMate().
let registry;
/**
 * Creates the TextMate registry that maps scope names to grammars.
 * Must run before tokenizeLine()/tokenizeFile() are called.
 * @param {object} context - extension activation context (currently unused).
 */
function initTextMate(context) {
    // The registry resolves grammars on demand through loadGrammar(),
    // using Oniguruma as its regex backend.
    registry = new vscodeTextmate.Registry({
        onigLib: onigLibInterface(),
        loadGrammar: loadGrammar,
    });
}
exports.initTextMate = initTextMate;
/**
 * Looks up the TextMate scopeName registered for a VS Code language id by
 * scanning `contributes.grammars` of every installed extension.
 * @param {string} lang - VS Code language id (e.g. "javascript").
 * @returns {string|null} the matching scopeName, or null when none is found.
 */
function getScopeName(lang) {
    for (const extension of vscode.extensions.all) {
        const grammars = extension.packageJSON.contributes?.grammars;
        if (!grammars) {
            continue;
        }
        for (const grammar of grammars) {
            if (grammar.language == lang && grammar.scopeName) {
                return grammar.scopeName;
            }
        }
    }
    return null;
}
/**
 * Tokenizes `document` from line 0 down to `lineNumber` (inclusive) with the
 * grammar registered for the document's language.
 *
 * Tokenization state is carried line-to-line via the rule stack, so every call
 * re-tokenizes from the top of the document to reach the requested line.
 *
 * @param {object} document - vscode.TextDocument to tokenize.
 * @param {number} lineNumber - zero-based index of the target line.
 * @returns {Promise<object|undefined>} the tokenization result for the target
 *   line, or undefined when no grammar could be loaded for the language
 *   (or when lineNumber is negative).
 */
async function tokenizeLine(document, lineNumber) {
    const scopeName = getScopeName(document.languageId);
    const grammar = await registry.loadGrammar(scopeName);
    if (!grammar) {
        // Fix: previously this fell through and threw a TypeError on
        // grammar.tokenizeLine. loadGrammar() has already notified the user.
        return undefined;
    }
    let tokenLineResult;
    let ruleStack = vscodeTextmate.INITIAL;
    for (let i = 0; i <= lineNumber; i++) {
        const line = document.lineAt(i).text;
        // 15000 is the per-line time limit passed to vscode-textmate's
        // tokenizeLine — guards against runaway regexes (see `stoppedEarly`).
        const lineTokens = grammar.tokenizeLine(line, ruleStack, 15000);
        // Carry the rule stack forward so the next line starts in the
        // correct context.
        ruleStack = lineTokens.ruleStack;
        tokenLineResult = lineTokens;
    }
    return tokenLineResult;
}
exports.tokenizeLine = tokenizeLine;
/**
 * Tokenizes every line of `document` and returns the grammar object itself,
 * with a `rules` array attached holding per-line rule data collected during
 * tokenization (used by the CallStack feature).
 *
 * @param {object} document - vscode.TextDocument to tokenize.
 * @returns {Promise<object>} the grammar, carrying the populated `rules` array.
 */
async function tokenizeFile(document) {
    const lang = document.languageId;
    const scopeName = getScopeName(lang);
    // NOTE(review): if no grammar exists for scopeName this resolves to null
    // and the `grammar.rules = []` below throws — consider guarding.
    // const grammar = await registry.loadGrammar(scopeName);
    const grammar = await registry.loadGrammar(scopeName);
    // Very hacky, assigns array so `_tokenizeString()` can add rules to it
    grammar.rules = [];
    // const tokenLineResults: vscodeTextmate.ITokenizeLineResult[] = [];
    let ruleStack = vscodeTextmate.INITIAL;
    for (let i = 0; i < document.lineCount; i++) {
        const line = document.lineAt(i).text;
        // 15000 is the per-line time limit forwarded to vscode-textmate.
        const lineTokens = grammar.tokenizeLine(line, ruleStack, 15000);
        // tokenLineResults.push(
        // 	{
        // 		tokens: lineTokens.tokens,
        // 		ruleStack: structuredClone(lineTokens.ruleStack),
        // 		stoppedEarly: lineTokens.stoppedEarly,
        // 	}
        // );
        // one liner?
        // Replace the last rule entry for this line with `undefined` as a
        // line separator — order-sensitive hack coupled to _tokenizeString().
        grammar.rules.pop();
        grammar.rules.push(undefined);
        // Carry tokenization state into the next line.
        ruleStack = lineTokens.ruleStack;
    }
    // vscode.window.showInformationMessage(JSON.stringify(registry, stringify));
    // Debug aid: dumps the grammar via the extension's custom replacer.
    vscode.window.showInformationMessage(JSON.stringify(grammar, extension_1.stringify));
    // vscode.window.showInformationMessage(JSON.stringify(tokenLineResults, stringify));
    // return tokenLineResults;
    return grammar;
}
exports.tokenizeFile = tokenizeFile;
Loading

0 comments on commit 97fbc42

Please sign in to comment.