Merge commit 'be3e8236086165e5e45a5a10783823874b3f3ebd' as 'lib/vscode'
@@ -0,0 +1,531 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as nls from 'vs/nls';
import * as dom from 'vs/base/browser/dom';
import { Color } from 'vs/base/common/color';
import { onUnexpectedError } from 'vs/base/common/errors';
import { Emitter, Event } from 'vs/base/common/event';
import * as resources from 'vs/base/common/resources';
import * as types from 'vs/base/common/types';
import { equals as equalArray } from 'vs/base/common/arrays';
import { URI } from 'vs/base/common/uri';
import { TokenizationResult, TokenizationResult2 } from 'vs/editor/common/core/token';
import { IState, ITokenizationSupport, LanguageId, TokenMetadata, TokenizationRegistry, StandardTokenType, LanguageIdentifier } from 'vs/editor/common/modes';
import { nullTokenize2 } from 'vs/editor/common/modes/nullMode';
import { generateTokensCSSForColorMap } from 'vs/editor/common/modes/supports/tokenization';
import { IModeService } from 'vs/editor/common/services/modeService';
import { ILogService } from 'vs/platform/log/common/log';
import { INotificationService, Severity } from 'vs/platform/notification/common/notification';
import { IStorageService, StorageScope } from 'vs/platform/storage/common/storage';
import { ExtensionMessageCollector } from 'vs/workbench/services/extensions/common/extensionsRegistry';
import { ITMSyntaxExtensionPoint, grammarsExtPoint } from 'vs/workbench/services/textMate/common/TMGrammars';
import { ITextMateService } from 'vs/workbench/services/textMate/common/textMateService';
import { ITextMateThemingRule, IWorkbenchThemeService, IWorkbenchColorTheme } from 'vs/workbench/services/themes/common/workbenchThemeService';
import type { IGrammar, StackElement, IOnigLib, IRawTheme } from 'vscode-textmate';
import { Disposable, IDisposable, dispose } from 'vs/base/common/lifecycle';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IValidGrammarDefinition, IValidEmbeddedLanguagesMap, IValidTokenTypeMap } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { TMGrammarFactory } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { IExtensionResourceLoaderService } from 'vs/workbench/services/extensionResourceLoader/common/extensionResourceLoader';
import { IProgressService, ProgressLocation } from 'vs/platform/progress/common/progress';

export abstract class AbstractTextMateService extends Disposable implements ITextMateService {
	public _serviceBrand: undefined;

	private readonly _onDidEncounterLanguage: Emitter<LanguageId> = this._register(new Emitter<LanguageId>());
	public readonly onDidEncounterLanguage: Event<LanguageId> = this._onDidEncounterLanguage.event;

	private readonly _styleElement: HTMLStyleElement;
	private readonly _createdModes: string[];
	private readonly _encounteredLanguages: boolean[];

	private _debugMode: boolean;
	private _debugModePrintFunc: (str: string) => void;

	private _grammarDefinitions: IValidGrammarDefinition[] | null;
	private _grammarFactory: TMGrammarFactory | null;
	private _tokenizersRegistrations: IDisposable[];
	protected _currentTheme: IRawTheme | null;
	protected _currentTokenColorMap: string[] | null;

	constructor(
		@IModeService private readonly _modeService: IModeService,
		@IWorkbenchThemeService private readonly _themeService: IWorkbenchThemeService,
		@IExtensionResourceLoaderService protected readonly _extensionResourceLoaderService: IExtensionResourceLoaderService,
		@INotificationService private readonly _notificationService: INotificationService,
		@ILogService private readonly _logService: ILogService,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
		@IStorageService private readonly _storageService: IStorageService,
		@IProgressService private readonly _progressService: IProgressService
	) {
		super();
		this._styleElement = dom.createStyleSheet();
		this._styleElement.className = 'vscode-tokens-styles';
		this._createdModes = [];
		this._encounteredLanguages = [];

		this._debugMode = false;
		this._debugModePrintFunc = () => { };

		this._grammarDefinitions = null;
		this._grammarFactory = null;
		this._tokenizersRegistrations = [];

		this._currentTheme = null;
		this._currentTokenColorMap = null;

		grammarsExtPoint.setHandler((extensions) => {
			this._grammarDefinitions = null;
			if (this._grammarFactory) {
				this._grammarFactory.dispose();
				this._grammarFactory = null;
				this._onDidDisposeGrammarFactory();
			}
			this._tokenizersRegistrations = dispose(this._tokenizersRegistrations);

			this._grammarDefinitions = [];
			for (const extension of extensions) {
				const grammars = extension.value;
				for (const grammar of grammars) {
					if (!this._validateGrammarExtensionPoint(extension.description.extensionLocation, grammar, extension.collector)) {
						continue;
					}
					const grammarLocation = resources.joinPath(extension.description.extensionLocation, grammar.path);

					const embeddedLanguages: IValidEmbeddedLanguagesMap = Object.create(null);
					if (grammar.embeddedLanguages) {
						let scopes = Object.keys(grammar.embeddedLanguages);
						for (let i = 0, len = scopes.length; i < len; i++) {
							let scope = scopes[i];
							let language = grammar.embeddedLanguages[scope];
							if (typeof language !== 'string') {
								// never hurts to be too careful
								continue;
							}
							let languageIdentifier = this._modeService.getLanguageIdentifier(language);
							if (languageIdentifier) {
								embeddedLanguages[scope] = languageIdentifier.id;
							}
						}
					}

					const tokenTypes: IValidTokenTypeMap = Object.create(null);
					if (grammar.tokenTypes) {
						const scopes = Object.keys(grammar.tokenTypes);
						for (const scope of scopes) {
							const tokenType = grammar.tokenTypes[scope];
							switch (tokenType) {
								case 'string':
									tokenTypes[scope] = StandardTokenType.String;
									break;
								case 'other':
									tokenTypes[scope] = StandardTokenType.Other;
									break;
								case 'comment':
									tokenTypes[scope] = StandardTokenType.Comment;
									break;
							}
						}
					}

					let languageIdentifier: LanguageIdentifier | null = null;
					if (grammar.language) {
						languageIdentifier = this._modeService.getLanguageIdentifier(grammar.language);
					}

					this._grammarDefinitions.push({
						location: grammarLocation,
						language: languageIdentifier ? languageIdentifier.id : undefined,
						scopeName: grammar.scopeName,
						embeddedLanguages: embeddedLanguages,
						tokenTypes: tokenTypes,
						injectTo: grammar.injectTo,
					});
				}
			}

			for (const createMode of this._createdModes) {
				this._registerDefinitionIfAvailable(createMode);
			}
		});

		this._register(this._themeService.onDidColorThemeChange(() => {
			if (this._grammarFactory) {
				this._updateTheme(this._grammarFactory, this._themeService.getColorTheme(), false);
			}
		}));

		// Generate some color map until the grammar registry is loaded
		let colorTheme = this._themeService.getColorTheme();
		let defaultForeground: Color = Color.transparent;
		let defaultBackground: Color = Color.transparent;
		for (let i = 0, len = colorTheme.tokenColors.length; i < len; i++) {
			let rule = colorTheme.tokenColors[i];
			if (!rule.scope && rule.settings) {
				if (rule.settings.foreground) {
					defaultForeground = Color.fromHex(rule.settings.foreground);
				}
				if (rule.settings.background) {
					defaultBackground = Color.fromHex(rule.settings.background);
				}
			}
		}
		TokenizationRegistry.setColorMap([null!, defaultForeground, defaultBackground]);

		this._modeService.onDidCreateMode((mode) => {
			let modeId = mode.getId();
			this._createdModes.push(modeId);
			this._registerDefinitionIfAvailable(modeId);
		});
	}

	public startDebugMode(printFn: (str: string) => void, onStop: () => void): void {
		if (this._debugMode) {
			this._notificationService.error(nls.localize('alreadyDebugging', "Already Logging."));
			return;
		}

		this._debugModePrintFunc = printFn;
		this._debugMode = true;

		if (this._debugMode) {
			this._progressService.withProgress(
				{
					location: ProgressLocation.Notification,
					buttons: [nls.localize('stop', "Stop")]
				},
				(progress) => {
					progress.report({
						message: nls.localize('progress1', "Preparing to log TM Grammar parsing. Press Stop when finished.")
					});

					return this._getVSCodeOniguruma().then((vscodeOniguruma) => {
						vscodeOniguruma.setDefaultDebugCall(true);
						progress.report({
							message: nls.localize('progress2', "Now logging TM Grammar parsing. Press Stop when finished.")
						});
						return new Promise<void>((resolve, reject) => { });
					});
				},
				(choice) => {
					this._getVSCodeOniguruma().then((vscodeOniguruma) => {
						this._debugModePrintFunc = () => { };
						this._debugMode = false;
						vscodeOniguruma.setDefaultDebugCall(false);
						onStop();
					});
				}
			);
		}
	}

	private _canCreateGrammarFactory(): boolean {
		// Check if extension point is ready
		return (this._grammarDefinitions ? true : false);
	}

	private async _getOrCreateGrammarFactory(): Promise<TMGrammarFactory> {
		if (this._grammarFactory) {
			return this._grammarFactory;
		}

		const [vscodeTextmate, vscodeOniguruma] = await Promise.all([import('vscode-textmate'), this._getVSCodeOniguruma()]);
		const onigLib: Promise<IOnigLib> = Promise.resolve({
			createOnigScanner: (sources: string[]) => vscodeOniguruma.createOnigScanner(sources),
			createOnigString: (str: string) => vscodeOniguruma.createOnigString(str)
		});

		// Avoid duplicate instantiations
		if (this._grammarFactory) {
			return this._grammarFactory;
		}

		this._grammarFactory = new TMGrammarFactory({
			logTrace: (msg: string) => this._logService.trace(msg),
			logError: (msg: string, err: any) => this._logService.error(msg, err),
			readFile: (resource: URI) => this._extensionResourceLoaderService.readExtensionResource(resource)
		}, this._grammarDefinitions || [], vscodeTextmate, onigLib);
		this._onDidCreateGrammarFactory(this._grammarDefinitions || []);

		this._updateTheme(this._grammarFactory, this._themeService.getColorTheme(), true);

		return this._grammarFactory;
	}

	private _registerDefinitionIfAvailable(modeId: string): void {
		const languageIdentifier = this._modeService.getLanguageIdentifier(modeId);
		if (!languageIdentifier) {
			return;
		}
		if (!this._canCreateGrammarFactory()) {
			return;
		}
		const languageId = languageIdentifier.id;

		// Here we must register the promise ASAP (without yielding!)
		this._tokenizersRegistrations.push(TokenizationRegistry.registerPromise(modeId, (async () => {
			try {
				const grammarFactory = await this._getOrCreateGrammarFactory();
				if (!grammarFactory.has(languageId)) {
					return null;
				}
				const r = await grammarFactory.createGrammar(languageId);
				if (!r.grammar) {
					return null;
				}
				const tokenization = new TMTokenization(r.grammar, r.initialState, r.containsEmbeddedLanguages);
				tokenization.onDidEncounterLanguage((languageId) => {
					if (!this._encounteredLanguages[languageId]) {
						this._encounteredLanguages[languageId] = true;
						this._onDidEncounterLanguage.fire(languageId);
					}
				});
				return new TMTokenizationSupport(r.languageId, tokenization, this._notificationService, this._configurationService, this._storageService);
			} catch (err) {
				onUnexpectedError(err);
				return null;
			}
		})()));
	}

	private static _toColorMap(colorMap: string[]): Color[] {
		let result: Color[] = [null!];
		for (let i = 1, len = colorMap.length; i < len; i++) {
			result[i] = Color.fromHex(colorMap[i]);
		}
		return result;
	}

	private _updateTheme(grammarFactory: TMGrammarFactory, colorTheme: IWorkbenchColorTheme, forceUpdate: boolean): void {
		if (!forceUpdate && this._currentTheme && this._currentTokenColorMap && AbstractTextMateService.equalsTokenRules(this._currentTheme.settings, colorTheme.tokenColors) && equalArray(this._currentTokenColorMap, colorTheme.tokenColorMap)) {
			return;
		}
		this._currentTheme = { name: colorTheme.label, settings: colorTheme.tokenColors };
		this._currentTokenColorMap = colorTheme.tokenColorMap;
		this._doUpdateTheme(grammarFactory, this._currentTheme, this._currentTokenColorMap);
	}

	protected _doUpdateTheme(grammarFactory: TMGrammarFactory, theme: IRawTheme, tokenColorMap: string[]): void {
		grammarFactory.setTheme(theme, tokenColorMap);
		let colorMap = AbstractTextMateService._toColorMap(tokenColorMap);
		let cssRules = generateTokensCSSForColorMap(colorMap);
		this._styleElement.textContent = cssRules;
		TokenizationRegistry.setColorMap(colorMap);
	}

	private static equalsTokenRules(a: ITextMateThemingRule[] | null, b: ITextMateThemingRule[] | null): boolean {
		if (!b || !a || b.length !== a.length) {
			return false;
		}
		for (let i = b.length - 1; i >= 0; i--) {
			let r1 = b[i];
			let r2 = a[i];
			if (r1.scope !== r2.scope) {
				return false;
			}
			let s1 = r1.settings;
			let s2 = r2.settings;
			if (s1 && s2) {
				if (s1.fontStyle !== s2.fontStyle || s1.foreground !== s2.foreground || s1.background !== s2.background) {
					return false;
				}
			} else if (!s1 || !s2) {
				return false;
			}
		}
		return true;
	}

	private _validateGrammarExtensionPoint(extensionLocation: URI, syntax: ITMSyntaxExtensionPoint, collector: ExtensionMessageCollector): boolean {
		if (syntax.language && ((typeof syntax.language !== 'string') || !this._modeService.isRegisteredMode(syntax.language))) {
			collector.error(nls.localize('invalid.language', "Unknown language in `contributes.{0}.language`. Provided value: {1}", grammarsExtPoint.name, String(syntax.language)));
			return false;
		}
		if (!syntax.scopeName || (typeof syntax.scopeName !== 'string')) {
			collector.error(nls.localize('invalid.scopeName', "Expected string in `contributes.{0}.scopeName`. Provided value: {1}", grammarsExtPoint.name, String(syntax.scopeName)));
			return false;
		}
		if (!syntax.path || (typeof syntax.path !== 'string')) {
			collector.error(nls.localize('invalid.path.0', "Expected string in `contributes.{0}.path`. Provided value: {1}", grammarsExtPoint.name, String(syntax.path)));
			return false;
		}
		if (syntax.injectTo && (!Array.isArray(syntax.injectTo) || syntax.injectTo.some(scope => typeof scope !== 'string'))) {
			collector.error(nls.localize('invalid.injectTo', "Invalid value in `contributes.{0}.injectTo`. Must be an array of language scope names. Provided value: {1}", grammarsExtPoint.name, JSON.stringify(syntax.injectTo)));
			return false;
		}
		if (syntax.embeddedLanguages && !types.isObject(syntax.embeddedLanguages)) {
			collector.error(nls.localize('invalid.embeddedLanguages', "Invalid value in `contributes.{0}.embeddedLanguages`. Must be an object map from scope name to language. Provided value: {1}", grammarsExtPoint.name, JSON.stringify(syntax.embeddedLanguages)));
			return false;
		}

		if (syntax.tokenTypes && !types.isObject(syntax.tokenTypes)) {
			collector.error(nls.localize('invalid.tokenTypes', "Invalid value in `contributes.{0}.tokenTypes`. Must be an object map from scope name to token type. Provided value: {1}", grammarsExtPoint.name, JSON.stringify(syntax.tokenTypes)));
			return false;
		}

		const grammarLocation = resources.joinPath(extensionLocation, syntax.path);
		if (!resources.isEqualOrParent(grammarLocation, extensionLocation)) {
			collector.warn(nls.localize('invalid.path.1', "Expected `contributes.{0}.path` ({1}) to be included inside extension's folder ({2}). This might make the extension non-portable.", grammarsExtPoint.name, grammarLocation.path, extensionLocation.path));
		}
		return true;
	}

	public async createGrammar(modeId: string): Promise<IGrammar | null> {
		const languageId = this._modeService.getLanguageIdentifier(modeId);
		if (!languageId) {
			return null;
		}
		const grammarFactory = await this._getOrCreateGrammarFactory();
		if (!grammarFactory.has(languageId.id)) {
			return null;
		}
		const { grammar } = await grammarFactory.createGrammar(languageId.id);
		return grammar;
	}

	protected _onDidCreateGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): void {
	}

	protected _onDidDisposeGrammarFactory(): void {
	}

	private _vscodeOniguruma: Promise<typeof import('vscode-oniguruma')> | null = null;
	private _getVSCodeOniguruma(): Promise<typeof import('vscode-oniguruma')> {
		if (!this._vscodeOniguruma) {
			this._vscodeOniguruma = this._doGetVSCodeOniguruma();
		}
		return this._vscodeOniguruma;
	}

	private async _doGetVSCodeOniguruma(): Promise<typeof import('vscode-oniguruma')> {
		const [vscodeOniguruma, wasm] = await Promise.all([import('vscode-oniguruma'), this._loadVSCodeOnigurumWASM()]);
		const options = {
			data: wasm,
			print: (str: string) => {
				this._debugModePrintFunc(str);
			}
		};
		await vscodeOniguruma.loadWASM(options);
		return vscodeOniguruma;
	}

	protected abstract _loadVSCodeOnigurumWASM(): Promise<Response | ArrayBuffer>;
}

const donotAskUpdateKey = 'editor.maxTokenizationLineLength.donotask';

class TMTokenizationSupport implements ITokenizationSupport {
	private readonly _languageId: LanguageId;
	private readonly _actual: TMTokenization;
	private _tokenizationWarningAlreadyShown: boolean;
	private _maxTokenizationLineLength: number;

	constructor(
		languageId: LanguageId,
		actual: TMTokenization,
		@INotificationService private readonly _notificationService: INotificationService,
		@IConfigurationService private readonly _configurationService: IConfigurationService,
		@IStorageService private readonly _storageService: IStorageService
	) {
		this._languageId = languageId;
		this._actual = actual;
		this._tokenizationWarningAlreadyShown = !!(this._storageService.getBoolean(donotAskUpdateKey, StorageScope.GLOBAL));
		this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength');
		this._configurationService.onDidChangeConfiguration(e => {
			if (e.affectsConfiguration('editor.maxTokenizationLineLength')) {
				this._maxTokenizationLineLength = this._configurationService.getValue<number>('editor.maxTokenizationLineLength');
			}
		});
	}

	getInitialState(): IState {
		return this._actual.getInitialState();
	}

	tokenize(line: string, state: IState, offsetDelta: number): TokenizationResult {
		throw new Error('Not supported!');
	}

	tokenize2(line: string, state: StackElement, offsetDelta: number): TokenizationResult2 {
		if (offsetDelta !== 0) {
			throw new Error('Unexpected: offsetDelta should be 0.');
		}

		// Do not attempt to tokenize if a line is too long
		if (line.length >= this._maxTokenizationLineLength) {
			if (!this._tokenizationWarningAlreadyShown) {
				this._tokenizationWarningAlreadyShown = true;
				this._notificationService.prompt(
					Severity.Warning,
					nls.localize('too many characters', "Tokenization is skipped for long lines for performance reasons. The length of a long line can be configured via `editor.maxTokenizationLineLength`."),
					[{
						label: nls.localize('neverAgain', "Don't Show Again"),
						isSecondary: true,
						run: () => this._storageService.store(donotAskUpdateKey, true, StorageScope.GLOBAL)
					}]
				);
			}
			console.log(`Line (${line.substr(0, 15)}...): longer than ${this._maxTokenizationLineLength} characters, tokenization skipped.`);
			return nullTokenize2(this._languageId, line, state, offsetDelta);
		}

		return this._actual.tokenize2(line, state);
	}
}

class TMTokenization extends Disposable {

	private readonly _grammar: IGrammar;
	private readonly _containsEmbeddedLanguages: boolean;
	private readonly _seenLanguages: boolean[];
	private readonly _initialState: StackElement;

	private readonly _onDidEncounterLanguage: Emitter<LanguageId> = this._register(new Emitter<LanguageId>());
	public readonly onDidEncounterLanguage: Event<LanguageId> = this._onDidEncounterLanguage.event;

	constructor(grammar: IGrammar, initialState: StackElement, containsEmbeddedLanguages: boolean) {
		super();
		this._grammar = grammar;
		this._initialState = initialState;
		this._containsEmbeddedLanguages = containsEmbeddedLanguages;
		this._seenLanguages = [];
	}

	public getInitialState(): IState {
		return this._initialState;
	}

	public tokenize2(line: string, state: StackElement): TokenizationResult2 {
		let textMateResult = this._grammar.tokenizeLine2(line, state);

		if (this._containsEmbeddedLanguages) {
			let seenLanguages = this._seenLanguages;
			let tokens = textMateResult.tokens;

			// Must check if any of the embedded languages was hit
			for (let i = 0, len = (tokens.length >>> 1); i < len; i++) {
				let metadata = tokens[(i << 1) + 1];
				let languageId = TokenMetadata.getLanguageId(metadata);

				if (!seenLanguages[languageId]) {
					seenLanguages[languageId] = true;
					this._onDidEncounterLanguage.fire(languageId);
				}
			}
		}

		let endState: StackElement;
		// try to save an object if possible
		if (state.equals(textMateResult.ruleStack)) {
			endState = state;
		} else {
			endState = textMateResult.ruleStack;
		}

		return new TokenizationResult2(textMateResult.tokens, endState);
	}
}
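// Illustrative usage sketch, not part of the upstream file: how a workbench component could use
// ITextMateService.createGrammar() to inspect the scopes of a line. The class name and method are
// assumptions made for this example; only the imported service and IGrammar API above are real.
class TextMateScopeInspectorExample {
	constructor(@ITextMateService private readonly _textMateService: ITextMateService) { }

	public async printScopes(modeId: string, line: string): Promise<void> {
		const grammar = await this._textMateService.createGrammar(modeId);
		if (!grammar) {
			return; // no TM grammar contributed for this mode
		}
		// tokenizeLine yields scope names; tokenizeLine2 (used by TMTokenization above) yields the binary format.
		const result = grammar.tokenizeLine(line, null);
		for (const token of result.tokens) {
			console.log(`${line.substring(token.startIndex, token.endIndex)} -> ${token.scopes.join(', ')}`);
		}
	}
}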
@@ -0,0 +1,43 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { ITextMateService } from 'vs/workbench/services/textMate/common/textMateService';
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
import { AbstractTextMateService } from 'vs/workbench/services/textMate/browser/abstractTextMateService';
import { IModeService } from 'vs/editor/common/services/modeService';
import { ILogService } from 'vs/platform/log/common/log';
import { INotificationService } from 'vs/platform/notification/common/notification';
import { IWorkbenchThemeService } from 'vs/workbench/services/themes/common/workbenchThemeService';
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
import { IStorageService } from 'vs/platform/storage/common/storage';
import { IExtensionResourceLoaderService } from 'vs/workbench/services/extensionResourceLoader/common/extensionResourceLoader';
import { IProgressService } from 'vs/platform/progress/common/progress';
import { FileAccess } from 'vs/base/common/network';

export class TextMateService extends AbstractTextMateService {

	constructor(
		@IModeService modeService: IModeService,
		@IWorkbenchThemeService themeService: IWorkbenchThemeService,
		@IExtensionResourceLoaderService extensionResourceLoaderService: IExtensionResourceLoaderService,
		@INotificationService notificationService: INotificationService,
		@ILogService logService: ILogService,
		@IConfigurationService configurationService: IConfigurationService,
		@IStorageService storageService: IStorageService,
		@IProgressService progressService: IProgressService
	) {
		super(modeService, themeService, extensionResourceLoaderService, notificationService, logService, configurationService, storageService, progressService);
	}

	protected async _loadVSCodeOnigurumWASM(): Promise<Response | ArrayBuffer> {
		const response = await fetch(FileAccess.asBrowserUri('vscode-oniguruma/../onig.wasm', require).toString(true));
		// Using the response directly only works if the server sets the MIME type 'application/wasm'.
		// Otherwise, a TypeError is thrown when using the streaming compiler.
		// We therefore use the non-streaming compiler :(.
		return await response.arrayBuffer();
	}
}

registerSingleton(ITextMateService, TextMateService);
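// Illustrative sketch, not upstream code: why _loadVSCodeOnigurumWASM above converts the Response
// to an ArrayBuffer. WebAssembly.instantiateStreaming() requires the server to send
// 'Content-Type: application/wasm'; falling back to an ArrayBuffer works regardless of MIME type.
// The function name and the content-type check are assumptions made for this example.
async function loadWasmPortablyExample(url: string): Promise<WebAssembly.WebAssemblyInstantiatedSource> {
	const response = await fetch(url);
	if (response.headers.get('content-type') === 'application/wasm') {
		// fast path: streaming compilation while the bytes are still downloading
		return WebAssembly.instantiateStreaming(response);
	}
	// portable fallback: buffer everything, then compile (what the service effectively relies on)
	return WebAssembly.instantiate(await response.arrayBuffer());
}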
@@ -0,0 +1,147 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as nls from 'vs/nls';
import { URI } from 'vs/base/common/uri';
import { LanguageId } from 'vs/editor/common/modes';
import type { IGrammar, Registry, StackElement, IRawTheme, IOnigLib } from 'vscode-textmate';
import { Disposable } from 'vs/base/common/lifecycle';
import { TMScopeRegistry, IValidGrammarDefinition, IValidEmbeddedLanguagesMap } from 'vs/workbench/services/textMate/common/TMScopeRegistry';

interface ITMGrammarFactoryHost {
	logTrace(msg: string): void;
	logError(msg: string, err: any): void;
	readFile(resource: URI): Promise<string>;
}

export interface ICreateGrammarResult {
	languageId: LanguageId;
	grammar: IGrammar | null;
	initialState: StackElement;
	containsEmbeddedLanguages: boolean;
}

export class TMGrammarFactory extends Disposable {

	private readonly _host: ITMGrammarFactoryHost;
	private readonly _initialState: StackElement;
	private readonly _scopeRegistry: TMScopeRegistry;
	private readonly _injections: { [scopeName: string]: string[]; };
	private readonly _injectedEmbeddedLanguages: { [scopeName: string]: IValidEmbeddedLanguagesMap[]; };
	private readonly _languageToScope2: string[];
	private readonly _grammarRegistry: Registry;

	constructor(host: ITMGrammarFactoryHost, grammarDefinitions: IValidGrammarDefinition[], vscodeTextmate: typeof import('vscode-textmate'), onigLib: Promise<IOnigLib>) {
		super();
		this._host = host;
		this._initialState = vscodeTextmate.INITIAL;
		this._scopeRegistry = this._register(new TMScopeRegistry());
		this._injections = {};
		this._injectedEmbeddedLanguages = {};
		this._languageToScope2 = [];
		this._grammarRegistry = this._register(new vscodeTextmate.Registry({
			onigLib: onigLib,
			loadGrammar: async (scopeName: string) => {
				const grammarDefinition = this._scopeRegistry.getGrammarDefinition(scopeName);
				if (!grammarDefinition) {
					this._host.logTrace(`No grammar found for scope ${scopeName}`);
					return null;
				}
				const location = grammarDefinition.location;
				try {
					const content = await this._host.readFile(location);
					return vscodeTextmate.parseRawGrammar(content, location.path);
				} catch (e) {
					this._host.logError(`Unable to load and parse grammar for scope ${scopeName} from ${location}`, e);
					return null;
				}
			},
			getInjections: (scopeName: string) => {
				const scopeParts = scopeName.split('.');
				let injections: string[] = [];
				for (let i = 1; i <= scopeParts.length; i++) {
					const subScopeName = scopeParts.slice(0, i).join('.');
					injections = [...injections, ...(this._injections[subScopeName] || [])];
				}
				return injections;
			}
		}));

		for (const validGrammar of grammarDefinitions) {
			this._scopeRegistry.register(validGrammar);

			if (validGrammar.injectTo) {
				for (let injectScope of validGrammar.injectTo) {
					let injections = this._injections[injectScope];
					if (!injections) {
						this._injections[injectScope] = injections = [];
					}
					injections.push(validGrammar.scopeName);
				}

				if (validGrammar.embeddedLanguages) {
					for (let injectScope of validGrammar.injectTo) {
						let injectedEmbeddedLanguages = this._injectedEmbeddedLanguages[injectScope];
						if (!injectedEmbeddedLanguages) {
							this._injectedEmbeddedLanguages[injectScope] = injectedEmbeddedLanguages = [];
						}
						injectedEmbeddedLanguages.push(validGrammar.embeddedLanguages);
					}
				}
			}

			if (validGrammar.language) {
				this._languageToScope2[validGrammar.language] = validGrammar.scopeName;
			}
		}
	}

	public has(languageId: LanguageId): boolean {
		return this._languageToScope2[languageId] ? true : false;
	}

	public setTheme(theme: IRawTheme, colorMap: string[]): void {
		this._grammarRegistry.setTheme(theme, colorMap);
	}

	public getColorMap(): string[] {
		return this._grammarRegistry.getColorMap();
	}

	public async createGrammar(languageId: LanguageId): Promise<ICreateGrammarResult> {
		const scopeName = this._languageToScope2[languageId];
		if (typeof scopeName !== 'string') {
			// No TM grammar defined
			return Promise.reject(new Error(nls.localize('no-tm-grammar', "No TM Grammar registered for this language.")));
		}

		const grammarDefinition = this._scopeRegistry.getGrammarDefinition(scopeName);
		if (!grammarDefinition) {
			// No TM grammar defined
			return Promise.reject(new Error(nls.localize('no-tm-grammar', "No TM Grammar registered for this language.")));
		}

		let embeddedLanguages = grammarDefinition.embeddedLanguages;
		if (this._injectedEmbeddedLanguages[scopeName]) {
			const injectedEmbeddedLanguages = this._injectedEmbeddedLanguages[scopeName];
			for (const injected of injectedEmbeddedLanguages) {
				for (const scope of Object.keys(injected)) {
					embeddedLanguages[scope] = injected[scope];
				}
			}
		}

		const containsEmbeddedLanguages = (Object.keys(embeddedLanguages).length > 0);

		const grammar = await this._grammarRegistry.loadGrammarWithConfiguration(scopeName, languageId, { embeddedLanguages, tokenTypes: <any>grammarDefinition.tokenTypes });

		return {
			languageId: languageId,
			grammar: grammar,
			initialState: this._initialState,
			containsEmbeddedLanguages: containsEmbeddedLanguages
		};
	}
}
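// Worked example, not upstream code: the getInjections callback above collects injections registered
// for every dot-separated prefix of the requested scope name, so a grammar injected into 'source.js'
// is also returned for 'source.js.jsx'. The standalone function below mirrors that logic with
// hypothetical data to make the prefix walk concrete.
function collectInjectionsExample(injections: { [scopeName: string]: string[] }, scopeName: string): string[] {
	const scopeParts = scopeName.split('.');
	let result: string[] = [];
	for (let i = 1; i <= scopeParts.length; i++) {
		const subScopeName = scopeParts.slice(0, i).join('.');
		result = [...result, ...(injections[subScopeName] || [])];
	}
	// collectInjectionsExample({ 'source.js': ['source.js.jsdoc'] }, 'source.js.jsx')
	//   checks 'source', 'source.js', 'source.js.jsx' and returns ['source.js.jsdoc']
	return result;
}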
@@ -0,0 +1,72 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as nls from 'vs/nls';
import { ExtensionsRegistry, IExtensionPoint } from 'vs/workbench/services/extensions/common/extensionsRegistry';
import { languagesExtPoint } from 'vs/workbench/services/mode/common/workbenchModeService';

export interface IEmbeddedLanguagesMap {
	[scopeName: string]: string;
}

export interface TokenTypesContribution {
	[scopeName: string]: string;
}

export interface ITMSyntaxExtensionPoint {
	language: string;
	scopeName: string;
	path: string;
	embeddedLanguages: IEmbeddedLanguagesMap;
	tokenTypes: TokenTypesContribution;
	injectTo: string[];
}

export const grammarsExtPoint: IExtensionPoint<ITMSyntaxExtensionPoint[]> = ExtensionsRegistry.registerExtensionPoint<ITMSyntaxExtensionPoint[]>({
	extensionPoint: 'grammars',
	deps: [languagesExtPoint],
	jsonSchema: {
		description: nls.localize('vscode.extension.contributes.grammars', 'Contributes textmate tokenizers.'),
		type: 'array',
		defaultSnippets: [{ body: [{ language: '${1:id}', scopeName: 'source.${2:id}', path: './syntaxes/${3:id}.tmLanguage.' }] }],
		items: {
			type: 'object',
			defaultSnippets: [{ body: { language: '${1:id}', scopeName: 'source.${2:id}', path: './syntaxes/${3:id}.tmLanguage.' } }],
			properties: {
				language: {
					description: nls.localize('vscode.extension.contributes.grammars.language', 'Language identifier for which this syntax is contributed to.'),
					type: 'string'
				},
				scopeName: {
					description: nls.localize('vscode.extension.contributes.grammars.scopeName', 'Textmate scope name used by the tmLanguage file.'),
					type: 'string'
				},
				path: {
					description: nls.localize('vscode.extension.contributes.grammars.path', 'Path of the tmLanguage file. The path is relative to the extension folder and typically starts with \'./syntaxes/\'.'),
					type: 'string'
				},
				embeddedLanguages: {
					description: nls.localize('vscode.extension.contributes.grammars.embeddedLanguages', 'A map of scope name to language id if this grammar contains embedded languages.'),
					type: 'object'
				},
				tokenTypes: {
					description: nls.localize('vscode.extension.contributes.grammars.tokenTypes', 'A map of scope name to token types.'),
					type: 'object',
					additionalProperties: {
						enum: ['string', 'comment', 'other']
					}
				},
				injectTo: {
					description: nls.localize('vscode.extension.contributes.grammars.injectTo', 'List of language scope names to which this grammar is injected to.'),
					type: 'array',
					items: {
						type: 'string'
					}
				}
			},
			required: ['scopeName', 'path']
		}
	}
});
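// Illustrative example, not upstream code: a 'contributes.grammars' entry as an extension would
// declare it in package.json, expressed here as a typed object so it checks against the extension
// point interface above. All concrete values (language, scope, path, maps) are assumptions.
const exampleGrammarContribution: ITMSyntaxExtensionPoint = {
	language: 'shellscript',
	scopeName: 'source.shell',
	path: './syntaxes/shell.tmLanguage.json',
	embeddedLanguages: { 'meta.embedded.inline.python': 'python' },
	tokenTypes: { 'string.quoted.double.shell': 'other' },
	injectTo: []
};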
lib/vscode/src/vs/workbench/services/textMate/common/TMHelper.ts
@@ -0,0 +1,149 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

export interface IColorTheme {
	readonly tokenColors: ITokenColorizationRule[];
}

export interface ITokenColorizationRule {
	name?: string;
	scope?: string | string[];
	settings: ITokenColorizationSetting;
}

export interface ITokenColorizationSetting {
	foreground?: string;
	background?: string;
	fontStyle?: string; // italic, underline, bold
}

export function findMatchingThemeRule(theme: IColorTheme, scopes: string[], onlyColorRules: boolean = true): ThemeRule | null {
	for (let i = scopes.length - 1; i >= 0; i--) {
		let parentScopes = scopes.slice(0, i);
		let scope = scopes[i];
		let r = findMatchingThemeRule2(theme, scope, parentScopes, onlyColorRules);
		if (r) {
			return r;
		}
	}
	return null;
}

function findMatchingThemeRule2(theme: IColorTheme, scope: string, parentScopes: string[], onlyColorRules: boolean): ThemeRule | null {
	let result: ThemeRule | null = null;

	// Loop backwards, to ensure the last most specific rule wins
	for (let i = theme.tokenColors.length - 1; i >= 0; i--) {
		let rule = theme.tokenColors[i];
		if (onlyColorRules && !rule.settings.foreground) {
			continue;
		}

		let selectors: string[];
		if (typeof rule.scope === 'string') {
			selectors = rule.scope.split(/,/).map(scope => scope.trim());
		} else if (Array.isArray(rule.scope)) {
			selectors = rule.scope;
		} else {
			continue;
		}

		for (let j = 0, lenJ = selectors.length; j < lenJ; j++) {
			let rawSelector = selectors[j];

			let themeRule = new ThemeRule(rawSelector, rule.settings);
			if (themeRule.matches(scope, parentScopes)) {
				if (themeRule.isMoreSpecific(result)) {
					result = themeRule;
				}
			}
		}
	}

	return result;
}

export class ThemeRule {
	readonly rawSelector: string;
	readonly settings: ITokenColorizationSetting;
	readonly scope: string;
	readonly parentScopes: string[];

	constructor(rawSelector: string, settings: ITokenColorizationSetting) {
		this.rawSelector = rawSelector;
		this.settings = settings;
		let rawSelectorPieces = this.rawSelector.split(/ /);
		this.scope = rawSelectorPieces[rawSelectorPieces.length - 1];
		this.parentScopes = rawSelectorPieces.slice(0, rawSelectorPieces.length - 1);
	}

	public matches(scope: string, parentScopes: string[]): boolean {
		return ThemeRule._matches(this.scope, this.parentScopes, scope, parentScopes);
	}

	private static _cmp(a: ThemeRule | null, b: ThemeRule | null): number {
		if (a === null && b === null) {
			return 0;
		}
		if (a === null) {
			// b > a
			return -1;
		}
		if (b === null) {
			// a > b
			return 1;
		}
		if (a.scope.length !== b.scope.length) {
			// longer scope length > shorter scope length
			return a.scope.length - b.scope.length;
		}
		const aParentScopesLen = a.parentScopes.length;
		const bParentScopesLen = b.parentScopes.length;
		if (aParentScopesLen !== bParentScopesLen) {
			// more parents > less parents
			return aParentScopesLen - bParentScopesLen;
		}
		for (let i = 0; i < aParentScopesLen; i++) {
			const aLen = a.parentScopes[i].length;
			const bLen = b.parentScopes[i].length;
			if (aLen !== bLen) {
				return aLen - bLen;
			}
		}
		return 0;
	}

	public isMoreSpecific(other: ThemeRule | null): boolean {
		return (ThemeRule._cmp(this, other) > 0);
	}

	private static _matchesOne(selectorScope: string, scope: string): boolean {
		let selectorPrefix = selectorScope + '.';
		if (selectorScope === scope || scope.substring(0, selectorPrefix.length) === selectorPrefix) {
			return true;
		}
		return false;
	}

	private static _matches(selectorScope: string, selectorParentScopes: string[], scope: string, parentScopes: string[]): boolean {
		if (!this._matchesOne(selectorScope, scope)) {
			return false;
		}

		let selectorParentIndex = selectorParentScopes.length - 1;
		let parentIndex = parentScopes.length - 1;
		while (selectorParentIndex >= 0 && parentIndex >= 0) {
			if (this._matchesOne(selectorParentScopes[selectorParentIndex], parentScopes[parentIndex])) {
				selectorParentIndex--;
			}
			parentIndex--;
		}

		if (selectorParentIndex === -1) {
			return true;
		}
		return false;
	}
}
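// Worked example, not upstream code: the selector 'meta.function string' has scope 'string' and
// parent scope 'meta.function'. It matches a token scoped 'string.quoted.double' whenever some
// enclosing scope starts with 'meta.function', and it beats the bare selector 'string' because it
// has more parent scopes. The colors used here are arbitrary.
const broadRuleExample = new ThemeRule('string', { foreground: '#ce9178' });
const narrowRuleExample = new ThemeRule('meta.function string', { foreground: '#d16969' });
const narrowMatches = narrowRuleExample.matches('string.quoted.double', ['source.ts', 'meta.function.ts']); // true
const narrowWins = narrowRuleExample.isMoreSpecific(broadRuleExample); // true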
@@ -0,0 +1,58 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import * as resources from 'vs/base/common/resources';
import { URI } from 'vs/base/common/uri';
import { Disposable } from 'vs/base/common/lifecycle';
import { StandardTokenType, LanguageId } from 'vs/editor/common/modes';

export interface IValidGrammarDefinition {
	location: URI;
	language?: LanguageId;
	scopeName: string;
	embeddedLanguages: IValidEmbeddedLanguagesMap;
	tokenTypes: IValidTokenTypeMap;
	injectTo?: string[];
}

export interface IValidTokenTypeMap {
	[selector: string]: StandardTokenType;
}

export interface IValidEmbeddedLanguagesMap {
	[scopeName: string]: LanguageId;
}

export class TMScopeRegistry extends Disposable {

	private _scopeNameToLanguageRegistration: { [scopeName: string]: IValidGrammarDefinition; };

	constructor() {
		super();
		this._scopeNameToLanguageRegistration = Object.create(null);
	}

	public reset(): void {
		this._scopeNameToLanguageRegistration = Object.create(null);
	}

	public register(def: IValidGrammarDefinition): void {
		if (this._scopeNameToLanguageRegistration[def.scopeName]) {
			const existingRegistration = this._scopeNameToLanguageRegistration[def.scopeName];
			if (!resources.isEqual(existingRegistration.location, def.location)) {
				console.warn(
					`Overwriting grammar scope name to file mapping for scope ${def.scopeName}.\n` +
					`Old grammar file: ${existingRegistration.location.toString()}.\n` +
					`New grammar file: ${def.location.toString()}`
				);
			}
		}
		this._scopeNameToLanguageRegistration[def.scopeName] = def;
	}

	public getGrammarDefinition(scopeName: string): IValidGrammarDefinition | null {
		return this._scopeNameToLanguageRegistration[scopeName] || null;
	}
}
@@ -0,0 +1,48 @@
{
	"registrations": [
		{
			"component": {
				"type": "other",
				"other": {
					"name": "lib-oniguruma",
					"downloadUrl": "https://github.com/kkos/oniguruma",
					"version": "6.9.5_rev1"
				}
			},
			"licenseDetail": [
				"Oniguruma LICENSE",
				"-----------------",
				"",
				"Copyright (c) 2002-2020 K.Kosako <kkosako0@gmail.com>",
				"All rights reserved.",
				"",
				"The BSD License",
				"",
				"Redistribution and use in source and binary forms, with or without",
				"modification, are permitted provided that the following conditions",
				"are met:",
				"1. Redistributions of source code must retain the above copyright",
				"   notice, this list of conditions and the following disclaimer.",
				"2. Redistributions in binary form must reproduce the above copyright",
				"   notice, this list of conditions and the following disclaimer in the",
				"   documentation and/or other materials provided with the distribution.",
				"",
				"THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND",
				"ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE",
				"IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE",
				"ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE",
				"FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL",
				"DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS",
				"OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)",
				"HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT",
				"LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY",
				"OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF",
				"SUCH DAMAGE."
			],
			"isOnlyProductionDependency": true,
			"license": "BSD",
			"version": "6.9.5_rev1"
		}
	],
	"version": 1
}
@@ -0,0 +1,116 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { Event } from 'vs/base/common/event';
import { LanguageId } from 'vs/editor/common/modes';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';

export const ITextMateService = createDecorator<ITextMateService>('textMateService');

export interface ITextMateService {
	readonly _serviceBrand: undefined;

	onDidEncounterLanguage: Event<LanguageId>;

	createGrammar(modeId: string): Promise<IGrammar | null>;

	startDebugMode(printFn: (str: string) => void, onStop: () => void): void;
}

// -------------- Types "liberated" from vscode-textmate due to usage in /common/

export const enum StandardTokenType {
	Other = 0,
	Comment = 1,
	String = 2,
	RegEx = 4,
}
/**
 * A grammar
 */
export interface IGrammar {
	/**
	 * Tokenize `lineText` using previous line state `prevState`.
	 */
	tokenizeLine(lineText: string, prevState: StackElement | null): ITokenizeLineResult;
	/**
	 * Tokenize `lineText` using previous line state `prevState`.
	 * The result contains the tokens in binary format, resolved with the following information:
	 *  - language
	 *  - token type (regex, string, comment, other)
	 *  - font style
	 *  - foreground color
	 *  - background color
	 * e.g. for getting the languageId: `(metadata & MetadataConsts.LANGUAGEID_MASK) >>> MetadataConsts.LANGUAGEID_OFFSET`
	 */
	tokenizeLine2(lineText: string, prevState: StackElement | null): ITokenizeLineResult2;
}
export interface ITokenizeLineResult {
	readonly tokens: IToken[];
	/**
	 * The `prevState` to be passed on to the next line tokenization.
	 */
	readonly ruleStack: StackElement;
}
/**
 * Helpers to manage the "collapsed" metadata of an entire StackElement stack.
 * The following assumptions have been made:
 *  - languageId < 256 => needs 8 bits
 *  - unique color count < 512 => needs 9 bits
 *
 * The binary format is:
 * - -------------------------------------------
 *     3322 2222 2222 1111 1111 1100 0000 0000
 *     1098 7654 3210 9876 5432 1098 7654 3210
 * - -------------------------------------------
 *     xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxxx
 *     bbbb bbbb bfff ffff ffFF FTTT LLLL LLLL
 * - -------------------------------------------
 *  - L = LanguageId (8 bits)
 *  - T = StandardTokenType (3 bits)
 *  - F = FontStyle (3 bits)
 *  - f = foreground color (9 bits)
 *  - b = background color (9 bits)
 */
export const enum MetadataConsts {
	LANGUAGEID_MASK = 255,
	TOKEN_TYPE_MASK = 1792,
	FONT_STYLE_MASK = 14336,
	FOREGROUND_MASK = 8372224,
	BACKGROUND_MASK = 4286578688,
	LANGUAGEID_OFFSET = 0,
	TOKEN_TYPE_OFFSET = 8,
	FONT_STYLE_OFFSET = 11,
	FOREGROUND_OFFSET = 14,
	BACKGROUND_OFFSET = 23,
}
export interface ITokenizeLineResult2 {
	/**
	 * The tokens in binary format. Each token occupies two array indices. For token i:
	 *  - at offset 2*i => startIndex
	 *  - at offset 2*i + 1 => metadata
	 *
	 */
	readonly tokens: Uint32Array;
	/**
	 * The `prevState` to be passed on to the next line tokenization.
	 */
	readonly ruleStack: StackElement;
}
export interface IToken {
	startIndex: number;
	readonly endIndex: number;
	readonly scopes: string[];
}
/**
 * **IMPORTANT** - Immutable!
 */
export interface StackElement {
	_stackElementBrand: void;
	readonly depth: number;
	clone(): StackElement;
	equals(other: StackElement): boolean;
}
// -------------- End Types "liberated" from vscode-textmate due to usage in /common/
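// Worked example, not upstream code: unpacking one metadata word from ITokenizeLineResult2.tokens
// using the masks and offsets declared above. The helper name and return shape are assumptions
// made for this illustration.
function decodeTokenMetadataExample(metadata: number): { languageId: number; tokenType: StandardTokenType; foreground: number; background: number } {
	return {
		languageId: (metadata & MetadataConsts.LANGUAGEID_MASK) >>> MetadataConsts.LANGUAGEID_OFFSET,
		tokenType: ((metadata & MetadataConsts.TOKEN_TYPE_MASK) >>> MetadataConsts.TOKEN_TYPE_OFFSET) as StandardTokenType,
		foreground: (metadata & MetadataConsts.FOREGROUND_MASK) >>> MetadataConsts.FOREGROUND_OFFSET, // index into the color map
		background: (metadata & MetadataConsts.BACKGROUND_MASK) >>> MetadataConsts.BACKGROUND_OFFSET  // index into the color map
	};
}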
@@ -0,0 +1,254 @@
|
||||
/*---------------------------------------------------------------------------------------------
|
||||
* Copyright (c) Microsoft Corporation. All rights reserved.
|
||||
* Licensed under the MIT License. See License.txt in the project root for license information.
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import { ITextMateService } from 'vs/workbench/services/textMate/common/textMateService';
|
||||
import { registerSingleton } from 'vs/platform/instantiation/common/extensions';
|
||||
import { AbstractTextMateService } from 'vs/workbench/services/textMate/browser/abstractTextMateService';
|
||||
import { IModeService } from 'vs/editor/common/services/modeService';
|
||||
import { IWorkbenchThemeService } from 'vs/workbench/services/themes/common/workbenchThemeService';
|
||||
import { INotificationService } from 'vs/platform/notification/common/notification';
|
||||
import { ILogService } from 'vs/platform/log/common/log';
|
||||
import { IConfigurationService } from 'vs/platform/configuration/common/configuration';
|
||||
import { createWebWorker, MonacoWebWorker } from 'vs/editor/common/services/webWorker';
|
||||
import { IModelService } from 'vs/editor/common/services/modelService';
|
||||
import type { IRawTheme } from 'vscode-textmate';
|
||||
import { IValidGrammarDefinition } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
|
||||
import { TextMateWorker } from 'vs/workbench/services/textMate/electron-sandbox/textMateWorker';
|
||||
import { ITextModel } from 'vs/editor/common/model';
|
||||
import { Disposable } from 'vs/base/common/lifecycle';
|
||||
import { UriComponents, URI } from 'vs/base/common/uri';
|
||||
import { MultilineTokensBuilder } from 'vs/editor/common/model/tokensStore';
|
||||
import { TMGrammarFactory } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
|
||||
import { IModelContentChangedEvent } from 'vs/editor/common/model/textModelEvents';
|
||||
import { IStorageService } from 'vs/platform/storage/common/storage';
|
||||
import { IExtensionResourceLoaderService } from 'vs/workbench/services/extensionResourceLoader/common/extensionResourceLoader';
|
||||
import { IWorkbenchEnvironmentService } from 'vs/workbench/services/environment/common/environmentService';
|
||||
import { IProgressService } from 'vs/platform/progress/common/progress';
|
||||
import { FileAccess } from 'vs/base/common/network';
|
||||
|
||||
const RUN_TEXTMATE_IN_WORKER = false;
|
||||
|
||||
class ModelWorkerTextMateTokenizer extends Disposable {
|
||||
|
||||
private readonly _worker: TextMateWorker;
|
||||
private readonly _model: ITextModel;
|
||||
private _isSynced: boolean;
|
||||
private _pendingChanges: IModelContentChangedEvent[] = [];
|
||||
|
||||
constructor(worker: TextMateWorker, model: ITextModel) {
|
||||
super();
|
||||
this._worker = worker;
|
||||
this._model = model;
|
||||
this._isSynced = false;
|
||||
|
||||
this._register(this._model.onDidChangeAttached(() => this._onDidChangeAttached()));
|
||||
this._onDidChangeAttached();
|
||||
|
||||
this._register(this._model.onDidChangeContent((e) => {
|
||||
if (this._isSynced) {
|
||||
this._worker.acceptModelChanged(this._model.uri.toString(), e);
|
||||
this._pendingChanges.push(e);
|
||||
}
|
||||
}));
|
||||
|
||||
this._register(this._model.onDidChangeLanguage((e) => {
|
||||
if (this._isSynced) {
|
||||
this._worker.acceptModelLanguageChanged(this._model.uri.toString(), this._model.getLanguageIdentifier().id);
|
||||
}
|
||||
}));
|
||||
}
|
||||
|
||||
private _onDidChangeAttached(): void {
|
||||
if (this._model.isAttachedToEditor()) {
|
||||
if (!this._isSynced) {
|
||||
this._beginSync();
|
||||
}
|
||||
} else {
|
||||
if (this._isSynced) {
|
||||
this._endSync();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private _beginSync(): void {
|
||||
this._isSynced = true;
|
||||
this._worker.acceptNewModel({
|
||||
uri: this._model.uri,
|
||||
versionId: this._model.getVersionId(),
|
||||
lines: this._model.getLinesContent(),
|
||||
EOL: this._model.getEOL(),
|
||||
languageId: this._model.getLanguageIdentifier().id,
|
||||
});
|
||||
}
|
||||
|
||||
private _endSync(): void {
|
||||
this._isSynced = false;
|
||||
this._worker.acceptRemovedModel(this._model.uri.toString());
|
||||
}
|
||||
|
||||
public dispose() {
|
||||
super.dispose();
|
||||
this._endSync();
|
||||
}
|
||||
|
||||
private _confirm(versionId: number): void {
|
||||
while (this._pendingChanges.length > 0 && this._pendingChanges[0].versionId <= versionId) {
|
||||
this._pendingChanges.shift();
|
||||
}
|
||||
}
|
||||
|
||||
public setTokens(versionId: number, rawTokens: ArrayBuffer): void {
|
||||
this._confirm(versionId);
|
||||
const tokens = MultilineTokensBuilder.deserialize(new Uint8Array(rawTokens));
|
||||
|
||||
for (let i = 0; i < this._pendingChanges.length; i++) {
|
||||
const change = this._pendingChanges[i];
|
||||
for (let j = 0; j < tokens.length; j++) {
|
||||
for (let k = 0; k < change.changes.length; k++) {
|
||||
tokens[j].applyEdit(change.changes[k].range, change.changes[k].text);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
this._model.setTokens(tokens);
|
||||
}
|
||||
}
|
||||
|
||||
export class TextMateWorkerHost {
|
||||
|
||||
constructor(
|
||||
private readonly textMateService: TextMateService,
|
||||
@IExtensionResourceLoaderService private readonly _extensionResourceLoaderService: IExtensionResourceLoaderService
|
||||
) {
|
||||
}
|
||||
|
||||
async readFile(_resource: UriComponents): Promise<string> {
|
||||
const resource = URI.revive(_resource);
|
||||
return this._extensionResourceLoaderService.readExtensionResource(resource);
|
||||
}
|
||||
|
||||
async setTokens(_resource: UriComponents, versionId: number, tokens: Uint8Array): Promise<void> {
|
||||
const resource = URI.revive(_resource);
|
||||
this.textMateService.setTokens(resource, versionId, tokens);
|
||||
}
|
||||
}
|
||||
|
||||
export class TextMateService extends AbstractTextMateService {

	private _worker: MonacoWebWorker<TextMateWorker> | null;
	private _workerProxy: TextMateWorker | null;
	private _tokenizers: { [uri: string]: ModelWorkerTextMateTokenizer; };

	constructor(
		@IModeService modeService: IModeService,
		@IWorkbenchThemeService themeService: IWorkbenchThemeService,
		@IExtensionResourceLoaderService extensionResourceLoaderService: IExtensionResourceLoaderService,
		@INotificationService notificationService: INotificationService,
		@ILogService logService: ILogService,
		@IConfigurationService configurationService: IConfigurationService,
		@IStorageService storageService: IStorageService,
		@IProgressService progressService: IProgressService,
		@IModelService private readonly _modelService: IModelService,
		@IWorkbenchEnvironmentService private readonly _environmentService: IWorkbenchEnvironmentService,
	) {
		super(modeService, themeService, extensionResourceLoaderService, notificationService, logService, configurationService, storageService, progressService);
		this._worker = null;
		this._workerProxy = null;
		this._tokenizers = Object.create(null);
		this._register(this._modelService.onModelAdded(model => this._onModelAdded(model)));
		this._register(this._modelService.onModelRemoved(model => this._onModelRemoved(model)));
		this._modelService.getModels().forEach((model) => this._onModelAdded(model));
	}

	private _onModelAdded(model: ITextModel): void {
		if (!this._workerProxy) {
			return;
		}
		if (model.isTooLargeForSyncing()) {
			return;
		}
		const key = model.uri.toString();
		const tokenizer = new ModelWorkerTextMateTokenizer(this._workerProxy, model);
		this._tokenizers[key] = tokenizer;
	}

	private _onModelRemoved(model: ITextModel): void {
		const key = model.uri.toString();
		if (this._tokenizers[key]) {
			this._tokenizers[key].dispose();
			delete this._tokenizers[key];
		}
	}

	protected async _loadVSCodeOnigurumWASM(): Promise<Response | ArrayBuffer> {
		const response = await fetch(this._environmentService.isBuilt
			? FileAccess.asBrowserUri('../../../../../../node_modules.asar.unpacked/vscode-oniguruma/release/onig.wasm', require).toString(true)
			: FileAccess.asBrowserUri('../../../../../../node_modules/vscode-oniguruma/release/onig.wasm', require).toString(true));
		return response;
	}

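	/**
	 * Recreate the worker whenever a new grammar factory is created; the worker
	 * receives the grammar definitions through its createData.
	 */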
	protected _onDidCreateGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): void {
		this._killWorker();

		if (RUN_TEXTMATE_IN_WORKER) {
			const workerHost = new TextMateWorkerHost(this, this._extensionResourceLoaderService);
			const worker = createWebWorker<TextMateWorker>(this._modelService, {
				createData: {
					grammarDefinitions
				},
				label: 'textMateWorker',
				moduleId: 'vs/workbench/services/textMate/electron-browser/textMateWorker',
				host: workerHost
			});

			this._worker = worker;
			worker.getProxy().then((proxy) => {
				if (this._worker !== worker) {
					// disposed in the meantime
					return;
				}
				this._workerProxy = proxy;
				if (this._currentTheme && this._currentTokenColorMap) {
					this._workerProxy.acceptTheme(this._currentTheme, this._currentTokenColorMap);
				}
				this._modelService.getModels().forEach((model) => this._onModelAdded(model));
			});
		}
	}

	protected _doUpdateTheme(grammarFactory: TMGrammarFactory, theme: IRawTheme, colorMap: string[]): void {
		super._doUpdateTheme(grammarFactory, theme, colorMap);
		if (this._currentTheme && this._currentTokenColorMap && this._workerProxy) {
			this._workerProxy.acceptTheme(this._currentTheme, this._currentTokenColorMap);
		}
	}

	protected _onDidDisposeGrammarFactory(): void {
		this._killWorker();
	}

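	/**
	 * Dispose all per-model tokenizers and terminate the worker.
	 */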
	private _killWorker(): void {
		for (let key of Object.keys(this._tokenizers)) {
			this._tokenizers[key].dispose();
		}
		this._tokenizers = Object.create(null);

		if (this._worker) {
			this._worker.dispose();
			this._worker = null;
		}
		this._workerProxy = null;
	}

	setTokens(resource: URI, versionId: number, tokens: ArrayBuffer): void {
		const key = resource.toString();
		if (!this._tokenizers[key]) {
			return;
		}
		this._tokenizers[key].setTokens(versionId, tokens);
	}
}

registerSingleton(ITextMateService, TextMateService);

@@ -0,0 +1,215 @@
/*---------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 *--------------------------------------------------------------------------------------------*/

import { IWorkerContext } from 'vs/editor/common/services/editorSimpleWorker';
import { UriComponents, URI } from 'vs/base/common/uri';
import { LanguageId } from 'vs/editor/common/modes';
import { IValidEmbeddedLanguagesMap, IValidTokenTypeMap, IValidGrammarDefinition } from 'vs/workbench/services/textMate/common/TMScopeRegistry';
import { TMGrammarFactory, ICreateGrammarResult } from 'vs/workbench/services/textMate/common/TMGrammarFactory';
import { IModelChangedEvent, MirrorTextModel } from 'vs/editor/common/model/mirrorTextModel';
import { TextMateWorkerHost } from 'vs/workbench/services/textMate/electron-sandbox/textMateService';
import { TokenizationStateStore } from 'vs/editor/common/model/textModelTokens';
import type { IGrammar, StackElement, IRawTheme, IOnigLib } from 'vscode-textmate';
import { MultilineTokensBuilder, countEOL } from 'vs/editor/common/model/tokensStore';
import { LineTokens } from 'vs/editor/common/core/lineTokens';
import { FileAccess } from 'vs/base/common/network';

export interface IValidGrammarDefinitionDTO {
	location: UriComponents;
	language?: LanguageId;
	scopeName: string;
	embeddedLanguages: IValidEmbeddedLanguagesMap;
	tokenTypes: IValidTokenTypeMap;
	injectTo?: string[];
}

export interface ICreateData {
	grammarDefinitions: IValidGrammarDefinitionDTO[];
}

export interface IRawModelData {
	uri: UriComponents;
	versionId: number;
	lines: string[];
	EOL: string;
	languageId: LanguageId;
}

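/**
 * Mirror of a main-thread text model kept inside the worker. It tokenizes
 * lines with the resolved TextMate grammar and sends the results back to the
 * host via the owning TextMateWorker.
 */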
class TextMateWorkerModel extends MirrorTextModel {

	private readonly _tokenizationStateStore: TokenizationStateStore;
	private readonly _worker: TextMateWorker;
	private _languageId: LanguageId;
	private _grammar: IGrammar | null;
	private _isDisposed: boolean;

	constructor(uri: URI, lines: string[], eol: string, versionId: number, worker: TextMateWorker, languageId: LanguageId) {
		super(uri, lines, eol, versionId);
		this._tokenizationStateStore = new TokenizationStateStore();
		this._worker = worker;
		this._languageId = languageId;
		this._isDisposed = false;
		this._grammar = null;
		this._resetTokenization();
	}

	public dispose(): void {
		this._isDisposed = true;
		super.dispose();
	}

	public onLanguageId(languageId: LanguageId): void {
		this._languageId = languageId;
		this._resetTokenization();
	}

	onEvents(e: IModelChangedEvent): void {
		super.onEvents(e);
		for (let i = 0; i < e.changes.length; i++) {
			const change = e.changes[i];
			const [eolCount] = countEOL(change.text);
			this._tokenizationStateStore.applyEdits(change.range, eolCount);
		}
		this._ensureTokens();
	}

	private _resetTokenization(): void {
		this._grammar = null;
		this._tokenizationStateStore.flush(null);

		const languageId = this._languageId;
		this._worker.getOrCreateGrammar(languageId).then((r) => {
			if (this._isDisposed || languageId !== this._languageId || !r) {
				return;
			}

			this._grammar = r.grammar;
			this._tokenizationStateStore.flush(r.initialState);
			this._ensureTokens();
		});
	}

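	/**
	 * Tokenize every line starting at the first invalid line and send the
	 * serialized tokens to the host.
	 */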
	private _ensureTokens(): void {
		if (!this._grammar) {
			return;
		}
		const builder = new MultilineTokensBuilder();
		const lineCount = this._lines.length;

		// Validate all states up to and including endLineIndex
		for (let lineIndex = this._tokenizationStateStore.invalidLineStartIndex; lineIndex < lineCount; lineIndex++) {
			const text = this._lines[lineIndex];
			const lineStartState = this._tokenizationStateStore.getBeginState(lineIndex);

			const r = this._grammar.tokenizeLine2(text, <StackElement>lineStartState!);
			LineTokens.convertToEndOffset(r.tokens, text.length);
			builder.add(lineIndex + 1, r.tokens);
			this._tokenizationStateStore.setEndState(lineCount, lineIndex, r.ruleStack);
			lineIndex = this._tokenizationStateStore.invalidLineStartIndex - 1; // -1 because the outer loop increments it
		}

		this._worker._setTokens(this._uri, this._versionId, builder.serialize());
	}
}

export class TextMateWorker {

	private readonly _host: TextMateWorkerHost;
	private readonly _models: { [uri: string]: TextMateWorkerModel; };
	private readonly _grammarCache: Promise<ICreateGrammarResult>[];
	private readonly _grammarFactory: Promise<TMGrammarFactory | null>;

	constructor(ctx: IWorkerContext<TextMateWorkerHost>, createData: ICreateData) {
		this._host = ctx.host;
		this._models = Object.create(null);
		this._grammarCache = [];
		const grammarDefinitions = createData.grammarDefinitions.map<IValidGrammarDefinition>((def) => {
			return {
				location: URI.revive(def.location),
				language: def.language,
				scopeName: def.scopeName,
				embeddedLanguages: def.embeddedLanguages,
				tokenTypes: def.tokenTypes,
				injectTo: def.injectTo,
			};
		});
		this._grammarFactory = this._loadTMGrammarFactory(grammarDefinitions);
	}

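	/**
	 * Load vscode-textmate and vscode-oniguruma inside the worker, initialize
	 * the oniguruma WASM and build the grammar factory from the given grammar
	 * definitions.
	 */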
	private async _loadTMGrammarFactory(grammarDefinitions: IValidGrammarDefinition[]): Promise<TMGrammarFactory> {
		require.config({
			paths: {
				'vscode-textmate': '../node_modules/vscode-textmate/release/main',
				'vscode-oniguruma': '../node_modules/vscode-oniguruma/release/main',
			}
		});
		const vscodeTextmate = await import('vscode-textmate');
		const vscodeOniguruma = await import('vscode-oniguruma');
		const response = await fetch(FileAccess.asBrowserUri('vscode-oniguruma/../onig.wasm', require).toString(true));
		// Using the response directly only works if the server sets the MIME type 'application/wasm'.
		// Otherwise, a TypeError is thrown when using the streaming compiler.
		// We therefore use the non-streaming compiler :(.
		const bytes = await response.arrayBuffer();
		await vscodeOniguruma.loadWASM(bytes);

		const onigLib: Promise<IOnigLib> = Promise.resolve({
			createOnigScanner: (sources) => vscodeOniguruma.createOnigScanner(sources),
			createOnigString: (str) => vscodeOniguruma.createOnigString(str)
		});

		return new TMGrammarFactory({
			logTrace: (msg: string) => {/* console.log(msg) */ },
			logError: (msg: string, err: any) => console.error(msg, err),
			readFile: (resource: URI) => this._host.readFile(resource)
		}, grammarDefinitions, vscodeTextmate, onigLib);
	}

	public acceptNewModel(data: IRawModelData): void {
		const uri = URI.revive(data.uri);
		const key = uri.toString();
		this._models[key] = new TextMateWorkerModel(uri, data.lines, data.EOL, data.versionId, this, data.languageId);
	}

	public acceptModelChanged(strURL: string, e: IModelChangedEvent): void {
		this._models[strURL].onEvents(e);
	}

	public acceptModelLanguageChanged(strURL: string, newLanguageId: LanguageId): void {
		this._models[strURL].onLanguageId(newLanguageId);
	}

	public acceptRemovedModel(strURL: string): void {
		if (this._models[strURL]) {
			this._models[strURL].dispose();
			delete this._models[strURL];
		}
	}

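	/**
	 * Grammars are created lazily and cached per language id.
	 */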
	public async getOrCreateGrammar(languageId: LanguageId): Promise<ICreateGrammarResult | null> {
		const grammarFactory = await this._grammarFactory;
		if (!grammarFactory) {
			return Promise.resolve(null);
		}
		if (!this._grammarCache[languageId]) {
			this._grammarCache[languageId] = grammarFactory.createGrammar(languageId);
		}
		return this._grammarCache[languageId];
	}

	public async acceptTheme(theme: IRawTheme, colorMap: string[]): Promise<void> {
		const grammarFactory = await this._grammarFactory;
		if (grammarFactory) {
			grammarFactory.setTheme(theme, colorMap);
		}
	}

	public _setTokens(resource: URI, versionId: number, tokens: Uint8Array): void {
		this._host.setTokens(resource, versionId, tokens);
	}
}

export function create(ctx: IWorkerContext<TextMateWorkerHost>, createData: ICreateData): TextMateWorker {
	return new TextMateWorker(ctx, createData);
}