
Added test framework and a couple of tests for syntax highlighting #742

Merged
merged 4 commits on Sep 23, 2016
13 changes: 9 additions & 4 deletions .travis.yml
@@ -1,12 +1,17 @@
sudo: false
language: node_js

node_js:
- "5.1"

os:
- osx
- linux
env:
- CXX=g++-4.8

addons:
  apt:
    sources:
    - ubuntu-toolchain-r-test
    packages:
    - g++-4.8

install:
- npm install
8 changes: 6 additions & 2 deletions README.md
@@ -86,13 +86,17 @@ First install:
* Node.js (newer than 4.3.1)
* Npm (newer than 2.14.12)

If you get a *node-gyp* error, [follow the instructions here](https://github.com/nodejs/node-gyp/blob/master/README.md) to fix it. The *vscode-textmate* package pulls in a native Node dependency, and those instructions set up the build tooling needed to compile it.

To **run and develop** do the following:

* Run `npm i`
* Open in Visual Studio Code (`code .`)
* *Optional:* run `tsc -w`, make code changes (on Windows, try `start node ".\node_modules\typescript\bin\tsc -w"`)
* Press <kbd>F5</kbd> to debug

To **test**, run: `npm run test`

### License
The Microsoft C# extension is subject to [these license terms](RuntimeLicenses/license.txt).
The source code to this extension is available on [https://github.com/OmniSharp/omnisharp-vscode](https://github.com/OmniSharp/omnisharp-vscode) and licensed under the [MIT license](LICENSE.txt).
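For context on the `npm run test` step added above: per the package.json change further down, the script runs mocha over the compiled `out/test/**/*.tests.js` files, and the new grammar tests take the shape below. This is a condensed excerpt of the test/syntaxes files added later in this PR, shown here only to illustrate the pattern.

```typescript
import { should } from 'chai';
import { Tokens, Token } from './utils/tokenizer';
import { TokenizerUtil } from './utils/tokenizerUtil';

describe("Grammar", function() {
    before(function() {
        // register chai's .should assertion style
        should();
    });

    it("has a namespace keyword and a name", function() {
        const input = `
namespace TestNamespace
{
}`;
        const tokens: Token[] = TokenizerUtil.tokenize(input);

        // positions are 1-based (line, column), as produced by the tokenizer helper
        tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1));
        tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11));
    });
});
```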
6 changes: 3 additions & 3 deletions gulpfile.js
@@ -42,7 +42,7 @@ function installOmnisharp(omnisharps) {

return download.go(omni.flavor, omni.platform, log);
});

return Promise.all(promises);
}

@@ -53,7 +53,7 @@ function cleanOmnisharp() {
gulp.task('omnisharp:clean', () => {
return cleanOmnisharp();
});

gulp.task('omnisharp:install', ['omnisharp:clean'], () => {
const flavor = gulpUtil.env.flavor || Flavor.CoreCLR;
const platform = gulpUtil.env.platform || platform.getCurrentPlatform();
@@ -157,7 +157,7 @@ gulp.task('package:offline', ['clean'], () => {

/// Test Task
gulp.task('test', () => {
gulp.src('out/test/*.tests.js')
gulp.src('out/test/**/*.tests.js')
.pipe(mocha({ui: "tdd"}))
.once('error', () => {
process.exit(1);
6 changes: 4 additions & 2 deletions package.json
@@ -21,7 +21,7 @@
"scripts": {
"compile": "node ./node_modules/vscode/bin/compile -p ./",
"watch": "node ./node_modules/vscode/bin/compile -watch -p ./",
"test": "mocha --timeout 15000 -u tdd ./out/test/*.tests.js",
"test": "mocha --timeout 15000 -u tdd ./out/test/**/*.tests.js",
"postinstall": "node ./node_modules/vscode/bin/install"
},
"dependencies": {
@@ -46,7 +46,9 @@
"tslint-microsoft-contrib": "^2.0.0",
"typescript": "^1.7.3",
"vscode": "^0.11.13",
"vsce": "^1.7.0"
"vsce": "^1.7.0",
"chai": "^3.5.0",
"vscode-textmate": "^2.1.1"
},
"engines": {
"vscode": "^1.3.0"
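The two new devDependencies above support the syntax tests: chai provides the `should`-style assertions, and vscode-textmate tokenizes source text against the extension's `syntaxes/csharp.json` grammar. A rough sketch of the underlying vscode-textmate 2.x calls that the new `Tokenizer` class (later in this diff) wraps:

```typescript
import { Registry, IGrammar, ITokenizeLineResult } from 'vscode-textmate';

// load the C# grammar shipped with the extension (path relative to the repo root)
const grammar: IGrammar = new Registry().loadGrammarFromPathSync("syntaxes/csharp.json");

// tokenize a single line; passing null for the rule stack starts a fresh parse
const result: ITokenizeLineResult = grammar.tokenizeLine("using System;", null);

for (const token of result.tokens) {
    // each token exposes startIndex/endIndex into the line plus a scope stack;
    // the innermost scope (last element) is what the grammar tests assert on
    console.log(token.startIndex, token.endIndex, token.scopes[token.scopes.length - 1]);
}
```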
1 change: 0 additions & 1 deletion src/features/codeLensProvider.ts
@@ -32,7 +32,6 @@ export default class OmniSharpCodeLensProvider extends AbstractSupport implement
};

provideCodeLenses(document: TextDocument, token: CancellationToken): CodeLens[] | Thenable<CodeLens[]> {
let request = { Filename: document.fileName };
return serverUtils.currentFileMembersAsTree(this._server, { Filename: document.fileName }, token).then(tree => {
Review comment from a Member: 😄
let ret: CodeLens[] = [];
tree.TopLevelTypeDefinitions.forEach(node => this._convertQuickFix(ret, document.fileName, node));
79 changes: 79 additions & 0 deletions test/syntaxes/class.tests.ts
@@ -0,0 +1,79 @@
import { should } from 'chai';
import { Tokens, Token } from './utils/tokenizer';
import { TokenizerUtil } from './utils/tokenizerUtil';

describe("Grammar", function() {
before(function() {
should();
});

describe("Class", function() {
it("has a class keyword, a name and optional storage modifiers", function() {

const input = `
namespace TestNamespace
{
    public             class PublicClass { }

                       class DefaultClass { }

    internal           class InternalClass { }

              static   class DefaultStaticClass { }

    public    static   class PublicStaticClass { }

              sealed   class DefaultSealedClass { }

    public    sealed   class PublicSealedClass { }

    public    abstract class PublicAbstractClass { }

              abstract class DefaultAbstractClass { }
}`;
let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.StorageModifierKeyword("public", 4, 5));
tokens.should.contain(Tokens.ClassKeyword("class", 4, 24));
tokens.should.contain(Tokens.ClassIdentifier("PublicClass", 4, 30));

tokens.should.contain(Tokens.ClassKeyword("class", 6, 24));
tokens.should.contain(Tokens.ClassIdentifier("DefaultClass", 6, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("internal", 8, 5));
tokens.should.contain(Tokens.ClassKeyword("class", 8, 24));
tokens.should.contain(Tokens.ClassIdentifier("InternalClass", 8, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("static", 10, 15));
tokens.should.contain(Tokens.ClassKeyword("class", 10, 24));
tokens.should.contain(Tokens.ClassIdentifier("DefaultStaticClass", 10, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("public", 12, 5));
tokens.should.contain(Tokens.StorageModifierKeyword("static", 12, 15));
tokens.should.contain(Tokens.ClassKeyword("class", 12, 24));
tokens.should.contain(Tokens.ClassIdentifier("PublicStaticClass", 12, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("sealed", 14, 15));
tokens.should.contain(Tokens.ClassKeyword("class", 14, 24));
tokens.should.contain(Tokens.ClassIdentifier("DefaultSealedClass", 14, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("public", 16, 5));
tokens.should.contain(Tokens.StorageModifierKeyword("sealed", 16, 15));
tokens.should.contain(Tokens.ClassKeyword("class", 16, 24));
tokens.should.contain(Tokens.ClassIdentifier("PublicSealedClass", 16, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("public", 18, 5));
tokens.should.contain(Tokens.StorageModifierKeyword("abstract", 18, 15));
tokens.should.contain(Tokens.ClassKeyword("class", 18, 24));
tokens.should.contain(Tokens.ClassIdentifier("PublicAbstractClass", 18, 30));

tokens.should.contain(Tokens.StorageModifierKeyword("abstract", 20, 15));
tokens.should.contain(Tokens.ClassKeyword("class", 20, 24));
tokens.should.contain(Tokens.ClassIdentifier("DefaultAbstractClass", 20, 30));

});

});
});


78 changes: 78 additions & 0 deletions test/syntaxes/namespace.tests.ts
@@ -0,0 +1,78 @@
import { should } from 'chai';
import { Tokens, Token } from './utils/tokenizer';
import { TokenizerUtil } from './utils/tokenizerUtil';

describe("Grammar", function() {
before(function () {
should();
});

describe("Namespace", function() {
it("has a namespace keyword and a name", function() {

const input = `
namespace TestNamespace
{
}`;
let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1));
tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11));
});

it("can be nested", function() {

const input = `
namespace TestNamespace
{
    namespace NestedNamespace {

    }
}`;
let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.NamespaceKeyword("namespace", 2, 1));
tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 2, 11));

tokens.should.contain(Tokens.NamespaceKeyword("namespace", 4, 5));
tokens.should.contain(Tokens.NamespaceIdentifier("NestedNamespace", 4, 15));
});

it("can contain using statements", function() {

const input = `
using UsineOne;
using one = UsineOne.Something;

namespace TestNamespace
{
    using UsingTwo;
    using two = UsineOne.Something;

    namespace NestedNamespace
    {
        using UsingThree;
        using three = UsineOne.Something;
    }
}`;
let tokens: Token[] = TokenizerUtil.tokenize(input);

tokens.should.contain(Tokens.UsingKeyword("using", 2, 1));
tokens.should.contain(Tokens.UsingKeyword("using", 3, 1));

tokens.should.contain(Tokens.NamespaceKeyword("namespace", 5, 1));
tokens.should.contain(Tokens.NamespaceIdentifier("TestNamespace", 5, 11));

tokens.should.contain(Tokens.UsingKeyword("using", 7, 5));
tokens.should.contain(Tokens.UsingKeyword("using", 8, 5));

tokens.should.contain(Tokens.NamespaceKeyword("namespace", 10, 5));
tokens.should.contain(Tokens.NamespaceIdentifier("NestedNamespace", 10, 15));

tokens.should.contain(Tokens.UsingKeyword("using", 12, 9));
tokens.should.contain(Tokens.UsingKeyword("using", 12, 9));
});
});
});


77 changes: 77 additions & 0 deletions test/syntaxes/utils/tokenizer.ts
@@ -0,0 +1,77 @@
import {ITokenizeLineResult, Registry, IGrammar, StackElement} from 'vscode-textmate';

export class Tokenizer
{
private _grammar : IGrammar;

constructor(grammarFilePath: string) {
this._grammar = new Registry().loadGrammarFromPathSync(grammarFilePath);
}

public tokenize(input: string): Token[] {
let tokens: Token[] = [];

// normalize line endings so tokenization is consistent regardless of OS
input = input.replace(/\r\n/g, "\n");

let previousStack : StackElement = null;

const lines: string[] = input.split("\n");

for (let lineIndex = 0; lineIndex < lines.length; lineIndex++) {
const line = lines[lineIndex];

let result: ITokenizeLineResult = this._grammar.tokenizeLine(line, previousStack);
previousStack = result.ruleStack;

for (const token of result.tokens) {
const text = line.substring(token.startIndex, token.endIndex);
const type : string = token.scopes[token.scopes.length - 1];
tokens.push(new Token(text, type, lineIndex+1, token.startIndex + 1));
}
}

return tokens;
}
}

export class Token {
constructor(text: string, type: string, line?: number, column?: number) {
this.text = text;
this.type = type;
this.column = column;
this.line = line;
}

public text: string;
public type: string;
public line: number;
public column: number;
}

export namespace Tokens {

function createToken(text: string, type: string, line?: number, column?: number) : Token {
return new Token(text, type, line, column);
}

export const NamespaceKeyword = (text: string, line?: number, column?: number) =>
createToken(text, "keyword.other.namespace.cs", line, column);

export const NamespaceIdentifier = (text: string, line?: number, column?: number) =>
createToken(text, "entity.name.type.namespace.cs", line, column);

export const UsingKeyword = (text: string, line?: number, column?: number) =>
createToken(text, "keyword.other.using.cs", line, column);

export const ClassKeyword = (text: string, line?: number, column?: number) =>
createToken(text, "storage.modifier.cs", line, column);

export const ClassIdentifier = (text: string, line?: number, column?: number) =>
createToken(text, "entity.name.type.class.cs", line, column);

export const StorageModifierKeyword = (text: string, line?: number, column?: number) =>
createToken(text, "storage.modifier.cs", line, column);

}
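A minimal sketch of using the `Tokenizer` above directly, assuming it runs from the repository root so the relative grammar path resolves (the `TokenizerUtil` helper below wraps exactly this):

```typescript
import { Tokenizer, Token } from './tokenizer';

const tokenizer = new Tokenizer("syntaxes/csharp.json");
const tokens: Token[] = tokenizer.tokenize("namespace TestNamespace { }");

// each Token carries the matched text, its innermost TextMate scope,
// and a 1-based line/column position
for (const token of tokens) {
    console.log(`${token.line}:${token.column} ${token.type} '${token.text}'`);
}
```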

10 changes: 10 additions & 0 deletions test/syntaxes/utils/tokenizerUtil.ts
@@ -0,0 +1,10 @@
import { Tokenizer, Token } from './tokenizer';

export class TokenizerUtil
{
private static _tokenizer: Tokenizer = new Tokenizer("syntaxes/csharp.json");

public static tokenize(input: string): Token[] {
return TokenizerUtil._tokenizer.tokenize(input);
}
}