1
0
mirror of https://github.com/pdemian/human2regex.git synced 2025-05-15 20:10:19 -07:00

Added tests and enforced a stricter ESLint configuration

This commit is contained in:
Patrick Demian 2020-10-29 10:37:56 -04:00
parent df11fc82a3
commit 91d1b37322
11 changed files with 4400 additions and 82 deletions

View File

@ -10,7 +10,8 @@
"parser": "@typescript-eslint/parser", "parser": "@typescript-eslint/parser",
"parserOptions": { "parserOptions": {
"ecmaVersion": 12, "ecmaVersion": 12,
"sourceType": "module" "sourceType": "module",
"project": "./../tsconfig.json"
}, },
"plugins": [ "plugins": [
"@typescript-eslint" "@typescript-eslint"
@ -18,7 +19,40 @@
"rules": { "rules": {
"@typescript-eslint/no-this-alias": "off", "@typescript-eslint/no-this-alias": "off",
"@typescript-eslint/no-inferrable-types": "off", "@typescript-eslint/no-inferrable-types": "off",
"@typescript-eslint/explicit-function-return-type": "off", "@typescript-eslint/explicit-function-return-type": [
"warn",
{ "allowExpressions": true }
],
"@typescript-eslint/no-shadow": "error",
//"@typescript-eslint/prefer-readonly-parameter-types": "warn",
//"@typescript-eslint/prefer-readonly": "warn",
"@typescript-eslint/prefer-optional-chain": "error",
"@typescript-eslint/prefer-for-of": "error",
"@typescript-eslint/no-unnecessary-condition": "error",
"@typescript-eslint/no-throw-literal": "error",
"@typescript-eslint/consistent-type-assertions": "error",
"@typescript-eslint/brace-style": [
"error",
"stroustrup"
],
"@typescript-eslint/comma-spacing": [
"error",
{ "before": false, "after": true }
],
"@typescript-eslint/keyword-spacing": "error",
"@typescript-eslint/naming-convention": [
"error",
{ "selector": "default", "format": [ "snake_case", "PascalCase" ] },
{ "selector": "property", "format": [ "camelCase", "snake_case", "PascalCase" ] },
{ "selector": [ "function", "method"], "format": [ "camelCase", "UPPER_CASE" ] },
{ "selector": "typeLike", "format": [ "PascalCase" ] }
],
"@typescript-eslint/quotes": [
"warn",
"double",
{ "avoidEscape": true }
],
"camelcase": "off",
"no-magic-numbers": [ "no-magic-numbers": [
"warn", "warn",
{ "ignoreArrayIndexes": true, "ignore": [-1,0,1,2,3,4,5,6,7,8,9]} { "ignoreArrayIndexes": true, "ignore": [-1,0,1,2,3,4,5,6,7,8,9]}
@ -47,14 +81,8 @@
], ],
"no-shadow": "off", "no-shadow": "off",
"no-undefined": "error", "no-undefined": "error",
"brace-style": [ "brace-style": "off",
"error", "comma-spacing": "off",
"stroustrup"
],
"comma-spacing": [
"error",
{ "before": false, "after": true }
],
"array-bracket-spacing": [ "array-bracket-spacing": [
"error", "error",
"always" "always"
@ -63,6 +91,7 @@
"error", "error",
"last" "last"
], ],
"keyword-spacing": "off",
"func-style": [ "func-style": [
"error", "error",
"declaration" "declaration"
@ -84,19 +113,11 @@
"error", "error",
"always" "always"
], ],
/*"indent": [
"error",
4
],*/
"linebreak-style": [ "linebreak-style": [
"error", "error",
"windows" "windows"
], ],
"quotes": [ "quotes": "off",
"warn",
"double",
{ "avoidEscape": true }
],
"semi": [ "semi": [
"error", "error",
"always" "always"

1
.gitignore vendored
View File

@ -1,3 +1,4 @@
# Node build artifacts # Node build artifacts
node_modules/ node_modules/
coverage/
npm-debug.log npm-debug.log

4
docs/bundle.min.js vendored

File diff suppressed because one or more lines are too long

16
jest.config.ts Normal file
View File

@ -0,0 +1,16 @@
/*
 * Jest configuration for this project.
 * Each option is documented at https://jestjs.io/docs/en/configuration.html
 */

// Bound to a named constant so the settings are self-describing in this file.
const jest_config = {
    // Compile TypeScript sources on the fly through ts-jest.
    transform: { "^.+\\.ts$": "ts-jest" },
    testEnvironment: "node",
    // Always gather coverage and emit it into ./coverage.
    collectCoverage: true,
    coverageDirectory: "coverage",
    coveragePathIgnorePatterns: [ "/node_modules/", "/docs/" ],
    coverageProvider: "v8",
    // Only files shaped like tests/<name>.spec.ts count as test suites.
    testRegex: "/tests/.*\\.spec\\.(ts)$",
    moduleFileExtensions: [ "ts", "js" ],
    verbose: true
};

export default jest_config;

4268
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@ -6,7 +6,8 @@
"devDependencies": { "devDependencies": {
"@types/glob": "^7.1.3", "@types/glob": "^7.1.3",
"@types/html-minifier": "^3.5.3", "@types/html-minifier": "^3.5.3",
"@types/jquery": "^3.5.3", "@types/jest": "^26.0.15",
"@types/jquery": "^3.5.4",
"@types/mustache": "^4.0.1", "@types/mustache": "^4.0.1",
"@typescript-eslint/eslint-plugin": "^4.4.0", "@typescript-eslint/eslint-plugin": "^4.4.0",
"@typescript-eslint/parser": "^4.4.0", "@typescript-eslint/parser": "^4.4.0",
@ -16,17 +17,20 @@
"eslint": "^7.11.0", "eslint": "^7.11.0",
"glob": "^7.1.6", "glob": "^7.1.6",
"html-minifier": "^4.0.0", "html-minifier": "^4.0.0",
"jest": "^26.6.1",
"mini-css-extract-plugin": "^1.0.0", "mini-css-extract-plugin": "^1.0.0",
"mustache": "^4.0.1", "mustache": "^4.0.1",
"optimize-css-assets-webpack-plugin": "^5.0.4", "optimize-css-assets-webpack-plugin": "^5.0.4",
"ts-jest": "^26.4.3",
"ts-loader": "^8.0.4", "ts-loader": "^8.0.4",
"typescript": "^4.0.3", "ts-node": "^9.0.0",
"typescript": "^4.0.5",
"webpack": "^4.44.2", "webpack": "^4.44.2",
"webpack-cli": "^3.3.12" "webpack-cli": "^3.3.12"
}, },
"scripts": { "scripts": {
"build": "webpack --config webpack.config.js", "build": "webpack --config webpack.config.js",
"test": "echo \"Error: no test specified\" && exit 1" "test": "eslint && jest"
}, },
"keywords": [ "keywords": [
"regex" "regex"

View File

@ -29,10 +29,10 @@ export class Human2RegexLexer {
Human2RegexLexer.already_init = true; Human2RegexLexer.already_init = true;
this.set_options(options); this.setOptions(options);
} }
public set_options(options: Human2RegexLexerOptions) : void { public setOptions(options: Human2RegexLexerOptions) : void {
this.options = options; this.options = options;
let indent_regex: RegExp | null = null; let indent_regex: RegExp | null = null;
@ -55,7 +55,7 @@ export class Human2RegexLexer {
this.lexer = new Lexer(AllTokens, { ensureOptimizations: true, skipValidations: options.skip_validations }); this.lexer = new Lexer(AllTokens, { ensureOptimizations: true, skipValidations: options.skip_validations });
} }
private lex_error(token: IToken) : ILexingError { private lexError(token: IToken) : ILexingError {
return { return {
offset: token.startOffset, offset: token.startOffset,
line: token.startLine ?? NaN, line: token.startLine ?? NaN,
@ -66,75 +66,75 @@ export class Human2RegexLexer {
} }
public tokenize(text: string) : ILexingResult { public tokenize(text: string) : ILexingResult {
const lexResult = this.lexer.tokenize(text); const lex_result = this.lexer.tokenize(text);
if (lexResult.tokens.length === 0) { if (lex_result.tokens.length === 0) {
return lexResult; return lex_result;
} }
// create Outdents // create Outdents
const tokens: IToken[] = []; const tokens: IToken[] = [];
const indentStack = [ 0 ]; const indent_stack = [ 0 ];
let currIndentLevel = 0; let curr_indent_level = 0;
let startOfLine = true; let start_of_line = true;
let hadIndents = false; let had_indents = false;
for (let i = 0; i < lexResult.tokens.length; i++) { for (let i = 0; i < lex_result.tokens.length; i++) {
// EoL? check for indents next (by setting startOfLine = true) // EoL? check for indents next (by setting startOfLine = true)
if (lexResult.tokens[i].tokenType === EndOfLine) { if (lex_result.tokens[i].tokenType === EndOfLine) {
if(tokens.length === 0 || tokens[tokens.length-1].tokenType === EndOfLine) { if (tokens.length === 0 || tokens[tokens.length-1].tokenType === EndOfLine) {
// Ignore multiple EOLs and ignore first EOL // Ignore multiple EOLs and ignore first EOL
} }
else { else {
startOfLine = true; start_of_line = true;
tokens.push(lexResult.tokens[i]); tokens.push(lex_result.tokens[i]);
} }
} }
// start with 1 indent. Append all other indents // start with 1 indent. Append all other indents
else if (lexResult.tokens[i].tokenType === Indent) { else if (lex_result.tokens[i].tokenType === Indent) {
hadIndents = true; had_indents = true;
currIndentLevel = 1; curr_indent_level = 1;
const start_token = lexResult.tokens[i]; const start_token = lex_result.tokens[i];
let length = lexResult.tokens[i].image.length; let length = lex_result.tokens[i].image.length;
// grab all the indents (and their length) // grab all the indents (and their length)
while (lexResult.tokens.length > i && lexResult.tokens[i+1].tokenType === Indent) { while (lex_result.tokens.length > i && lex_result.tokens[i+1].tokenType === Indent) {
currIndentLevel++; curr_indent_level++;
i++; i++;
length += lexResult.tokens[i].image.length; length += lex_result.tokens[i].image.length;
} }
start_token.endOffset = start_token.startOffset + length; start_token.endOffset = start_token.startOffset + length;
start_token.endColumn = lexResult.tokens[i].endColumn; start_token.endColumn = lex_result.tokens[i].endColumn;
// must be the same line // must be the same line
//start_token.endLine = lexResult.tokens[i].endLine; //start_token.endLine = lex_result.tokens[i].endLine;
// are we an empty line? // are we an empty line?
if (lexResult.tokens.length > i && lexResult.tokens[i+1].tokenType === EndOfLine) { if (lex_result.tokens.length > i && lex_result.tokens[i+1].tokenType === EndOfLine) {
// Ignore all indents AND newline // Ignore all indents AND newline
// continue; // continue;
} }
else if (!startOfLine || (currIndentLevel > last(indentStack) + 1)) { else if (!start_of_line || (curr_indent_level > last(indent_stack) + 1)) {
lexResult.errors.push(this.lex_error(start_token)); lex_result.errors.push(this.lexError(start_token));
} }
else if (currIndentLevel > last(indentStack)) { else if (curr_indent_level > last(indent_stack)) {
indentStack.push(currIndentLevel); indent_stack.push(curr_indent_level);
tokens.push(start_token); tokens.push(start_token);
} }
else if (currIndentLevel < last(indentStack)) { else if (curr_indent_level < last(indent_stack)) {
const index = findLastIndex(indentStack, currIndentLevel); const index = findLastIndex(indent_stack, curr_indent_level);
if (index < 0) { if (index < 0) {
lexResult.errors.push(this.lex_error(start_token)); lex_result.errors.push(this.lexError(start_token));
} }
else { else {
const numberOfDedents = indentStack.length - index - 1; const number_of_dedents = indent_stack.length - index - 1;
for(let i = 0; i < numberOfDedents; i++) { for (let j = 0; j < number_of_dedents; j++) {
indentStack.pop(); indent_stack.pop();
tokens.push(createTokenInstance(Outdent, "", start_token.startOffset, start_token.startOffset + length, start_token.startLine ?? NaN, start_token.endLine ?? NaN, start_token.startColumn ?? NaN, (start_token.startColumn ?? NaN) + length)); tokens.push(createTokenInstance(Outdent, "", start_token.startOffset, start_token.startOffset + length, start_token.startLine ?? NaN, start_token.endLine ?? NaN, start_token.startColumn ?? NaN, (start_token.startColumn ?? NaN) + length));
} }
} }
@ -145,35 +145,35 @@ export class Human2RegexLexer {
} }
} }
else { else {
if(startOfLine && !hadIndents) { if (start_of_line && !had_indents) {
const tok = lexResult.tokens[i]; const tok = lex_result.tokens[i];
//add remaining Outdents //add remaining Outdents
while (indentStack.length > 1) { while (indent_stack.length > 1) {
indentStack.pop(); indent_stack.pop();
tokens.push(createTokenInstance(Outdent, "", tok.startOffset, tok.startOffset, tok.startLine ?? NaN, NaN, tok.startColumn ?? NaN, NaN)); tokens.push(createTokenInstance(Outdent, "", tok.startOffset, tok.startOffset, tok.startLine ?? NaN, NaN, tok.startColumn ?? NaN, NaN));
} }
} }
startOfLine = false; start_of_line = false;
hadIndents = false; had_indents = false;
tokens.push(lexResult.tokens[i]); tokens.push(lex_result.tokens[i]);
} }
} }
const tok = last(tokens); const tok = last(tokens);
// Do we have an EOL marker at the end? // Do we have an EOL marker at the end?
if(tok.tokenType !== EndOfLine) { if (tok.tokenType !== EndOfLine) {
tokens.push(createTokenInstance(EndOfLine, "\n", tok.endOffset ?? NaN, tok.endOffset ?? NaN, tok.startLine ?? NaN, NaN, tok.startColumn ?? NaN, NaN)); tokens.push(createTokenInstance(EndOfLine, "\n", tok.endOffset ?? NaN, tok.endOffset ?? NaN, tok.startLine ?? NaN, NaN, tok.startColumn ?? NaN, NaN));
} }
//add remaining Outdents //add remaining Outdents
while (indentStack.length > 1) { while (indent_stack.length > 1) {
indentStack.pop(); indent_stack.pop();
tokens.push(createTokenInstance(Outdent, "", tok.endOffset ?? NaN, tok.endOffset ?? NaN, tok.startLine ?? NaN, NaN, tok.startColumn ?? NaN, NaN)); tokens.push(createTokenInstance(Outdent, "", tok.endOffset ?? NaN, tok.endOffset ?? NaN, tok.startLine ?? NaN, NaN, tok.startColumn ?? NaN, NaN));
} }
lexResult.tokens = tokens; lex_result.tokens = tokens;
return lexResult; return lex_result;
} }
} }

View File

@ -3,8 +3,20 @@
import { CstParser, CstNode, IOrAlt } from "chevrotain"; import { CstParser, CstNode, IOrAlt } from "chevrotain";
import * as T from "./tokens"; import * as T from "./tokens";
export enum RobotLanguage {
JS,
Perl,
DotNet,
Java
}
export enum HumanLanguage {
English,
/* Todo: Humans speak more than just english! */
}
export class Human2RegexParserOptions { export class Human2RegexParserOptions {
constructor(public skip_validations: boolean = false) { constructor(public skip_validations: boolean = false, public robot_language: RobotLanguage = RobotLanguage.JS, public human_language: HumanLanguage = HumanLanguage.English) {
/* empty */ /* empty */
} }
} }

6
tests/lexer.spec.ts Normal file
View File

@ -0,0 +1,6 @@
// Placeholder arithmetic suite: confirms the jest + ts-jest toolchain runs.
describe("calculate", () => {
    it("add", () => {
        const sum = 5 + 2;
        expect(sum).toBe(7);
    });
});

View File

@ -24,7 +24,7 @@
"strict": true, /* Enable all strict type-checking options. */ "strict": true, /* Enable all strict type-checking options. */
"noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */ "noImplicitAny": false, /* Raise error on expressions and declarations with an implied 'any' type. */
"strictNullChecks": true, /* Enable strict null checks. */ "strictNullChecks": true, /* Enable strict null checks. */
// "strictFunctionTypes": true, /* Enable strict checking of function types. */ "strictFunctionTypes": true, /* Enable strict checking of function types. */
// "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */ // "strictPropertyInitialization": true, /* Enable strict checking of property initialization in classes. */
// "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */ // "noImplicitThis": true, /* Raise error on 'this' expressions with an implied 'any' type. */
// "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */ // "alwaysStrict": true, /* Parse in strict mode and emit "use strict" for each source file. */

View File

@ -3,7 +3,7 @@
const path = require("path"); const path = require("path");
const { glob } = require("glob"); const { glob } = require("glob");
const { render } = require("mustache"); const { render } = require("mustache");
const { readFileSync, writeFileSync } = require("fs"); const { readFileSync, writeFileSync, existsSync, mkdirSync } = require("fs");
const { minify } = require("html-minifier"); const { minify } = require("html-minifier");
const CopyPlugin = require("copy-webpack-plugin"); const CopyPlugin = require("copy-webpack-plugin");
const MiniCssExtractPlugin = require("mini-css-extract-plugin"); const MiniCssExtractPlugin = require("mini-css-extract-plugin");
@ -30,6 +30,10 @@ const config = {
function build_mustache() { function build_mustache() {
if (!existsSync(config.dst)){
mkdirSync(config.dst);
}
read_json_file = (filename) => JSON.parse(readFileSync(filename), "utf8"); read_json_file = (filename) => JSON.parse(readFileSync(filename), "utf8");
compress_html = (input) => config.prod ? minify(input, config.compression_config.html) : input; compress_html = (input) => config.prod ? minify(input, config.compression_config.html) : input;