Skip to content

Commit

Permalink
fix: corrected issues in tests that failed linting
Browse files Browse the repository at this point in the history
Some uses of deepEqual on primitive values were replaced with t.is, and support for helper files
was configured in the ESLint rules for AVA.
  • Loading branch information
darylwright committed Mar 13, 2024
1 parent 1768d97 commit d415f2f
Show file tree
Hide file tree
Showing 12 changed files with 38 additions and 16 deletions.
6 changes: 6 additions & 0 deletions .eslintrc.json
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,12 @@
"sort-imports": [
"error",
{ "ignoreDeclarationSort": true, "ignoreCase": true }
],
"ava/no-ignored-test-files": [
"error",
{
"helpers": ["src/__tests__/**/utils.ts"]
}
]
}
}
6 changes: 3 additions & 3 deletions src/__tests__/cst_to_raw_visitor/status_indicator.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ test('returns a transaction object with an unmarked posting', (t) => {
'posting',
'should contain a transaction posting line'
);
t.deepEqual(
t.is(
((result[0] as Raw.Transaction).value.contentLines[0] as Raw.Posting).value
.status,
'unmarked',
Expand All @@ -42,7 +42,7 @@ test('returns a transaction object with a pending posting', (t) => {
'posting',
'should contain a transaction posting line'
);
t.deepEqual(
t.is(
((result[0] as Raw.Transaction).value.contentLines[0] as Raw.Posting).value
.status,
'pending',
Expand All @@ -65,7 +65,7 @@ test('returns a transaction object with a cleared posting', (t) => {
'posting',
'should contain a transaction posting line'
);
t.deepEqual(
t.is(
((result[0] as Raw.Transaction).value.contentLines[0] as Raw.Posting).value
.status,
'cleared',
Expand Down
4 changes: 3 additions & 1 deletion src/__tests__/lexer/account_directives.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand Down Expand Up @@ -81,4 +83,4 @@ const tests = [
}
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/amounts.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const getExpectedOutput = (expected: unknown[]): unknown[] => {
Expand Down Expand Up @@ -223,4 +225,4 @@ const tests = [
},
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/commodity_directives.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand Down Expand Up @@ -122,4 +124,4 @@ const tests = [
}
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/full_line_comments.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand All @@ -23,4 +25,4 @@ const tests = [
}
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/price_directives.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand Down Expand Up @@ -136,4 +138,4 @@ const tests = [
}
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/start_of_line_directives.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand All @@ -9,4 +11,4 @@ const tests = [
// TODO: Write tests for each start of line directive.
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/tags.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand Down Expand Up @@ -248,4 +250,4 @@ const tests = [
}
];

runLexerTests(tests);
runLexerTests(test, tests);
4 changes: 3 additions & 1 deletion src/__tests__/lexer/transactions.test.ts
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
import test from 'ava';

import { runLexerTests } from './utils';

const tests = [
Expand Down Expand Up @@ -129,6 +131,6 @@ const tests = [
}
];

runLexerTests(tests);
runLexerTests(test, tests);

// TODO: Add tests with more date patterns: https://hledger.org/1.30/hledger.html#smart-dates
8 changes: 4 additions & 4 deletions src/__tests__/lexer/utils.ts
Original file line number Diff line number Diff line change
@@ -1,8 +1,8 @@
import test from 'ava';

import HLedgerLexer from '../../lib/lexer';
import * as utils from '../utils';

import type { TestInterface } from 'ava';

export interface LexerTest {
pattern: string;
expected: unknown[];
Expand All @@ -13,9 +13,9 @@ function tokenize(pattern: string) {
return utils.simplifyLexResult(HLedgerLexer.tokenize(pattern));
}

export function runLexerTests(tests: LexerTest[]) {
export function runLexerTests(avaTest: TestInterface, tests: LexerTest[]) {
for (const { pattern, expected, title } of tests) {
test(title, (t) => {
avaTest(title, (t) => {
const result = tokenize(pattern);

t.deepEqual(result, expected, pattern);
Expand Down
2 changes: 1 addition & 1 deletion src/__tests__/utils.ts
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import { ExecutionContext } from 'ava';
import {
createTokenInstance,
CstElement,
Expand All @@ -13,6 +12,7 @@ import { notEmpty } from '../lib/type_utils';
import CstToRawVisitor from '../lib/visitors/cst_to_raw';
import * as Raw from '../lib/visitors/raw_types';

import type { ExecutionContext } from 'ava';
import type { CstNode } from 'chevrotain';

export class MockLexer {
Expand Down

0 comments on commit d415f2f

Please sign in to comment.