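// Unit tests for the description tokenizer: they check how a block's
// description text is assembled from source lines under the built-in
// 'compact' and 'preserve' spacing strategies, and with a custom joiner.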
import descriptionTokenizer from '../../src/parser/tokenizers/description';
import { seedSpec, seedTokens } from '../../src/util';

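// Fixture: a single source line with leading and trailing spaces around the text.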
const sourceSingle = [
  {
    number: 1,
    source: '...',
    tokens: seedTokens({ description: ' one two ' }),
  },
];

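// Fixture: several source lines, including empty ones, to exercise line joining.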
const sourceMultiple = [
  {
    number: 1,
    source: '...',
    tokens: seedTokens({ description: 'one two ' }),
  },
  {
    number: 2,
    source: '...',
    tokens: seedTokens({ description: '' }),
  },
  {
    number: 3,
    source: '...',
    tokens: seedTokens({ description: ' three four' }),
  },
  {
    number: 4,
    source: '...',
    tokens: seedTokens({ description: '' }),
  },
];

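// 'compact' drops empty lines, trims the rest, and joins them with single spaces,
// as the expected descriptions below show.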
test('compact - single line', () => {
  const tokenize = descriptionTokenizer('compact');
  const input = seedSpec({ source: sourceSingle });
  const output = seedSpec({ source: sourceSingle, description: 'one two' });
  expect(tokenize(input)).toEqual(output);
});

test('compact - multiple lines', () => {
  const tokenize = descriptionTokenizer('compact');
  const input = seedSpec({ source: sourceMultiple });
  const output = seedSpec({
    source: sourceMultiple,
    description: 'one two three four',
  });
  expect(tokenize(input)).toEqual(output);
});

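// 'preserve' keeps the original spacing and restores line breaks between lines,
// so empty source lines show up as blank lines in the description.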
test('preserve - multiple lines', () => {
  const tokenize = descriptionTokenizer('preserve');
  const input = seedSpec({ source: sourceMultiple });
  const output = seedSpec({
    source: sourceMultiple,
    description: 'one two \n\n three four\n',
  });

  expect(tokenize(input)).toEqual(output);
});

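// A one-line /** ... */ comment: the single description token is taken as-is.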
test('preserve - one-liner', () => {
  const tokenize = descriptionTokenizer('preserve');
  const input = seedSpec({
    source: [
      {
        number: 1,
        source: '...',
        tokens: seedTokens({
          delimiter: '/**',
          postDelimiter: ' ',
          description: 'description',
          end: '*/',
        }),
      },
    ],
  });
  const output = seedSpec({
    description: 'description',
    source: [
      {
        number: 1,
        source: '...',
        tokens: seedTokens({
          delimiter: '/**',
          postDelimiter: ' ',
          description: 'description',
          end: '*/',
        }),
      },
    ],
  });

  expect(tokenize(input)).toEqual(output);
});

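// Lines after the /** delimiter that carry no description text still contribute
// line breaks, so the expected description starts with a newline.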
test('preserve - leading empty lines', () => {
  const source = [
    {
      number: 1,
      source: '...',
      tokens: seedTokens({ delimiter: '/**' }),
    },
    {
      number: 2,
      source: '...',
      tokens: seedTokens(),
    },
    {
      number: 3,
      source: '...',
      tokens: seedTokens({ description: ' line 1 ' }),
    },
    {
      number: 4,
      source: '...',
      tokens: seedTokens({ description: ' line 2 ' }),
    },
    {
      number: 5,
      source: '...',
      tokens: seedTokens({ description: '' }),
    },
  ];

  const tokenize = descriptionTokenizer('preserve');

  const input = seedSpec({ source });
  const output = seedSpec({
    source,
    description: '\n line 1 \n line 2 \n',
  });

  expect(tokenize(input)).toEqual(output);
});

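// When a multi-line {type} precedes the text, the description starts on the line
// where the type closes; earlier type-only lines contribute nothing.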
test('preserve - leading type lines', () => {
  const source = [
    {
      number: 1,
      source: '...',
      tokens: seedTokens({ delimiter: '/**' }),
    },
    {
      number: 2,
      source: '...',
      tokens: seedTokens(),
    },
    {
      number: 3,
      source: '...',
      tokens: seedTokens({ type: '{function(' }),
    },
    {
      number: 4,
      source: '...',
      tokens: seedTokens({ type: ' number' }),
    },
    {
      number: 5,
      source: '...',
      tokens: seedTokens({
        type: ')}',
        postType: ' ',
        description: 'line 1 ',
      }),
    },
    {
      number: 6,
      source: '...',
      tokens: seedTokens({ description: ' line 2 ' }),
    },
    {
      number: 7,
      source: '...',
      tokens: seedTokens({ description: '' }),
    },
  ];

  const tokenize = descriptionTokenizer('preserve');

  const input = seedSpec({ source });
  const output = seedSpec({
    source,
    description: 'line 1 \n line 2 \n',
  });

  expect(tokenize(input)).toEqual(output);
});

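// The joiner can also be a function that takes the source lines and returns the
// description string; this one trims each line and joins non-empty lines with
// single spaces, mimicking 'compact'.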
test('custom joiner - single line', () => {
  const tokenize = descriptionTokenizer((lines) => {
    return lines
      .reduce((str, { tokens: { description } }) => {
        const trimmed = description.trim();
        if (!trimmed) {
          return str;
        }
        return str + ' ' + trimmed;
      }, '')
      .slice(1);
  });
  const input = seedSpec({ source: sourceSingle });
  const output = seedSpec({ source: sourceSingle, description: 'one two' });
  expect(tokenize(input)).toEqual(output);
});

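// The same custom joiner applied across multiple lines; empty lines are skipped.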
test('custom joiner - multiple lines', () => {
  const tokenize = descriptionTokenizer((lines) => {
    return lines
      .reduce((str, { tokens: { description } }) => {
        const trimmed = description.trim();
        if (!trimmed) {
          return str;
        }
        return str + ' ' + trimmed;
      }, '')
      .slice(1);
  });
  const input = seedSpec({ source: sourceMultiple });
  const output = seedSpec({
    source: sourceMultiple,
    description: 'one two three four',
  });
  expect(tokenize(input)).toEqual(output);
});