Thanks to visit codestin.com
Credit goes to github.com

Skip to content
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
5 changes: 5 additions & 0 deletions src/Lexer.js
Original file line number Diff line number Diff line change
Expand Up @@ -152,6 +152,11 @@ export class Lexer {
// newline
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
if (token.raw.length === 1 && tokens[tokens.length - 1]) {
// if there's a single \n as a spacer, it's terminating the last line, so move it there so that we don't get unnecessary paragraph tags
tokens[tokens.length - 1].raw = `${tokens[tokens.length - 1].raw}\n`;
continue;
}
if (token.type) {
tokens.push(token);
}
Expand Down
21 changes: 19 additions & 2 deletions src/Tokenizer.js
Original file line number Diff line number Diff line change
Expand Up @@ -73,7 +73,7 @@ export class Tokenizer {
space(src) {
const cap = this.rules.block.newline.exec(src);
if (cap) {
if (cap[0].length > 1) {
if (cap[0].length) {
return {
type: 'space',
raw: cap[0]
Expand Down Expand Up @@ -303,7 +303,24 @@ export class Tokenizer {
for (i = 0; i < l; i++) {
this.lexer.state.top = false;
list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
if (!list.loose && list.items[i].tokens.some(t => t.type === 'space')) {
const spacers = list.items[i].tokens.filter(t => t.type === 'space');
const hasMultipleLineBreaks = spacers.every(t => {
const chars = t.raw.split('');
let lineBreaks = 0;
for (const char of chars) {
if (char === '\n') {
lineBreaks += 1;
}
if (lineBreaks > 1) {
return true;
}
}

return false;
});

if (!list.loose && spacers.length && hasMultipleLineBreaks) {
// Having a single line break doesn't mean a list is loose. A single line break is terminating the last list item
list.loose = true;
list.items[i].loose = true;
}
Expand Down
94 changes: 62 additions & 32 deletions test/unit/Lexer-spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -73,8 +73,7 @@ describe('Lexer', () => {
describe('headings', () => {
it('depth', () => {
expectTokens({
md: `
# heading 1
md: `# heading 1

## heading 2

Expand Down Expand Up @@ -169,8 +168,7 @@ lheading 2
describe('table', () => {
it('pipe table', () => {
expectTokens({
md: `
| a | b |
md: `| a | b |
|---|---|
| 1 | 2 |
`,
Expand Down Expand Up @@ -206,16 +204,15 @@ lheading 2

it('table after para', () => {
expectTokens({
md: `
paragraph 1
md: `paragraph 1
| a | b |
|---|---|
| 1 | 2 |
`,
tokens: [
{
type: 'paragraph',
raw: 'paragraph 1',
raw: 'paragraph 1\n',
text: 'paragraph 1',
tokens: [{ type: 'text', raw: 'paragraph 1', text: 'paragraph 1' }]
},
Expand Down Expand Up @@ -252,8 +249,7 @@ paragraph 1

it('align table', () => {
expectTokens({
md: `
| a | b | c |
md: `| a | b | c |
|:--|:-:|--:|
| 1 | 2 | 3 |
`,
Expand Down Expand Up @@ -297,8 +293,7 @@ paragraph 1

it('no pipe table', () => {
expectTokens({
md: `
a | b
md: `a | b
--|--
1 | 2
`,
Expand Down Expand Up @@ -342,6 +337,19 @@ a | b
]
});
});

it('after line break does not consume raw \n', () => {
expectTokens({
md: 'T\nh\n---',
tokens:
jasmine.arrayContaining([
jasmine.objectContaining({
raw: 'T\nh\n'
}),
{ type: 'hr', raw: '---' }
])
});
});
});

describe('blockquote', () => {
Expand Down Expand Up @@ -370,14 +378,13 @@ a | b
describe('list', () => {
it('unordered', () => {
expectTokens({
md: `
- item 1
md: `- item 1
- item 2
`,
tokens: [
{
type: 'list',
raw: '- item 1\n- item 2',
raw: '- item 1\n- item 2\n',
ordered: false,
start: '',
loose: false,
Expand Down Expand Up @@ -418,14 +425,13 @@ a | b

it('ordered', () => {
expectTokens({
md: `
1. item 1
md: `1. item 1
2. item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '1. item 1\n2. item 2',
raw: '1. item 1\n2. item 2\n',
ordered: true,
start: 1,
items: [
Expand All @@ -443,14 +449,13 @@ a | b

it('ordered with parenthesis', () => {
expectTokens({
md: `
1) item 1
md: `1) item 1
2) item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '1) item 1\n2) item 2',
raw: '1) item 1\n2) item 2\n',
ordered: true,
start: 1,
items: [
Expand All @@ -468,8 +473,7 @@ a | b

it('space after list', () => {
expectTokens({
md: `
- item 1
md: `- item 1
- item 2

paragraph
Expand Down Expand Up @@ -515,7 +519,7 @@ paragraph
{ type: 'space', raw: '\n\n' },
{
type: 'paragraph',
raw: 'paragraph',
raw: 'paragraph\n',
text: 'paragraph',
tokens: [{
type: 'text',
Expand All @@ -529,14 +533,13 @@ paragraph

it('start', () => {
expectTokens({
md: `
2. item 1
md: `2. item 1
3. item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '2. item 1\n3. item 2',
raw: '2. item 1\n3. item 2\n',
ordered: true,
start: 2,
items: [
Expand All @@ -554,15 +557,14 @@ paragraph

it('loose', () => {
expectTokens({
md: `
- item 1
md: `- item 1

- item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '- item 1\n\n- item 2',
raw: '- item 1\n\n- item 2\n',
loose: true,
items: [
jasmine.objectContaining({
Expand All @@ -577,16 +579,44 @@ paragraph
});
});

it('not loose with spaces', () => {
expectTokens({
md: `- item 1
- item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '- item 1\n - item 2\n',
loose: false,
items: [
jasmine.objectContaining({
raw: '- item 1\n - item 2',
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
raw: 'item 1\n'
}),
jasmine.objectContaining({
type: 'list',
raw: '- item 2'
})
])
})
]
})
])
});
});

it('task', () => {
expectTokens({
md: `
- [ ] item 1
md: `- [ ] item 1
- [x] item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '- [ ] item 1\n- [x] item 2',
raw: '- [ ] item 1\n- [x] item 2\n',
items: [
jasmine.objectContaining({
raw: '- [ ] item 1\n',
Expand Down
1 change: 1 addition & 0 deletions test/unit/marked-spec.js
Original file line number Diff line number Diff line change
Expand Up @@ -994,6 +994,7 @@ br
});

expect(tokensSeen).toEqual([
['space', ''],
['paragraph', 'paragraph'],
['text', 'paragraph'],
['space', ''],
Expand Down