Fix lexer and tokenizer to retain line breaks properly
phillipb committed Dec 31, 2021
commit 7183372f1acc14298a8e55e3f40c9412d1ac4766
5 changes: 5 additions & 0 deletions src/Lexer.js
@@ -152,6 +152,11 @@ export class Lexer {
// newline
if (token = this.tokenizer.space(src)) {
src = src.substring(token.raw.length);
if (token.raw.length === 1 && tokens[tokens.length - 1]) {
// if there's a single \n as a spacer, it's terminating the last line, so move it onto the previous token's raw so that we don't get unnecessary paragraph tags
tokens[tokens.length - 1].raw = `${tokens[tokens.length - 1].raw}\n`;
continue;
}
if (token.type) {
tokens.push(token);
}
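
For context, a minimal sketch of the intended effect (an editor's illustration, not part of the diff; it assumes the package-level marked.lexer helper and relies on both this Lexer change and the Tokenizer.space() guard change below): a lone \n is now folded into the previous token's raw instead of surfacing as its own space token, which is why the updated tests expect raw values such as 'paragraph 1\n'.

import { marked } from 'marked';

// Editor's sketch (not part of the commit): the single "\n" after "paragraph 1"
// is appended to the paragraph token's raw rather than emitted as a separate
// space token, matching the updated "table after para" unit test.
const tokens = marked.lexer('paragraph 1\n| a | b |\n|---|---|\n| 1 | 2 |\n');
console.log(tokens[0].type); // 'paragraph'
console.log(tokens[0].raw);  // 'paragraph 1\n'
console.log(tokens[1].type); // 'table'
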
7 changes: 5 additions & 2 deletions src/Tokenizer.js
@@ -73,7 +73,7 @@ export class Tokenizer {
space(src) {
const cap = this.rules.block.newline.exec(src);
if (cap) {
if (cap[0].length > 1) {
if (cap[0].length) {
return {
type: 'space',
raw: cap[0]
@@ -303,7 +303,10 @@ export class Tokenizer {
for (i = 0; i < l; i++) {
this.lexer.state.top = false;
list.items[i].tokens = this.lexer.blockTokens(list.items[i].text, []);
if (!list.loose && list.items[i].tokens.some(t => t.type === 'space')) {
const spacers = list.items[i].tokens.filter(t => t.type === 'space');
const hasMultipleSpaces = spacers.every(t => t.raw.length > 1);
if (!list.loose && spacers.length && hasMultipleSpaces) {
// A single line break doesn't make a list loose; it just terminates the last list item
list.loose = true;
list.items[i].loose = true;
}
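
A hedged sketch of the behavior this guard preserves (editor's illustration via the package-level marked.lexer; the expected values mirror the updated unit tests below): items separated only by single newlines keep the list tight, while a blank line between items still makes it loose.

import { marked } from 'marked';

// Editor's sketch (not part of the commit).
// The trailing "\n" only terminates the last list item, so the list stays tight.
console.log(marked.lexer('- item 1\n- item 2\n')[0].loose);   // false

// A blank line between items still yields a loose list.
console.log(marked.lexer('- item 1\n\n- item 2\n')[0].loose); // true
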
52 changes: 20 additions & 32 deletions test/unit/Lexer-spec.js
@@ -73,8 +73,7 @@ describe('Lexer', () => {
describe('headings', () => {
it('depth', () => {
expectTokens({
md: `
# heading 1
md: `# heading 1

## heading 2

@@ -169,8 +168,7 @@ lheading 2
describe('table', () => {
it('pipe table', () => {
expectTokens({
md: `
| a | b |
md: `| a | b |
|---|---|
| 1 | 2 |
`,
@@ -206,16 +204,15 @@ lheading 2

it('table after para', () => {
expectTokens({
md: `
paragraph 1
md: `paragraph 1
| a | b |
|---|---|
| 1 | 2 |
`,
tokens: [
{
type: 'paragraph',
raw: 'paragraph 1',
raw: 'paragraph 1\n',
text: 'paragraph 1',
tokens: [{ type: 'text', raw: 'paragraph 1', text: 'paragraph 1' }]
},
@@ -252,8 +249,7 @@ paragraph 1

it('align table', () => {
expectTokens({
md: `
| a | b | c |
md: `| a | b | c |
|:--|:-:|--:|
| 1 | 2 | 3 |
`,
@@ -297,8 +293,7 @@ paragraph 1

it('no pipe table', () => {
expectTokens({
md: `
a | b
md: `a | b
--|--
1 | 2
`,
@@ -370,14 +365,13 @@ a | b
describe('list', () => {
it('unordered', () => {
expectTokens({
md: `
- item 1
md: `- item 1
- item 2
`,
tokens: [
{
type: 'list',
raw: '- item 1\n- item 2',
raw: '- item 1\n- item 2\n',
ordered: false,
start: '',
loose: false,
@@ -418,14 +412,13 @@ a | b

it('ordered', () => {
expectTokens({
md: `
1. item 1
md: `1. item 1
2. item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '1. item 1\n2. item 2',
raw: '1. item 1\n2. item 2\n',
ordered: true,
start: 1,
items: [
@@ -443,14 +436,13 @@ a | b

it('ordered with parenthesis', () => {
expectTokens({
md: `
1) item 1
md: `1) item 1
2) item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '1) item 1\n2) item 2',
raw: '1) item 1\n2) item 2\n',
ordered: true,
start: 1,
items: [
@@ -468,8 +460,7 @@ a | b

it('space after list', () => {
expectTokens({
md: `
- item 1
md: `- item 1
- item 2

paragraph
@@ -515,7 +506,7 @@ paragraph
{ type: 'space', raw: '\n\n' },
{
type: 'paragraph',
raw: 'paragraph',
raw: 'paragraph\n',
text: 'paragraph',
tokens: [{
type: 'text',
@@ -529,14 +520,13 @@ paragraph

it('start', () => {
expectTokens({
md: `
2. item 1
md: `2. item 1
3. item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '2. item 1\n3. item 2',
raw: '2. item 1\n3. item 2\n',
ordered: true,
start: 2,
items: [
@@ -554,15 +544,14 @@ paragraph

it('loose', () => {
expectTokens({
md: `
- item 1
md: `- item 1

- item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '- item 1\n\n- item 2',
raw: '- item 1\n\n- item 2\n',
loose: true,
items: [
jasmine.objectContaining({
@@ -579,14 +568,13 @@ paragraph

it('task', () => {
expectTokens({
md: `
- [ ] item 1
md: `- [ ] item 1
- [x] item 2
`,
tokens: jasmine.arrayContaining([
jasmine.objectContaining({
type: 'list',
raw: '- [ ] item 1\n- [x] item 2',
raw: '- [ ] item 1\n- [x] item 2\n',
items: [
jasmine.objectContaining({
raw: '- [ ] item 1\n',
1 change: 1 addition & 0 deletions test/unit/marked-spec.js
@@ -994,6 +994,7 @@ br
});

expect(tokensSeen).toEqual([
['space', ''],
['paragraph', 'paragraph'],
['text', 'paragraph'],
['space', ''],