diff --git a/src/Tokenizer.js b/src/Tokenizer.js
index 282ad57068..79f8611be2 100644
--- a/src/Tokenizer.js
+++ b/src/Tokenizer.js
@@ -285,13 +285,16 @@ module.exports = class Tokenizer {
         // trim item newlines at end
         item = rtrim(item, '\n');
+        if (i !== l - 1) {
+          raw = raw + '\n';
+        }
 
         // Determine whether item is loose or not.
         // Use: /(^|\n)(?! )[^\n]+\n\n(?!\s*$)/
         // for discount behavior.
         loose = next || /\n\n(?!\s*$)/.test(raw);
         if (i !== l - 1) {
-          next = raw.charAt(raw.length - 1) === '\n';
+          next = raw.slice(-2) === '\n\n';
           if (!loose) loose = next;
         }
diff --git a/test/unit/Lexer-spec.js b/test/unit/Lexer-spec.js
index 99aeb08d3d..f8361ace77 100644
--- a/test/unit/Lexer-spec.js
+++ b/test/unit/Lexer-spec.js
@@ -308,7 +308,7 @@ a | b
           items: [
             {
               type: 'list_item',
-              raw: '- item 1',
+              raw: '- item 1\n',
               task: false,
               checked: undefined,
               loose: false,
@@ -354,7 +354,7 @@ a | b
             start: 1,
             items: [
               jasmine.objectContaining({
-                raw: '1. item 1'
+                raw: '1. item 1\n'
               }),
               jasmine.objectContaining({
                 raw: '2. item 2\n'
@@ -379,7 +379,7 @@ a | b
             start: 1,
             items: [
               jasmine.objectContaining({
-                raw: '1) item 1'
+                raw: '1) item 1\n'
               }),
               jasmine.objectContaining({
                 raw: '2) item 2\n'
@@ -408,7 +408,7 @@ paragraph
           items: [
             {
               type: 'list_item',
-              raw: '- item 1',
+              raw: '- item 1\n',
               task: false,
               checked: undefined,
               loose: false,
@@ -464,7 +464,7 @@ paragraph
             start: 2,
             items: [
               jasmine.objectContaining({
-                raw: '2. item 1'
+                raw: '2. item 1\n'
               }),
               jasmine.objectContaining({
                 raw: '3. item 2\n'
@@ -486,7 +486,15 @@ paragraph
           jasmine.objectContaining({
             type: 'list',
             raw: '- item 1\n\n- item 2\n',
-            loose: true
+            loose: true,
+            items: [
+              jasmine.objectContaining({
+                raw: '- item 1\n\n'
+              }),
+              jasmine.objectContaining({
+                raw: '- item 2\n'
+              })
+            ]
           })
         ])
       });
@@ -504,7 +512,7 @@ paragraph
           raw: '- [ ] item 1\n- [x] item 2\n',
           items: [
             {
-              raw: '- [ ] item 1',
+              raw: '- [ ] item 1\n',
               task: true,
               checked: false
             }),
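
For readers following along, here is a minimal sketch of the behavior the updated spec asserts, assuming a local build of marked that includes this patch (the published package may behave differently). It uses the public `marked.lexer` entry point; the input string and the expected `raw` and `loose` values are taken directly from the test expectations above.

```js
// Minimal sketch, assuming a marked build with this Tokenizer change applied.
const marked = require('marked');

// The loose-list case from the updated Lexer-spec expectations.
const tokens = marked.lexer('- item 1\n\n- item 2\n');
const list = tokens[0];

console.log(list.loose);        // true  -- blank line between the items
console.log(list.items[0].raw); // '- item 1\n\n' -- raw now keeps its trailing newlines
console.log(list.items[1].raw); // '- item 2\n'
```

The switch to `raw.slice(-2) === '\n\n'` is what keeps this distinction working: since every non-final item's `raw` now ends with at least one appended `'\n'`, checking only the last character would always report a loose list, whereas requiring `'\n\n'` still detects a genuine blank line between items.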