// marked.test.js — unit tests for the marked library.
// (Extraction artifacts — file-size banner and fused line-number gutter — removed.)
import assert from 'node:assert';
import { describe, it, beforeEach, mock } from 'node:test';

import { Marked, Renderer, lexer, parseInline, getDefaults, walkTokens, defaults, setOptions } from '../../lib/marked.esm.js';
import { timeout } from './utils.js';
  5. describe('marked unit', () => {
  6. let marked;
  7. beforeEach(() => {
  8. marked = new Marked();
  9. });
  describe('Test paragraph token type', () => {
    it('should use the "paragraph" type on top level', () => {
      const md = 'A Paragraph.\n\n> A blockquote\n\n- list item\n';
      const tokens = lexer(md);
      // Top-level text lexes as "paragraph"; inside list items it stays "text".
      assert.strictEqual(tokens[0].type, 'paragraph');
      assert.strictEqual(tokens[2].tokens[0].type, 'paragraph');
      assert.strictEqual(tokens[3].items[0].tokens[0].type, 'text');
    });
  });
  describe('changeDefaults', () => {
    it('should change global defaults', async() => {
      // Re-import to observe the shared module-level defaults object.
      const { defaults, setOptions } = await import('../../lib/marked.esm.js');
      assert.ok(!defaults.test);
      setOptions({ test: true });
      assert.ok((await import('../../lib/marked.esm.js')).defaults.test);
    });
  });
  describe('inlineLexer', () => {
    it('should send html to renderer.html', () => {
      const renderer = new Renderer();
      mock.method(renderer, 'html');
      const md = 'HTML Image: <img alt="MY IMAGE" src="example.png" />';
      marked.parse(md, { renderer });
      assert.strictEqual(renderer.html.mock.calls[0].arguments[0], '<img alt="MY IMAGE" src="example.png" />');
    });
  });
  describe('task', () => {
    it('space after checkbox', () => {
      const html = marked.parse('- [ ] item');
      assert.strictEqual(html, '<ul>\n<li><input disabled="" type="checkbox"> item</li>\n</ul>\n');
    });
    it('space after loose checkbox', () => {
      const html = marked.parse('- [ ] item 1\n\n- [ ] item 2');
      assert.strictEqual(html, '<ul>\n<li><p><input disabled="" type="checkbox"> \nitem 1</p>\n</li>\n<li><p><input disabled="" type="checkbox"> \nitem 2</p>\n</li>\n</ul>\n');
    });
  });
  describe('parseInline', () => {
    it('should parse inline tokens', () => {
      const md = '**strong** _em_';
      const html = parseInline(md);
      assert.strictEqual(html, '<strong>strong</strong> <em>em</em>');
    });
    it('should not parse block tokens', () => {
      // Block-level syntax (the heading) must pass through untouched.
      const md = '# header\n\n_em_';
      const html = parseInline(md);
      assert.strictEqual(html, '# header\n\n<em>em</em>');
    });
  });
  58. describe('use extension', () => {
  59. it('should use custom block tokenizer + renderer extensions', () => {
  60. const underline = {
  61. name: 'underline',
  62. level: 'block',
  63. tokenizer(src) {
  64. const rule = /^:([^\n]*)(?:\n|$)/;
  65. const match = rule.exec(src);
  66. if (match) {
  67. return {
  68. type: 'underline',
  69. raw: match[0], // This is the text that you want your token to consume from the source
  70. text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
  71. };
  72. }
  73. },
  74. renderer(token) {
  75. return `<u>${token.text}</u>\n`;
  76. }
  77. };
  78. marked.use({ extensions: [underline] });
  79. let html = marked.parse('Not Underlined\n:Underlined\nNot Underlined');
  80. assert.strictEqual(html, '<p>Not Underlined\n:Underlined\nNot Underlined</p>\n');
  81. html = marked.parse('Not Underlined\n\n:Underlined\n\nNot Underlined');
  82. assert.strictEqual(html, '<p>Not Underlined</p>\n<u>Underlined</u>\n<p>Not Underlined</p>\n');
  83. });
  84. it('should interrupt paragraphs if using "start" property', () => {
  85. const underline = {
  86. extensions: [{
  87. name: 'underline',
  88. level: 'block',
  89. start(src) { return src.indexOf(':'); },
  90. tokenizer(src) {
  91. const rule = /^:([^\n]*):(?:\n|$)/;
  92. const match = rule.exec(src);
  93. if (match) {
  94. return {
  95. type: 'underline',
  96. raw: match[0], // This is the text that you want your token to consume from the source
  97. text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
  98. };
  99. }
  100. },
  101. renderer(token) {
  102. return `<u>${token.text}</u>\n`;
  103. }
  104. }]
  105. };
  106. marked.use(underline);
  107. const html = marked.parse('Not Underlined A\n:Underlined B:\nNot Underlined C\n:Not Underlined D');
  108. assert.strictEqual(html, '<p>Not Underlined A</p>\n<u>Underlined B</u>\n<p>Not Underlined C\n:Not Underlined D</p>\n');
  109. });
  110. it('should use custom inline tokenizer + renderer extensions', () => {
  111. const underline = {
  112. name: 'underline',
  113. level: 'inline',
  114. start(src) { return src.indexOf('='); },
  115. tokenizer(src) {
  116. const rule = /^=([^=]+)=/;
  117. const match = rule.exec(src);
  118. if (match) {
  119. return {
  120. type: 'underline',
  121. raw: match[0], // This is the text that you want your token to consume from the source
  122. text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
  123. };
  124. }
  125. },
  126. renderer(token) {
  127. return `<u>${token.text}</u>`;
  128. }
  129. };
  130. marked.use({ extensions: [underline] });
  131. const html = marked.parse('Not Underlined =Underlined= Not Underlined');
  132. assert.strictEqual(html, '<p>Not Underlined <u>Underlined</u> Not Underlined</p>\n');
  133. });
  134. it('should handle interacting block and inline extensions', () => {
  135. const descriptionlist = {
  136. name: 'descriptionList',
  137. level: 'block',
  138. start(src) {
  139. const match = src.match(/:[^:\n]/);
  140. if (match) {
  141. return match.index;
  142. }
  143. },
  144. tokenizer(src, tokens) {
  145. const rule = /^(?::[^:\n]+:[^:\n]*(?:\n|$))+/;
  146. const match = rule.exec(src);
  147. if (match) {
  148. const token = {
  149. type: 'descriptionList',
  150. raw: match[0], // This is the text that you want your token to consume from the source
  151. text: match[0].trim(), // You can add additional properties to your tokens to pass along to the renderer
  152. tokens: []
  153. };
  154. this.lexer.inlineTokens(token.text, token.tokens);
  155. return token;
  156. }
  157. },
  158. renderer(token) {
  159. return `<dl>${this.parser.parseInline(token.tokens)}\n</dl>`;
  160. }
  161. };
  162. const description = {
  163. name: 'description',
  164. level: 'inline',
  165. start(src) { return src.indexOf(':'); },
  166. tokenizer(src, tokens) {
  167. const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
  168. const match = rule.exec(src);
  169. if (match) {
  170. const token = {
  171. type: 'description',
  172. raw: match[0],
  173. dt: [],
  174. dd: []
  175. };
  176. this.lexer.inline(match[1].trim(), token.dt);
  177. this.lexer.inline(match[2].trim(), token.dd);
  178. return token;
  179. }
  180. },
  181. renderer(token) {
  182. return `\n<dt>${this.parser.parseInline(token.dt)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
  183. }
  184. };
  185. marked.use({ extensions: [descriptionlist, description] });
  186. const html = marked.parse('A Description List with One Description:\n'
  187. + ': Topic 1 : Description 1\n'
  188. + ': **Topic 2** : *Description 2*');
  189. assert.strictEqual(html, '<p>A Description List with One Description:</p>\n'
  190. + '<dl>'
  191. + '\n<dt>Topic 1</dt><dd>Description 1</dd>'
  192. + '\n<dt><strong>Topic 2</strong></dt><dd><em>Description 2</em></dd>'
  193. + '\n</dl>');
  194. });
  195. it('should allow other options mixed into the extension', () => {
  196. const extension = {
  197. name: 'underline',
  198. level: 'block',
  199. start(src) { return src.indexOf(':'); },
  200. tokenizer(src) {
  201. const rule = /^:([^\n]*):(?:\n|$)/;
  202. const match = rule.exec(src);
  203. if (match) {
  204. return {
  205. type: 'underline',
  206. raw: match[0], // This is the text that you want your token to consume from the source
  207. text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
  208. };
  209. }
  210. },
  211. renderer(token) {
  212. return `<u>${token.text}</u>\n`;
  213. }
  214. };
  215. marked.use({ silent: true, extensions: [extension] });
  216. const html = marked.parse(':test:\ntest\n<div></div>');
  217. assert.strictEqual(html, '<u>test</u>\n<p>test</p>\n<div></div>');
  218. });
  219. it('should handle renderers that return false', () => {
  220. const extension = {
  221. name: 'test',
  222. level: 'block',
  223. tokenizer(src) {
  224. const rule = /^:([^\n]*):(?:\n|$)/;
  225. const match = rule.exec(src);
  226. if (match) {
  227. return {
  228. type: 'test',
  229. raw: match[0], // This is the text that you want your token to consume from the source
  230. text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
  231. };
  232. }
  233. },
  234. renderer(token) {
  235. if (token.text === 'test') {
  236. return 'test';
  237. }
  238. return false;
  239. }
  240. };
  241. const fallbackRenderer = {
  242. name: 'test',
  243. level: 'block',
  244. renderer(token) {
  245. if (token.text === 'Test') {
  246. return 'fallback';
  247. }
  248. return false;
  249. }
  250. };
  251. marked.use({ extensions: [fallbackRenderer, extension] });
  252. const html = marked.parse(':Test:\n\n:test:\n\n:none:');
  253. assert.strictEqual(html, 'fallbacktest');
  254. });
  255. it('should fall back when tokenizers return false', () => {
  256. const extension = {
  257. name: 'test',
  258. level: 'block',
  259. tokenizer(src) {
  260. const rule = /^:([^\n]*):(?:\n|$)/;
  261. const match = rule.exec(src);
  262. if (match) {
  263. return {
  264. type: 'test',
  265. raw: match[0], // This is the text that you want your token to consume from the source
  266. text: match[1].trim() // You can add additional properties to your tokens to pass along to the renderer
  267. };
  268. }
  269. return false;
  270. },
  271. renderer(token) {
  272. return token.text;
  273. }
  274. };
  275. const extension2 = {
  276. name: 'test',
  277. level: 'block',
  278. tokenizer(src) {
  279. const rule = /^:([^\n]*):(?:\n|$)/;
  280. const match = rule.exec(src);
  281. if (match) {
  282. if (match[1].match(/^[A-Z]/)) {
  283. return {
  284. type: 'test',
  285. raw: match[0],
  286. text: match[1].trim().toUpperCase()
  287. };
  288. }
  289. }
  290. return false;
  291. }
  292. };
  293. marked.use({ extensions: [extension, extension2] });
  294. const html = marked.parse(':Test:\n\n:test:');
  295. assert.strictEqual(html, 'TESTtest');
  296. });
  297. it('should override original tokenizer/renderer with same name, but fall back if returns false', () => {
  298. const extension = {
  299. extensions: [{
  300. name: 'heading',
  301. level: 'block',
  302. tokenizer(src) {
  303. return false; // fall back to default `heading` tokenizer
  304. },
  305. renderer(token) {
  306. return '<h' + token.depth + '>' + token.text + ' RENDERER EXTENSION</h' + token.depth + '>\n';
  307. }
  308. },
  309. {
  310. name: 'code',
  311. level: 'block',
  312. tokenizer(src) {
  313. const rule = /^:([^\n]*):(?:\n|$)/;
  314. const match = rule.exec(src);
  315. if (match) {
  316. return {
  317. type: 'code',
  318. raw: match[0],
  319. text: match[1].trim() + ' TOKENIZER EXTENSION'
  320. };
  321. }
  322. },
  323. renderer(token) {
  324. return false; // fall back to default `code` renderer
  325. }
  326. }]
  327. };
  328. marked.use(extension);
  329. const html = marked.parse('# extension1\n:extension2:');
  330. assert.strictEqual(html, '<h1>extension1 RENDERER EXTENSION</h1>\n<pre><code>extension2 TOKENIZER EXTENSION\n</code></pre>\n');
  331. });
  332. it('should walk only specified child tokens', () => {
  333. const walkableDescription = {
  334. extensions: [{
  335. name: 'walkableDescription',
  336. level: 'inline',
  337. start(src) { return src.indexOf(':'); },
  338. tokenizer(src, tokens) {
  339. const rule = /^:([^:\n]+):([^:\n]*)(?:\n|$)/;
  340. const match = rule.exec(src);
  341. if (match) {
  342. const token = {
  343. type: 'walkableDescription',
  344. raw: match[0],
  345. dt: this.lexer.inline(match[1].trim()),
  346. dd: [],
  347. tokens: []
  348. };
  349. this.lexer.inline(match[2].trim(), token.dd);
  350. this.lexer.inline('unwalked', token.tokens);
  351. return token;
  352. }
  353. },
  354. renderer(token) {
  355. return `\n<dt>${this.parser.parseInline(token.dt)} - ${this.parser.parseInline(token.tokens)}</dt><dd>${this.parser.parseInline(token.dd)}</dd>`;
  356. },
  357. childTokens: ['dd', 'dt']
  358. }],
  359. walkTokens(token) {
  360. if (token.type === 'text') {
  361. token.text += ' walked';
  362. }
  363. }
  364. };
  365. marked.use(walkableDescription);
  366. const html = marked.parse(': Topic 1 : Description 1\n'
  367. + ': **Topic 2** : *Description 2*');
  368. assert.strictEqual(html, '<p>\n<dt>Topic 1 walked - unwalked</dt><dd>Description 1 walked</dd>'
  369. + '\n<dt><strong>Topic 2 walked</strong> - unwalked</dt><dd><em>Description 2 walked</em></dd></p>\n');
  370. });
  371. describe('multiple extensions', () => {
  372. function createExtension(name) {
  373. return {
  374. extensions: [{
  375. name: `block-${name}`,
  376. level: 'block',
  377. start(src) { return src.indexOf('::'); },
  378. tokenizer(src, tokens) {
  379. if (src.startsWith(`::${name}\n`)) {
  380. const text = `:${name}`;
  381. const token = {
  382. type: `block-${name}`,
  383. raw: `::${name}\n`,
  384. text,
  385. tokens: []
  386. };
  387. this.lexer.inline(token.text, token.tokens);
  388. return token;
  389. }
  390. },
  391. renderer(token) {
  392. return `<${token.type}>${this.parser.parseInline(token.tokens)}</${token.type}>\n`;
  393. }
  394. }, {
  395. name: `inline-${name}`,
  396. level: 'inline',
  397. start(src) { return src.indexOf(':'); },
  398. tokenizer(src, tokens) {
  399. if (src.startsWith(`:${name}`)) {
  400. return {
  401. type: `inline-${name}`,
  402. raw: `:${name}`,
  403. text: `used ${name}`
  404. };
  405. }
  406. },
  407. renderer(token) {
  408. return token.text;
  409. }
  410. }],
  411. tokenizer: {
  412. heading(src) {
  413. if (src.startsWith(`# ${name}`)) {
  414. const token = {
  415. type: 'heading',
  416. raw: `# ${name}`,
  417. text: `used ${name}`,
  418. depth: 1,
  419. tokens: []
  420. };
  421. this.lexer.inline(token.text, token.tokens);
  422. return token;
  423. }
  424. return false;
  425. }
  426. },
  427. renderer: {
  428. heading(text, depth, raw) {
  429. if (text === name) {
  430. return `<h${depth}>${text}</h${depth}>\n`;
  431. }
  432. return false;
  433. }
  434. },
  435. walkTokens(token) {
  436. if (token.text === `used ${name}`) {
  437. token.text += ' walked';
  438. }
  439. }
  440. };
  441. }
  442. function createFalseExtension(name) {
  443. return {
  444. extensions: [{
  445. name: `block-${name}`,
  446. level: 'block',
  447. start(src) { return src.indexOf('::'); },
  448. tokenizer(src, tokens) {
  449. return false;
  450. },
  451. renderer(token) {
  452. return false;
  453. }
  454. }, {
  455. name: `inline-${name}`,
  456. level: 'inline',
  457. start(src) { return src.indexOf(':'); },
  458. tokenizer(src, tokens) {
  459. return false;
  460. },
  461. renderer(token) {
  462. return false;
  463. }
  464. }]
  465. };
  466. }
  467. function runTest() {
  468. const html = marked.parse(`
  469. ::extension1
  470. ::extension2
  471. :extension1
  472. :extension2
  473. # extension1
  474. # extension2
  475. # no extension
  476. `);
  477. assert.strictEqual(`\n${html}\n`.replace(/\n+/g, '\n'), `
  478. <block-extension1>used extension1 walked</block-extension1>
  479. <block-extension2>used extension2 walked</block-extension2>
  480. <p>used extension1 walked
  481. used extension2 walked</p>
  482. <h1>used extension1 walked</h1>
  483. <h1>used extension2 walked</h1>
  484. <h1>no extension</h1>
  485. `);
  486. }
  487. it('should merge extensions when calling marked.use multiple times', () => {
  488. marked.use(createExtension('extension1'));
  489. marked.use(createExtension('extension2'));
  490. runTest();
  491. });
  492. it('should merge extensions when calling marked.use with multiple extensions', () => {
  493. marked.use(
  494. createExtension('extension1'),
  495. createExtension('extension2')
  496. );
  497. runTest();
  498. });
  499. it('should fall back to any extensions with the same name if the first returns false', () => {
  500. marked.use(
  501. createExtension('extension1'),
  502. createExtension('extension2'),
  503. createFalseExtension('extension1'),
  504. createFalseExtension('extension2')
  505. );
  506. runTest();
  507. });
  508. it('should merge extensions correctly', () => {
  509. marked.use(
  510. {},
  511. { tokenizer: {} },
  512. { renderer: {} },
  513. { walkTokens: () => {} },
  514. { extensions: [] }
  515. );
  516. // should not throw
  517. marked.parse('# test');
  518. });
  519. });
  520. it('should be async if any extension in use args is async', () => {
  521. marked.use(
  522. { async: true },
  523. { async: false }
  524. );
  525. assert.ok(marked.defaults.async);
  526. });
  527. it.only('should be async if any extension in use is async', () => {
  528. marked.use({ async: true });
  529. marked.use({ async: false });
  530. assert.ok(marked.defaults.async);
  531. });
  532. it('should reset async with setOptions', () => {
  533. marked.use({ async: true });
  534. setOptions({ async: false });
  535. assert.ok(!defaults.async);
  536. });
  537. it('should return Promise if async', () => {
  538. assert.ok(marked.parse('test', { async: true }) instanceof Promise);
  539. });
  540. it('should return string if not async', () => {
  541. assert.strictEqual(typeof marked.parse('test', { async: false }), 'string');
  542. });
  543. it('should return Promise if async is set by extension', () => {
  544. marked.use({ async: true });
  545. assert.ok(marked.parse('test', { async: false }) instanceof Promise);
  546. });
  547. it('should allow deleting/editing tokens', () => {
  548. const styleTags = {
  549. extensions: [{
  550. name: 'inlineStyleTag',
  551. level: 'inline',
  552. start(src) {
  553. const match = src.match(/ *{[^\{]/);
  554. if (match) {
  555. return match.index;
  556. }
  557. },
  558. tokenizer(src, tokens) {
  559. const rule = /^ *{([^\{\}\n]+)}$/;
  560. const match = rule.exec(src);
  561. if (match) {
  562. return {
  563. type: 'inlineStyleTag',
  564. raw: match[0], // This is the text that you want your token to consume from the source
  565. text: match[1]
  566. };
  567. }
  568. }
  569. },
  570. {
  571. name: 'styled',
  572. renderer(token) {
  573. token.type = token.originalType;
  574. const text = this.parser.parse([token]);
  575. const openingTag = /(<[^\s<>]+)([^\n<>]*>.*)/s.exec(text);
  576. if (openingTag) {
  577. return `${openingTag[1]} ${token.style}${openingTag[2]}`;
  578. }
  579. return text;
  580. }
  581. }],
  582. walkTokens(token) {
  583. if (token.tokens) {
  584. const finalChildToken = token.tokens[token.tokens.length - 1];
  585. if (finalChildToken && finalChildToken.type === 'inlineStyleTag') {
  586. token.originalType = token.type;
  587. token.type = 'styled';
  588. token.style = `style="color:${finalChildToken.text};"`;
  589. token.tokens.pop();
  590. }
  591. }
  592. }
  593. };
  594. marked.use(styleTags);
  595. const html = marked.parse('This is a *paragraph* with blue text. {blue}\n'
  596. + '# This is a *header* with red text {red}');
  597. assert.strictEqual(html, '<p style="color:blue;">This is a <em>paragraph</em> with blue text.</p>\n'
  598. + '<h1 style="color:red;">This is a <em>header</em> with red text</h1>\n');
  599. });
  600. it('should use renderer', () => {
  601. const extension = {
  602. renderer: {
  603. paragraph(text) {
  604. return 'extension';
  605. }
  606. }
  607. };
  608. mock.method(extension.renderer, 'paragraph');
  609. marked.use(extension);
  610. const html = marked.parse('text');
  611. assert.strictEqual(extension.renderer.paragraph.mock.calls[0].arguments[0], 'text');
  612. assert.strictEqual(html, 'extension');
  613. });
  614. it('should use tokenizer', () => {
  615. const extension = {
  616. tokenizer: {
  617. paragraph(text) {
  618. const token = {
  619. type: 'paragraph',
  620. raw: text,
  621. text: 'extension',
  622. tokens: []
  623. };
  624. this.lexer.inline(token.text, token.tokens);
  625. return token;
  626. }
  627. }
  628. };
  629. mock.method(extension.tokenizer, 'paragraph');
  630. marked.use(extension);
  631. const html = marked.parse('text');
  632. assert.strictEqual(extension.tokenizer.paragraph.mock.calls[0].arguments[0], 'text');
  633. assert.strictEqual(html, '<p>extension</p>\n');
  634. });
  635. it('should use walkTokens', () => {
  636. let walked = 0;
  637. const extension = {
  638. walkTokens(token) {
  639. walked++;
  640. }
  641. };
  642. marked.use(extension);
  643. marked.parse('text');
  644. assert.strictEqual(walked, 2);
  645. });
  646. it('should use options from extension', () => {
  647. const extension = {
  648. breaks: true
  649. };
  650. marked.use(extension);
  651. const html = marked.parse('line1\nline2');
  652. assert.strictEqual(html, '<p>line1<br>line2</p>\n');
  653. });
  654. it('should call all walkTokens in reverse order', () => {
  655. let walkedOnce = 0;
  656. let walkedTwice = 0;
  657. const extension1 = {
  658. walkTokens(token) {
  659. if (token.walkedOnce) {
  660. walkedTwice++;
  661. }
  662. }
  663. };
  664. const extension2 = {
  665. walkTokens(token) {
  666. walkedOnce++;
  667. token.walkedOnce = true;
  668. }
  669. };
  670. marked.use(extension1);
  671. marked.use(extension2);
  672. marked.parse('text');
  673. assert.strictEqual(walkedOnce, 2);
  674. assert.strictEqual(walkedTwice, 2);
  675. });
  676. it('should use last extension function and not override others', () => {
  677. const extension1 = {
  678. renderer: {
  679. paragraph(text) {
  680. return 'extension1 paragraph\n';
  681. },
  682. html(html) {
  683. return 'extension1 html\n';
  684. }
  685. }
  686. };
  687. const extension2 = {
  688. renderer: {
  689. paragraph(text) {
  690. return 'extension2 paragraph\n';
  691. }
  692. }
  693. };
  694. marked.use(extension1);
  695. marked.use(extension2);
  696. const html = marked.parse(`
  697. paragraph
  698. <html />
  699. # heading
  700. `);
  701. assert.strictEqual(html, 'extension2 paragraph\nextension1 html\n<h1>heading</h1>\n');
  702. });
  703. it('should use previous extension when returning false', () => {
  704. const extension1 = {
  705. renderer: {
  706. paragraph(text) {
  707. if (text !== 'original') {
  708. return 'extension1 paragraph\n';
  709. }
  710. return false;
  711. }
  712. }
  713. };
  714. const extension2 = {
  715. renderer: {
  716. paragraph(text) {
  717. if (text !== 'extension1' && text !== 'original') {
  718. return 'extension2 paragraph\n';
  719. }
  720. return false;
  721. }
  722. }
  723. };
  724. marked.use(extension1);
  725. marked.use(extension2);
  726. const html = marked.parse(`
  727. paragraph
  728. extension1
  729. original
  730. `);
  731. assert.strictEqual(html, 'extension2 paragraph\nextension1 paragraph\n<p>original</p>\n');
  732. });
  733. it('should get options with this.options', () => {
  734. const extension = {
  735. renderer: {
  736. heading: () => {
  737. return this && this.options ? 'arrow options\n' : 'arrow no options\n';
  738. },
  739. html: function() {
  740. return this.options ? 'function options\n' : 'function no options\n';
  741. },
  742. paragraph() {
  743. return this.options ? 'shorthand options\n' : 'shorthand no options\n';
  744. }
  745. }
  746. };
  747. marked.use(extension);
  748. const html = marked.parse(`
  749. # heading
  750. <html />
  751. paragraph
  752. `);
  753. assert.strictEqual(html, 'arrow no options\nfunction options\nshorthand options\n');
  754. });
  755. });
  756. describe('walkTokens', () => {
  757. it('should walk over every token', () => {
  758. const markdown = `
  759. paragraph
  760. ---
  761. # heading
  762. \`\`\`
  763. code
  764. \`\`\`
  765. | a | b |
  766. |---|---|
  767. | 1 | 2 |
  768. | 3 | 4 |
  769. > blockquote
  770. - list
  771. <div>html</div>
  772. [link](https://example.com)
  773. ![image](https://example.com/image.jpg)
  774. **strong**
  775. *em*
  776. \`codespan\`
  777. ~~del~~
  778. br
  779. br
  780. `;
  781. const tokens = lexer(markdown, { ...getDefaults(), breaks: true });
  782. const tokensSeen = [];
  783. walkTokens(tokens, (token) => {
  784. tokensSeen.push([token.type, (token.raw || '').replace(/\n/g, '')]);
  785. });
  786. assert.deepEqual(tokensSeen, [
  787. ['space', ''],
  788. ['paragraph', 'paragraph'],
  789. ['text', 'paragraph'],
  790. ['space', ''],
  791. ['hr', '---'],
  792. ['heading', '# heading'],
  793. ['text', 'heading'],
  794. ['code', '```code```'],
  795. ['space', ''],
  796. ['table', '| a | b ||---|---|| 1 | 2 || 3 | 4 |'],
  797. ['text', 'a'],
  798. ['text', 'b'],
  799. ['text', '1'],
  800. ['text', '2'],
  801. ['text', '3'],
  802. ['text', '4'],
  803. ['blockquote', '> blockquote'],
  804. ['paragraph', 'blockquote'],
  805. ['text', 'blockquote'],
  806. ['list', '- list'],
  807. ['list_item', '- list'],
  808. ['text', 'list'],
  809. ['text', 'list'],
  810. ['space', ''],
  811. ['html', '<div>html</div>'],
  812. ['paragraph', '[link](https://example.com)'],
  813. ['link', '[link](https://example.com)'],
  814. ['text', 'link'],
  815. ['space', ''],
  816. ['paragraph', '![image](https://example.com/image.jpg)'],
  817. ['image', '![image](https://example.com/image.jpg)'],
  818. ['space', ''],
  819. ['paragraph', '**strong**'],
  820. ['strong', '**strong**'],
  821. ['text', 'strong'],
  822. ['space', ''],
  823. ['paragraph', '*em*'],
  824. ['em', '*em*'],
  825. ['text', 'em'],
  826. ['space', ''],
  827. ['paragraph', '`codespan`'],
  828. ['codespan', '`codespan`'],
  829. ['space', ''],
  830. ['paragraph', '~~del~~'],
  831. ['del', '~~del~~'],
  832. ['text', 'del'],
  833. ['space', ''],
  834. ['paragraph', 'brbr'],
  835. ['text', 'br'],
  836. ['br', ''],
  837. ['text', 'br']
  838. ]);
  839. });
  840. it('should assign marked to `this`', () => {
  841. marked.use({
  842. walkTokens(token) {
  843. if (token.type === 'em') {
  844. token.text += ' walked';
  845. token.tokens = this.Lexer.lexInline(token.text);
  846. }
  847. }
  848. });
  849. assert.strictEqual(marked.parse('*text*').trim(), '<p><em>text walked</em></p>');
  850. });
  851. it('should wait for async `walkTokens` function', async() => {
  852. marked.use({
  853. async: true,
  854. async walkTokens(token) {
  855. if (token.type === 'em') {
  856. await timeout();
  857. token.text += ' walked';
  858. token.tokens = this.Lexer.lexInline(token.text);
  859. }
  860. }
  861. });
  862. const promise = marked.parse('*text*');
  863. assert.ok(promise instanceof Promise);
  864. const html = await promise;
  865. assert.strictEqual(html.trim(), '<p><em>text walked</em></p>');
  866. });
  867. it('should return promise if async and no walkTokens function', async() => {
  868. marked.use({
  869. async: true
  870. });
  871. const promise = marked.parse('*text*');
  872. assert.ok(promise instanceof Promise);
  873. const html = await promise;
  874. assert.strictEqual(html.trim(), '<p><em>text</em></p>');
  875. });
  876. });
  877. });