fix(parser): fix utils
parent 04b51f03cb
commit 0f835a90fc

@@ -1,18 +1,25 @@
 import type Token from 'markdown-it/lib/token'
 import MarkdownIt from 'markdown-it'
 import type { ISectionOptions, IStylesOptions, Paragraph, Table, TableOfContents } from 'docx'
-import { Document } from 'docx'
+import { Document, Packer } from 'docx'
+import type { IMarkdownReportConfig } from '@md-report/types'
 import { sliceParagraph, sliceSection } from './utils'
 import { paragraphParser } from './paragraph'
 
 const md = new MarkdownIt()
 
-export function parse(props: { markdown: string; config: { meta: Record<string, any>; styles: IStylesOptions } }): Document {
+export async function getBuffer(props: { markdown: string; config: IMarkdownReportConfig }): Promise<Buffer> {
+  const document = parse(props)
+  const buffer = await Packer.toBuffer(document)
+  return buffer
+}
+
+export function parse(props: { markdown: string; config: IMarkdownReportConfig }): Document {
   const { markdown, config } = props
-  const { meta, styles } = config
+  const { styles } = config
   // Get frontmatter.
   // Get tokens.
-  const tokens: Token[] = md.parse(markdown, meta)
+  const tokens: Token[] = md.parse(markdown, {})
   return parseDocument(tokens, styles)
 }
 
@@ -22,9 +29,9 @@ export function parseDocument(tokens: Token[], styles: IStylesOptions): Document
   const sections: ISectionOptions[] = []
   // Split and parse sections.
   while (pos < tokens.length) {
-    const { tokens: section, offset: nextPos } = sliceSection(tokens.slice(pos))
+    const { tokens: section, offset } = sliceSection(tokens.slice(pos))
     sections.push(parseSection(section))
-    pos = nextPos
+    pos += offset
   }
   return new Document({
     styles,
@@ -38,10 +45,10 @@ export function parseSection(tokens: Token[]): ISectionOptions {
   const children: (Paragraph | Table | TableOfContents)[] = []
   // Split and parse paragraphs.
   while (pos < tokens.length) {
-    const { tokens: paragraph, offset: nextPos } = sliceParagraph(tokens.slice(pos))
-    const parser = paragraphParser[tokens[0].tag]
+    const { tokens: paragraph, offset } = sliceParagraph(tokens.slice(pos))
+    const parser = paragraphParser[paragraph[0].tag]
     children.push(parser(paragraph))
-    pos = nextPos
+    pos += offset
   }
   return {
     children,
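
For context, a minimal usage sketch of the new getBuffer entry point. The import path and the way the config is obtained are assumptions for illustration (this commit only shows the parser sources); Packer.toBuffer is the docx API used in the hunk above and resolves to a Node Buffer.

import { writeFileSync } from 'fs'
// Assumed import path — the parser package's published name is not shown in this diff.
import { getBuffer } from '@md-report/parser'
import type { IMarkdownReportConfig } from '@md-report/types'

// Config comes from the caller; its exact shape is defined in @md-report/types.
declare const config: IMarkdownReportConfig

async function main(): Promise<void> {
  const markdown = '# Report\n\nHello **world**.'
  // getBuffer() parses the markdown into a docx Document and packs it with Packer.toBuffer.
  const buffer = await getBuffer({ markdown, config })
  writeFileSync('report.docx', buffer)
}

main()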
@@ -2,11 +2,12 @@ import { readFileSync } from 'fs'
 import type { IImageOptions, IRunOptions, ParagraphChild } from 'docx'
 import { ImageRun, Paragraph, TextRun } from 'docx'
 import type Token from 'markdown-it/lib/token'
+import { StyleId } from '@md-report/types'
 import { sliceInlineText } from './utils'
 
-export function parseInline(props: { tokens: Token[]; style?: string }): Paragraph {
+export function parseInline(props: { tokens: Token[]; style?: StyleId }): Paragraph {
   // Variables.
-  const { tokens, style = 'normal' } = props
+  const { tokens, style = StyleId.normal } = props
   const { children: childrenTokens } = tokens[0]
   const { length } = childrenTokens || []
   const children: ParagraphChild[] = []
@@ -14,7 +15,7 @@ export function parseInline(props: { tokens: Token[]; style?: string }): Paragraph {
   // Parse inline children.
   while (pos < length) {
     const { tokens: paragraphChild, offset: nextPos } = sliceInlineText(tokens.slice(pos))
-    if (tokens[0].tag === 'img')
+    if (paragraphChild[0].tag === 'img')
       children.push(parseImage(paragraphChild))
     else
       children.push(parseText(paragraphChild))
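
A note on the paragraphChild[0].tag change: the tokens passed to parseInline are markdown-it inline tokens, whose own tag is an empty string, while an image shows up as a child token with tag 'img'. The sketch below only illustrates those markdown-it token shapes; sliceInlineText itself is not part of this commit.

import MarkdownIt from 'markdown-it'

const md = new MarkdownIt()
const inline = md.parse('![logo](logo.png) caption', {}).find(t => t.type === 'inline')!
console.log(inline.tag)               // '' — the outer inline token never has tag 'img'
console.log(inline.children![0].tag)  // 'img' — the image lives in the children array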
@@ -1,5 +1,6 @@
 import type Token from 'markdown-it/lib/token'
 import { Paragraph, Table, TableCell, TableRow } from 'docx'
+import { StyleId } from '@md-report/types'
 import { sliceTableRow } from './utils'
 import { parseInline } from './inline'
 
@@ -17,9 +18,9 @@ export function parseTable(tokens: Token[]): Table {
   let pos = 0
   const rows: TableRow[] = []
   while (pos < tokens.length) {
-    const { tokens: tableRow, offset: nextPos } = sliceTableRow(tokens.slice(pos))
+    const { tokens: tableRow, offset } = sliceTableRow(tokens.slice(pos))
     rows.push(parseTableRow(tableRow))
-    pos = nextPos
+    pos += offset
   }
   return new Table({
     style: 'table',
@@ -32,7 +33,7 @@ export function parseTableRow(tokens: Token[]): TableRow {
   const children: TableCell[] = cells.map(cell => new TableCell({
     children: [parseInline({
       tokens: [cell],
-      style: 'table',
+      style: StyleId.table,
     })],
   }))
   return new TableRow({
@@ -44,7 +45,7 @@ export function parseParagraph(tokens: Token[]): Paragraph {
   const inline = tokens.filter(token => token.type === 'inline')
   return parseInline({
     tokens: inline,
-    style: 'normal',
+    style: StyleId.normal,
   })
 }
 
@@ -55,7 +56,7 @@ export function parseHeading(tokens: Token[]): Paragraph {
   const { length } = tokens[0].markup
   return parseInline({
     tokens: inline,
-    style: `heading${length}`,
+    style: StyleId[`h${length}` as keyof typeof StyleId],
   })
 }
 
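
The StyleId members referenced above (StyleId.normal, StyleId.table, StyleId[`h${length}`]) suggest roughly the following shape for the enum in @md-report/types. This is an inference from the call sites in this commit, not the actual source, and the string values in particular are guesses based on the style names the old code passed.

// Inferred sketch only — see @md-report/types for the real definition.
export enum StyleId {
  normal = 'normal',
  table = 'table',
  h1 = 'heading1',
  h2 = 'heading2',
  h3 = 'heading3',
  h4 = 'heading4',
  h5 = 'heading5',
  h6 = 'heading6',
}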
@@ -6,24 +6,27 @@ export interface SliceResult {
 }
 
 export function sliceSection(tokens: Token[]): SliceResult {
-  let offset = 0
-  if (tokens[0].tag === 'h1') {
-    while (tokens[offset].nesting >= 0 || tokens[offset].tag !== 'h1')
-      offset++
+  let offset = 1
+  while (offset < tokens.length) {
+    if (tokens[offset].tag === 'h1' && tokens[offset].nesting === 1)
+      break
+    offset++
   }
   return {
-    tokens: tokens.slice(0, offset + 1),
-    offset: offset + 1,
+    tokens: tokens.slice(0, offset),
+    offset,
   }
 }
 
 export function sliceParagraph(tokens: Token[]): SliceResult {
   let offset = 0
-  // Code block.
-  if (tokens[0].tag !== 'code') {
+  if (tokens[0].type !== 'fence') {
     // Normal paragraphs.
-    while (tokens[offset].level > 0 || tokens[offset].nesting >= 0)
-      offset++
+    while (offset < tokens.length) {
+      if (tokens[offset].nesting === -1 && tokens[offset].level === 0)
+        break
+      offset++
+    }
   }
   // Return paragraph tokens.
   return {
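
A quick sketch of how the rewritten sliceSection behaves, assuming markdown-it's usual token stream for a two-section document (the token list in the comment is abbreviated):

import MarkdownIt from 'markdown-it'
import { sliceSection } from './utils'

const md = new MarkdownIt()
// Roughly: [h1_open, inline, h1_close, p_open, inline, p_close, h1_open, inline, h1_close]
const tokens = md.parse('# A\n\ntext\n\n# B\n', {})
const { tokens: section, offset } = sliceSection(tokens)
// section holds the first heading and its paragraph (6 tokens); offset === 6,
// so parseDocument advances with pos += offset and the next slice starts at the second h1.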
@@ -34,7 +37,7 @@ export function sliceParagraph(tokens: Token[]): SliceResult {
 
 export function sliceTableRow(tokens: Token[]): SliceResult {
   let offset = 0
-  while (tokens[offset].type !== 'tr_open')
+  while (tokens[offset]?.type !== 'tr_open')
     offset++
   return {
     tokens: tokens.slice(0, offset),