fix(parser): fix utils
parent 04b51f03cb
commit 0f835a90fc

@@ -1,18 +1,25 @@
 import type Token from 'markdown-it/lib/token'
 import MarkdownIt from 'markdown-it'
 import type { ISectionOptions, IStylesOptions, Paragraph, Table, TableOfContents } from 'docx'
-import { Document } from 'docx'
+import { Document, Packer } from 'docx'
+import type { IMarkdownReportConfig } from '@md-report/types'
 import { sliceParagraph, sliceSection } from './utils'
 import { paragraphParser } from './paragraph'

 const md = new MarkdownIt()

-export function parse(props: { markdown: string; config: { meta: Record<string, any>; styles: IStylesOptions } }): Document {
+export async function getBuffer(props: { markdown: string; config: IMarkdownReportConfig }): Promise<Buffer> {
+  const document = parse(props)
+  const buffer = await Packer.toBuffer(document)
+  return buffer
+}
+
+export function parse(props: { markdown: string; config: IMarkdownReportConfig }): Document {
   const { markdown, config } = props
-  const { meta, styles } = config
+  const { styles } = config
   // Get frontmatter.
   // Get tokens.
-  const tokens: Token[] = md.parse(markdown, meta)
+  const tokens: Token[] = md.parse(markdown, {})
   return parseDocument(tokens, styles)
 }

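The new getBuffer wrapper renders the parsed Document to a .docx byte buffer via docx's Packer. A rough usage sketch; the package entry name, markdown string, and config object below are illustrative assumptions, not part of this commit:

    import { writeFileSync } from 'fs'
    // '@md-report/core' is an assumed entry point that re-exports getBuffer.
    import { getBuffer } from '@md-report/core'
    import type { IMarkdownReportConfig } from '@md-report/types'

    async function main() {
      // Placeholder config; the real IMarkdownReportConfig carries at least a styles section.
      const config = { styles: {} } as unknown as IMarkdownReportConfig
      const buffer = await getBuffer({ markdown: '# Title\n\nHello world.', config })
      writeFileSync('report.docx', buffer)
    }

    main()
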
@@ -22,9 +29,9 @@ export function parseDocument(tokens: Token[], styles: IStylesOptions): Document
   const sections: ISectionOptions[] = []
   // Split and parse sections.
   while (pos < tokens.length) {
-    const { tokens: section, offset: nextPos } = sliceSection(tokens.slice(pos))
+    const { tokens: section, offset } = sliceSection(tokens.slice(pos))
     sections.push(parseSection(section))
-    pos = nextPos
+    pos += offset
   }
   return new Document({
     styles,
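This is the iteration bug the commit targets: the slice helpers return an offset relative to the start of the slice they were handed, so the caller must advance with pos += offset; assigning it as an absolute position (the old pos = nextPos) re-reads or stalls once the second section is reached. A minimal sketch of the consumer pattern, assuming the SliceResult shape declared in utils:

    import type Token from 'markdown-it/lib/token'
    import { sliceSection } from './utils'

    // SliceResult (declared in utils):
    //   tokens: Token[]  tokens making up the current section
    //   offset: number   tokens consumed, relative to the slice start
    function collectSections(tokens: Token[]): Token[][] {
      const sections: Token[][] = []
      let pos = 0
      while (pos < tokens.length) {
        const { tokens: chunk, offset } = sliceSection(tokens.slice(pos))
        sections.push(chunk)
        pos += offset // relative advance into the original token array
      }
      return sections
    }
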
@@ -38,10 +45,10 @@ export function parseSection(tokens: Token[]): ISectionOptions {
   const children: (Paragraph | Table | TableOfContents)[] = []
   // Split and parse paragraphs.
   while (pos < tokens.length) {
-    const { tokens: paragraph, offset: nextPos } = sliceParagraph(tokens.slice(pos))
-    const parser = paragraphParser[tokens[0].tag]
+    const { tokens: paragraph, offset } = sliceParagraph(tokens.slice(pos))
+    const parser = paragraphParser[paragraph[0].tag]
     children.push(parser(paragraph))
-    pos = nextPos
+    pos += offset
   }
   return {
     children,
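parseSection now dispatches on the first token of the freshly sliced paragraph instead of the section's first token, so each chunk is routed to the matching handler. The dispatch table itself is not part of this diff; a hypothetical sketch of its shape, keyed by token tag:

    import type Token from 'markdown-it/lib/token'
    import type { Paragraph, Table } from 'docx'

    // Assumed shape of the map exported from './paragraph'; real keys and handlers may differ.
    const paragraphParser: Record<string, (tokens: Token[]) => Paragraph | Table> = {
      p: parseParagraph,
      h1: parseHeading,
      h2: parseHeading,
      table: parseTable,
    }
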
@@ -2,11 +2,12 @@ import { readFileSync } from 'fs'
 import type { IImageOptions, IRunOptions, ParagraphChild } from 'docx'
 import { ImageRun, Paragraph, TextRun } from 'docx'
 import type Token from 'markdown-it/lib/token'
+import { StyleId } from '@md-report/types'
 import { sliceInlineText } from './utils'

-export function parseInline(props: { tokens: Token[]; style?: string }): Paragraph {
+export function parseInline(props: { tokens: Token[]; style?: StyleId }): Paragraph {
   // Variables.
-  const { tokens, style = 'normal' } = props
+  const { tokens, style = StyleId.normal } = props
   const { children: childrenTokens } = tokens[0]
   const { length } = childrenTokens || []
   const children: ParagraphChild[] = []
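The loose string style names are replaced by the StyleId enum from @md-report/types. The enum itself is not shown in this diff; judging from the members referenced in this commit (normal, table, h1…h6), it presumably looks roughly like:

    // Assumed definition, inferred from usage only.
    export enum StyleId {
      normal = 'normal',
      table = 'table',
      h1 = 'h1',
      h2 = 'h2',
      h3 = 'h3',
      h4 = 'h4',
      h5 = 'h5',
      h6 = 'h6',
    }
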
@@ -14,7 +15,7 @@ export function parseInline(props: { tokens: Token[]; style?: string }): Paragra
   // Parse inline children.
   while (pos < length) {
     const { tokens: paragraphChild, offset: nextPos } = sliceInlineText(tokens.slice(pos))
-    if (tokens[0].tag === 'img')
+    if (paragraphChild[0].tag === 'img')
       children.push(parseImage(paragraphChild))
     else
       children.push(parseText(paragraphChild))

@@ -1,5 +1,6 @@
 import type Token from 'markdown-it/lib/token'
 import { Paragraph, Table, TableCell, TableRow } from 'docx'
+import { StyleId } from '@md-report/types'
 import { sliceTableRow } from './utils'
 import { parseInline } from './inline'

@@ -17,9 +18,9 @@ export function parseTable(tokens: Token[]): Table {
   let pos = 0
   const rows: TableRow[] = []
   while (pos < tokens.length) {
-    const { tokens: tableRow, offset: nextPos } = sliceTableRow(tokens.slice(pos))
+    const { tokens: tableRow, offset } = sliceTableRow(tokens.slice(pos))
     rows.push(parseTableRow(tableRow))
-    pos = nextPos
+    pos += offset
   }
   return new Table({
     style: 'table',
@@ -32,7 +33,7 @@ export function parseTableRow(tokens: Token[]): TableRow {
   const children: TableCell[] = cells.map(cell => new TableCell({
     children: [parseInline({
       tokens: [cell],
-      style: 'table',
+      style: StyleId.table,
     })],
   }))
   return new TableRow({
@@ -44,7 +45,7 @@ export function parseParagraph(tokens: Token[]): Paragraph {
   const inline = tokens.filter(token => token.type === 'inline')
   return parseInline({
     tokens: inline,
-    style: 'normal',
+    style: StyleId.normal,
   })
 }

@@ -55,7 +56,7 @@ export function parseHeading(tokens: Token[]): Paragraph {
   const { length } = tokens[0].markup
   return parseInline({
     tokens: inline,
-    style: `heading${length}`,
+    style: StyleId[`h${length}` as keyof typeof StyleId],
   })
 }

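For headings, the markup length (the number of leading '#' characters) selects the enum member, so the old template string 'heading2' becomes StyleId.h2. A small illustration of the lookup, assuming the StyleId sketch above:

    import { StyleId } from '@md-report/types'

    const markup = '##'       // tokens[0].markup for a level-2 heading
    const { length } = markup // 2
    const style = StyleId[`h${length}` as keyof typeof StyleId] // StyleId.h2
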
@@ -6,24 +6,27 @@ export interface SliceResult {
 }

 export function sliceSection(tokens: Token[]): SliceResult {
-  let offset = 0
-  if (tokens[0].tag === 'h1') {
-    while (tokens[offset].nesting >= 0 || tokens[offset].tag !== 'h1')
-      offset++
+  let offset = 1
+  while (offset < tokens.length) {
+    if (tokens[offset].tag === 'h1' && tokens[offset].nesting === 1)
+      break
+    offset++
   }
   return {
-    tokens: tokens.slice(0, offset + 1),
-    offset: offset + 1,
+    tokens: tokens.slice(0, offset),
+    offset,
   }
 }

 export function sliceParagraph(tokens: Token[]): SliceResult {
   let offset = 0
   // Code block.
-  if (tokens[0].type !== 'fence') {
+  if (tokens[0].tag !== 'code') {
     // Normal paragraphs.
-    while (tokens[offset].level > 0 || tokens[offset].nesting >= 0)
-      offset++
+    while (offset < tokens.length) {
+      if (tokens[offset].nesting === -1 && tokens[offset].level === 0)
+        break
+      offset++
+    }
   }
   // Return paragraph tokens.
   return {
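The rewritten sliceSection starts at offset 1 (past the current section's own heading_open) and scans forward to the next opening h1 token, so every slice holds exactly one top-level section. A rough walkthrough on an assumed markdown-it token stream:

    // md.parse('# A\n\ntext\n\n# B\n\ntext', {}) yields roughly:
    // 0 heading_open   (tag 'h1', nesting  1)
    // 1 inline         ('A')
    // 2 heading_close  (tag 'h1', nesting -1)
    // 3 paragraph_open, 4 inline ('text'), 5 paragraph_close
    // 6 heading_open   (tag 'h1', nesting  1)  <- scan stops here
    // ...
    // sliceSection(tokens) then returns { tokens: tokens.slice(0, 6), offset: 6 },
    // and the caller's `pos += offset` lands on the next heading_open.
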
@@ -34,7 +37,7 @@ export function sliceParagraph(tokens: Token[]): SliceResult {

 export function sliceTableRow(tokens: Token[]): SliceResult {
   let offset = 0
-  while (tokens[offset].type !== 'tr_open')
+  while (tokens[offset]?.type !== 'tr_open')
     offset++
   return {
     tokens: tokens.slice(0, offset),
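The only change in sliceTableRow is the optional chaining: when offset runs past the end of the token list, tokens[offset] is undefined, and the old .type access threw a TypeError; with ?. the read simply yields undefined. A short illustration of the difference:

    const tokens: { type: string }[] = [] // pretend we have scanned past the last row
    // tokens[0].type                     // old behaviour: TypeError (reading 'type' of undefined)
    tokens[0]?.type                       // new behaviour: evaluates to undefined
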