refactor(feishu): type docx and media sdk seams

This commit is contained in:
Ayaan Zaidi
2026-03-27 11:12:54 +05:30
parent 2f979e9be0
commit bd4ecbfe49
5 changed files with 301 additions and 137 deletions

View File

@@ -8,18 +8,43 @@
import type * as Lark from "@larksuiteoapi/node-sdk";
import { cleanBlocksForDescendant } from "./docx-table-ops.js";
import type { FeishuDocxBlock, FeishuDocxBlockChild } from "./docx-types.js";
// Maximum number of descendant blocks accepted per create call (batching below
// keeps each request at or under this count).
export const BATCH_SIZE = 1000; // Feishu API limit per request
// Minimal structural logging seam; `info` is optional so callers may pass any
// logger-like object (or nothing at all).
type Logger = { info?: (msg: string) => void };
// First parameter of the SDK's documentBlockDescendant.create call, derived
// from the SDK's own signature so it tracks SDK upgrades automatically.
type DocxDescendantCreatePayload = NonNullable<
Parameters<Lark.Client["docx"]["documentBlockDescendant"]["create"]>[0]
>;
// Element type of the payload's `descendants` array — the exact block shape
// the SDK expects when creating descendants.
type DocxDescendantCreateBlock = NonNullable<
NonNullable<DocxDescendantCreatePayload["data"]>["descendants"]
>[number];
/**
 * Normalize a child-id field to an array form.
 *
 * A single id string becomes a one-element array; an array passes through
 * unchanged; anything else yields `undefined` (field absent).
 */
function normalizeChildIds(children: string[] | string | undefined): string[] | undefined {
  if (typeof children === "string") {
    return [children];
  }
  return Array.isArray(children) ? children : undefined;
}
/**
 * Adapt an internal block to the block shape the SDK's descendant-create
 * endpoint expects, normalizing `children` to an array when one is present.
 */
function toDescendantBlock(block: FeishuDocxBlock): DocxDescendantCreateBlock {
  const childIds = normalizeChildIds(block.children);
  const candidate =
    childIds === undefined ? { ...block } : { ...block, children: childIds };
  // The cast is the deliberate seam between our block model and the SDK type.
  return candidate as DocxDescendantCreateBlock;
}
/**
* Collect all descendant blocks for a given first-level block ID.
* Recursively traverses the block tree to gather all children.
*/
// eslint-disable-next-line @typescript-eslint/no-explicit-any -- SDK block types
function collectDescendants(blockMap: Map<string, any>, rootId: string): any[] {
const result: any[] = [];
function collectDescendants(
blockMap: Map<string, FeishuDocxBlock>,
rootId: string,
): FeishuDocxBlock[] {
const result: FeishuDocxBlock[] = [];
const visited = new Set<string>();
function collect(blockId: string) {
@@ -57,11 +82,11 @@ function collectDescendants(blockMap: Map<string, any>, rootId: string): any[] {
async function insertBatch(
client: Lark.Client,
docToken: string,
blocks: any[],
blocks: FeishuDocxBlock[],
firstLevelBlockIds: string[],
parentBlockId: string = docToken,
index: number = -1,
): Promise<any[]> {
): Promise<FeishuDocxBlockChild[]> {
const descendants = cleanBlocksForDescendant(blocks);
if (descendants.length === 0) {
@@ -72,7 +97,7 @@ async function insertBatch(
path: { document_id: docToken, block_id: parentBlockId },
data: {
children_id: firstLevelBlockIds,
descendants,
descendants: descendants.map(toDescendantBlock),
index,
},
});
@@ -104,26 +129,31 @@ async function insertBatch(
export async function insertBlocksInBatches(
client: Lark.Client,
docToken: string,
blocks: any[],
blocks: FeishuDocxBlock[],
firstLevelBlockIds: string[],
logger?: Logger,
parentBlockId: string = docToken,
startIndex: number = -1,
): Promise<{ children: any[]; skipped: string[] }> {
const allChildren: any[] = [];
): Promise<{ children: FeishuDocxBlockChild[]; skipped: string[] }> {
const allChildren: FeishuDocxBlockChild[] = [];
// Build batches ensuring each batch has ≤1000 total descendants
const batches: { firstLevelIds: string[]; blocks: any[] }[] = [];
let currentBatch: { firstLevelIds: string[]; blocks: any[] } = { firstLevelIds: [], blocks: [] };
const batches: Array<{ firstLevelIds: string[]; blocks: FeishuDocxBlock[] }> = [];
let currentBatch: { firstLevelIds: string[]; blocks: FeishuDocxBlock[] } = {
firstLevelIds: [],
blocks: [],
};
const usedBlockIds = new Set<string>();
const blockMap = new Map<string, any>();
const blockMap = new Map<string, FeishuDocxBlock>();
for (const block of blocks) {
blockMap.set(block.block_id, block);
if (block.block_id) {
blockMap.set(block.block_id, block);
}
}
for (const firstLevelId of firstLevelBlockIds) {
const descendants = collectDescendants(blockMap, firstLevelId);
const newBlocks = descendants.filter((b) => !usedBlockIds.has(b.block_id));
const newBlocks = descendants.filter((b) => b.block_id && !usedBlockIds.has(b.block_id));
// A single block whose subtree exceeds the API limit cannot be split
// (a table or other compound block must be inserted atomically).
@@ -148,7 +178,9 @@ export async function insertBlocksInBatches(
currentBatch.firstLevelIds.push(firstLevelId);
for (const block of newBlocks) {
currentBatch.blocks.push(block);
usedBlockIds.add(block.block_id);
if (block.block_id) {
usedBlockIds.add(block.block_id);
}
}
}