'use client';
import * as React from 'react';
import { Plate, usePlateEditor } from 'platejs/react';
import { EditorKit } from '@/components/editor/editor-kit';
import { Editor, EditorContainer } from '@/components/ui/editor';
import { DEMO_VALUES } from './values/demo-values';
/**
 * Renders a demo Plate editor whose initial content is looked up by `id`
 * in the DEMO_VALUES map.
 */
export default function Demo({ id }: { id: string }) {
  // Create an editor instance configured with the shared EditorKit plugins.
  const editor = usePlateEditor({
    plugins: EditorKit,
    value: DEMO_VALUES[id],
  });
  return (
    <Plate editor={editor}>
      <EditorContainer variant="demo">
        <Editor />
      </EditorContainer>
    </Plate>
  );
}
Features
- Context-aware command menu that adapts to cursor, text selection, and block selection workflows.
- Streaming Markdown/MDX insertion with table, column, and code block support powered by `streamInsertChunk`.
- Insert and chat review modes with undo-safe batching via `withAIBatch` and `tf.ai.undo()`.
- Block selection aware transforms to replace or append entire sections using `tf.aiChat.replaceSelection` and `tf.aiChat.insertBelow`.
- Direct integration with `@ai-sdk/react` so `api.aiChat.submit` can stream responses from Vercel AI SDK helpers.
- Suggestion and comment utilities that diff AI edits, accept/reject changes, and map AI feedback back to document ranges.
 
Kit Usage
Installation
The fastest way to add AI functionality is with the AIKit. It ships the configured AIPlugin, AIChatPlugin, Markdown streaming helpers, cursor overlay, and their Plate UI components.
'use client';
 
import { withAIBatch } from '@platejs/ai';
import {
  AIChatPlugin,
  AIPlugin,
  applyAISuggestions,
  streamInsertChunk,
  useChatChunk,
} from '@platejs/ai/react';
import { getPluginType, KEYS, PathApi } from 'platejs';
import { usePluginOption } from 'platejs/react';
 
import { AILoadingBar, AIMenu } from '@/components/ui/ai-menu';
import { AIAnchorElement, AILeaf } from '@/components/ui/ai-node';
 
import { useChat } from '../use-chat';
import { CursorOverlayKit } from './cursor-overlay-kit';
import { MarkdownKit } from './markdown-kit';
 
/**
 * AIChatPlugin preconfigured for the demo: wires the streaming command
 * endpoint, the floating menu / loading bar UI, the mod+j shortcut, and the
 * chunk handlers that stream model output into the document.
 */
export const aiChatPlugin = AIChatPlugin.extend({
  options: {
    chatOptions: {
      // Server route that proxies the model provider.
      api: '/api/ai/command',
      body: {},
    },
  },
  render: {
    afterContainer: AILoadingBar,
    afterEditable: AIMenu,
    node: AIAnchorElement,
  },
  shortcuts: { show: { keys: 'mod+j' } },
  useHooks: ({ editor, getOption }) => {
    useChat();

    const mode = usePluginOption(AIChatPlugin, 'mode');
    const toolName = usePluginOption(AIChatPlugin, 'toolName');
    useChatChunk({
      onChunk: ({ chunk, isFirst, nodes, text: content }) => {
        // On the first chunk of an insert response, create the anchor block
        // that streamed content flows into. (Fixed: was loose `==`; the rest
        // of this handler uses strict equality.)
        if (isFirst && mode === 'insert') {
          // Anchor insertion is not a user edit — keep it out of history.
          editor.tf.withoutSaving(() => {
            editor.tf.insertNodes(
              {
                children: [{ text: '' }],
                type: getPluginType(editor, KEYS.aiChat),
              },
              {
                // Insert directly below the top-level block containing the cursor.
                at: PathApi.next(editor.selection!.focus.path.slice(0, 1)),
              }
            );
          });
          editor.setOption(AIChatPlugin, 'streaming', true);
        }

        if (mode === 'insert' && nodes.length > 0) {
          // Batch streamed edits so tf.ai.undo() reverts them as one unit;
          // `split: isFirst` starts a fresh batch per response.
          withAIBatch(
            editor,
            () => {
              // Bail if streaming was cancelled (e.g. user pressed Escape).
              if (!getOption('streaming')) return;
              editor.tf.withScrolling(() => {
                streamInsertChunk(editor, chunk, {
                  textProps: {
                    // Mark streamed text with the AI mark so AILeaf styles it.
                    [getPluginType(editor, KEYS.ai)]: true,
                  },
                });
              });
            },
            { split: isFirst }
          );
        }

        // Edit-in-chat mode: diff the accumulated response into transient
        // suggestion nodes instead of inserting raw text.
        if (toolName === 'edit' && mode === 'chat') {
          withAIBatch(
            editor,
            () => {
              applyAISuggestions(editor, content);
            },
            {
              split: isFirst,
            }
          );
        }
      },
      onFinish: () => {
        // Reset streaming state so the next response starts from a clean slate.
        editor.setOption(AIChatPlugin, 'streaming', false);
        editor.setOption(AIChatPlugin, '_blockChunks', '');
        editor.setOption(AIChatPlugin, '_blockPath', null);
        editor.setOption(AIChatPlugin, '_mdxName', null);
      },
    });
  },
});
 
// Drop-in plugin kit: cursor overlay + Markdown streaming + AI mark + chat plugin.
export const AIKit = [
  ...CursorOverlayKit,
  ...MarkdownKit,
  AIPlugin.withComponent(AILeaf),
  aiChatPlugin,
];

- `AIMenu`: Floating command surface for prompts, tool shortcuts, and chat review.
- `AILoadingBar`: Displays streaming status at the editor container.
- `AIAnchorElement`: Invisible anchor node used to position the floating menu during streaming.
- `AILeaf`: Renders AI-marked text with subtle styling.
Add Kit
import { createPlateEditor } from 'platejs/react';
import { AIKit } from '@/components/editor/plugins/ai-kit';
 
const editor = createPlateEditor({
  plugins: [
    // ...otherPlugins,
    ...AIKit,
  ],
});

Add API Route
Expose a streaming command endpoint that proxies your model provider:
import type {
  ChatMessage,
  ToolName,
} from '@/components/editor/use-chat';
import type { NextRequest } from 'next/server';
 
import { createGateway } from '@ai-sdk/gateway';
import {
  type LanguageModel,
  type UIMessageStreamWriter,
  createUIMessageStream,
  createUIMessageStreamResponse,
  generateObject,
  streamObject,
  streamText,
  tool,
} from 'ai';
import { NextResponse } from 'next/server';
import { type SlateEditor, createSlateEditor, nanoid } from 'platejs';
import { z } from 'zod';
 
import { BaseEditorKit } from '@/components/editor/editor-base-kit';
import { markdownJoinerTransform } from '@/lib/markdown-joiner-transform';
 
import {
  getChooseToolPrompt,
  getCommentPrompt,
  getEditPrompt,
  getGeneratePrompt,
} from './prompts';
 
/**
 * Streaming AI command endpoint.
 *
 * Rehydrates a server-side editor from the client-provided `ctx` (children,
 * selection, last tool), classifies the request into a tool when the client
 * did not specify one, and streams the model response back as a UI message
 * stream.
 *
 * Expects a JSON body of `{ apiKey?, ctx, messages, model? }`.
 * Returns 401 when no API key is available, 500 on provider failure.
 */
export async function POST(req: NextRequest) {
  const {
    apiKey: key,
    ctx,
    messages: messagesRaw = [],
    model,
  } = await req.json();

  const { children, selection, toolName: toolNameParam } = ctx;

  // Rebuild the editor on the server so prompt helpers can read the document.
  const editor = createSlateEditor({
    plugins: BaseEditorKit,
    selection,
    value: children,
  });

  // Client-supplied key takes precedence; fall back to the server secret.
  const apiKey = key || process.env.AI_GATEWAY_API_KEY;

  if (!apiKey) {
    return NextResponse.json(
      { error: 'Missing AI Gateway API key.' },
      { status: 401 }
    );
  }

  const isSelecting = editor.api.isExpanded();

  const gatewayProvider = createGateway({
    apiKey,
  });

  try {
    const stream = createUIMessageStream<ChatMessage>({
      execute: async ({ writer }) => {
        let toolName = toolNameParam;

        // No explicit tool from the client: ask a fast model to classify the
        // request. `edit` is only offered when text is actually selected.
        if (!toolName) {
          const { object: AIToolName } = await generateObject({
            enum: isSelecting
              ? ['generate', 'edit', 'comment']
              : ['generate', 'comment'],
            model: gatewayProvider(model || 'google/gemini-2.5-flash'),
            output: 'enum',
            prompt: getChooseToolPrompt(messagesRaw),
          });

          // Tell the client which tool was chosen so it can adjust its UI.
          writer.write({
            data: AIToolName as ToolName,
            type: 'data-toolName',
          });

          toolName = AIToolName;
        }

        // Renamed from `stream` to avoid shadowing the outer UI message stream.
        const result = streamText({
          experimental_transform: markdownJoinerTransform(),
          model: gatewayProvider(model || 'openai/gpt-4o-mini'),
          // Not used — prepareStep supplies the real messages per tool.
          prompt: '',
          tools: {
            comment: getCommentTool(editor, {
              messagesRaw,
              model: gatewayProvider(model || 'google/gemini-2.5-flash'),
              writer,
            }),
          },
          prepareStep: async (step) => {
            if (toolName === 'comment') {
              // Force the comment tool so the model must call it.
              return {
                ...step,
                toolChoice: { toolName: 'comment', type: 'tool' },
              };
            }

            if (toolName === 'edit') {
              const editPrompt = getEditPrompt(editor, {
                isSelecting,
                messages: messagesRaw,
              });

              return {
                ...step,
                activeTools: [],
                messages: [
                  {
                    content: editPrompt,
                    role: 'user',
                  },
                ],
              };
            }

            if (toolName === 'generate') {
              const generatePrompt = getGeneratePrompt(editor, {
                messages: messagesRaw,
              });

              return {
                ...step,
                activeTools: [],
                messages: [
                  {
                    content: generatePrompt,
                    role: 'user',
                  },
                ],
                model: gatewayProvider(model || 'openai/gpt-4o-mini'),
              };
            }
          },
        });

        // Pipe model output into the UI stream; the route emits the finish part.
        writer.merge(result.toUIMessageStream({ sendFinish: false }));
      },
    });

    return createUIMessageStreamResponse({ stream });
  } catch (error) {
    // Log server-side instead of swallowing the failure silently.
    console.error('AI command route failed:', error);
    return NextResponse.json(
      { error: 'Failed to process AI request' },
      { status: 500 }
    );
  }
}
 
/**
 * Builds the `comment` tool: streams an array of AI-generated comments
 * (blockId + fragment + comment text) and forwards each one to the client
 * as a `data-comment` UI message part, followed by a final `finished` part.
 */
const getCommentTool = (
  editor: SlateEditor,
  {
    messagesRaw,
    model,
    writer,
  }: {
    messagesRaw: ChatMessage[];
    model: LanguageModel;
    writer: UIMessageStreamWriter<ChatMessage>;
  }
) => {
  return tool({
    description: 'Comment on the content',
    // The tool takes no arguments; all context comes from the editor state.
    inputSchema: z.object({}),
    execute: async () => {
      // Stream structured comment objects one element at a time.
      const { elementStream } = streamObject({
        model,
        output: 'array',
        prompt: getCommentPrompt(editor, {
          messages: messagesRaw,
        }),
        schema: z
          .object({
            blockId: z
              .string()
              .describe(
                'The id of the starting block. If the comment spans multiple blocks, use the id of the first block.'
              ),
            comment: z
              .string()
              .describe('A brief comment or explanation for this fragment.'),
            content: z
              .string()
              .describe(
                String.raw`The original document fragment to be commented on.It can be the entire block, a small part within a block, or span multiple blocks. If spanning multiple blocks, separate them with two \n\n.`
              ),
          })
          .describe('A single comment'),
      });

      // Forward each comment to the client as it arrives.
      for await (const comment of elementStream) {
        const commentDataId = nanoid();

        writer.write({
          id: commentDataId,
          data: {
            comment: comment,
            status: 'streaming',
          },
          type: 'data-comment',
        });
      }

      // Sentinel part so the client knows the comment stream is complete.
      writer.write({
        id: nanoid(),
        data: {
          comment: null,
          status: 'finished',
        },
        type: 'data-comment',
      });
    },
  });
};

Configure Environment
Set your AI Gateway key locally (replace with your provider secret if you are not using a gateway):
AI_GATEWAY_API_KEY="your-api-key"

Manual Usage
Installation
pnpm add @platejs/ai @platejs/markdown @platejs/selection @ai-sdk/react ai
@platejs/suggestion is optional but required for diff-based edit suggestions.
Add Plugins
import { createPlateEditor } from 'platejs/react';
import { AIChatPlugin, AIPlugin } from '@platejs/ai/react';
import { BlockSelectionPlugin } from '@platejs/selection/react';
import { MarkdownPlugin } from '@platejs/markdown';
 
export const editor = createPlateEditor({
  plugins: [
    BlockSelectionPlugin,
    MarkdownPlugin,
    AIPlugin,
    AIChatPlugin, // extended in the next step
  ],
});

- `BlockSelectionPlugin`: Enables multi-block selections that `AIChatPlugin` relies on for insert/replace transforms.
- `MarkdownPlugin`: Provides Markdown serialization used by streaming utilities.
- `AIPlugin`: Adds the AI mark and transforms for undoing AI batches.
- `AIChatPlugin`: Supplies the AI combobox, API helpers, and transforms.
Use AIPlugin.withComponent with your own element (or AILeaf) to highlight AI-generated text.
Configure AIChatPlugin
Extend AIChatPlugin to hook streaming and edits. The example mirrors the core logic from AIKit while keeping the UI headless.
import { AIChatPlugin, applyAISuggestions, streamInsertChunk, useChatChunk } from '@platejs/ai/react';
import { withAIBatch } from '@platejs/ai';
import { getPluginType, KEYS, PathApi } from 'platejs';
import { usePluginOption } from 'platejs/react';
 
/**
 * Headless AIChatPlugin configuration: same streaming logic as AIKit but
 * without the bundled UI components. Opens on a space trigger in an empty
 * block and streams responses into the document.
 */
export const aiChatPlugin = AIChatPlugin.extend({
  options: {
    chatOptions: {
      // Server route that proxies the model provider.
      api: '/api/ai/command',
      body: {
        model: 'openai/gpt-4o-mini',
      },
    },
    // Open the menu when space is typed at the start of an empty block.
    trigger: ' ',
    triggerPreviousCharPattern: /^\s?$/,
  },
  useHooks: ({ editor, getOption }) => {
    const mode = usePluginOption(AIChatPlugin, 'mode');
    const toolName = usePluginOption(AIChatPlugin, 'toolName');

    useChatChunk({
      onChunk: ({ chunk, isFirst, text }) => {
        if (mode === 'insert') {
          // First chunk: flag streaming and create the anchor block that the
          // streamed content will flow into.
          if (isFirst) {
            editor.setOption(AIChatPlugin, 'streaming', true);

            editor.tf.insertNodes(
              {
                children: [{ text: '' }],
                type: getPluginType(editor, KEYS.aiChat),
              },
              {
                // Insert below the top-level block containing the cursor.
                at: PathApi.next(editor.selection!.focus.path.slice(0, 1)),
              }
            );
          }

          // Bail if streaming was cancelled (e.g. user dismissed the menu).
          if (!getOption('streaming')) return;

          // Batch streamed edits so tf.ai.undo() reverts them as one unit.
          withAIBatch(
            editor,
            () => {
              streamInsertChunk(editor, chunk, {
                textProps: {
                  // Tag streamed text with the AI mark.
                  [getPluginType(editor, KEYS.ai)]: true,
                },
              });
            },
            { split: isFirst }
          );
        }

        // Edit-in-chat mode: diff the accumulated response into suggestions.
        if (toolName === 'edit' && mode === 'chat') {
          withAIBatch(
            editor,
            () => {
              applyAISuggestions(editor, text);
            },
            { split: isFirst }
          );
        }
      },
      onFinish: () => {
        // Reset streaming state for the next response.
        editor.setOption(AIChatPlugin, 'streaming', false);
        editor.setOption(AIChatPlugin, '_blockChunks', '');
        editor.setOption(AIChatPlugin, '_blockPath', null);
        editor.setOption(AIChatPlugin, '_mdxName', null);
      },
    });
  },
});

- `useChatChunk`: Watches `UseChatHelpers` status and yields incremental chunks.
- `streamInsertChunk`: Streams Markdown/MDX into the document, reusing the existing block when possible.
- `applyAISuggestions`: Converts responses into transient suggestion nodes when `toolName === 'edit'`.
- `withAIBatch`: Marks history batches so `tf.ai.undo()` only reverts the last AI-generated change.
Provide your own render components (toolbar button, floating menu, etc.) when you extend the plugin.
Build API Route
Handle api.aiChat.submit requests on the server. Each request includes the chat messages from @ai-sdk/react and a ctx payload that contains the editor children, current selection, and last toolName.
Complete API example
import { createGateway } from '@ai-sdk/gateway';
import { convertToCoreMessages, streamText } from 'ai';
import { createSlateEditor } from 'platejs';
 
import { BaseEditorKit } from '@/registry/components/editor/editor-base-kit';
import { markdownJoinerTransform } from '@/registry/lib/markdown-joiner-transform';
 
/**
 * Minimal streaming command route: rebuilds the editor from `ctx`, forwards
 * the chat messages to the gateway model, and streams the result back.
 */
export async function POST(req: Request) {
  const { apiKey, ctx, messages, model } = await req.json();

  // Rehydrate the document server-side for prompt building.
  const editor = createSlateEditor({
    plugins: BaseEditorKit,
    selection: ctx.selection,
    value: ctx.children,
  });

  // Client key wins; otherwise use the server secret.
  const gateway = createGateway({
    apiKey: apiKey ?? process.env.AI_GATEWAY_API_KEY!,
  });

  const result = streamText({
    experimental_transform: markdownJoinerTransform(),
    messages: convertToCoreMessages(messages),
    model: gateway(model ?? 'openai/gpt-4o-mini'),
    // Only the edit tool needs a rewriting system prompt.
    system: ctx.toolName === 'edit' ? 'You are an editor that rewrites user text.' : undefined,
  });

  // Stream tokens so useChat/useChatChunk can process them incrementally.
  return result.toDataStreamResponse();
}

- `ctx.children` and `ctx.selection` are rehydrated into a Slate editor so you can build rich prompts (see Prompt Templates).
- Forward provider settings (model, apiKey, temperature, gateway flags, etc.) through `chatOptions.body`; everything you add is passed verbatim in the JSON payload and can be read before calling `createGateway`.
- Always read secrets from the server. The client should only send opaque identifiers or short-lived tokens.
- Return a streaming response so `useChat` and `useChatChunk` can process tokens incrementally.
Connect useChat
Bridge the editor and your model endpoint with @ai-sdk/react. Store helpers on the plugin so transforms can reload, stop, or show chat state.
import { useEffect } from 'react';
 
import { type UIMessage, DefaultChatTransport } from 'ai';
import { type UseChatHelpers, useChat } from '@ai-sdk/react';
import { AIChatPlugin } from '@platejs/ai/react';
import { useEditorPlugin } from 'platejs/react';
 
type ChatMessage = UIMessage<{}, { toolName: 'comment' | 'edit' | 'generate'; comment?: unknown }>;
 
/**
 * Bridges @ai-sdk/react's useChat with the AIChatPlugin: stores the chat
 * helpers on the plugin so editor transforms can reload, stop, or inspect
 * chat state, and syncs the server-chosen tool back into plugin options.
 */
export const useEditorAIChat = () => {
  const { editor, setOption } = useEditorPlugin(AIChatPlugin);

  const chat = useChat<ChatMessage>({
    id: 'editor',
    api: '/api/ai/command',
    transport: new DefaultChatTransport(),
    onData(data) {
      // The server streams which tool it picked; mirror it into the plugin.
      if (data.type === 'data-toolName') {
        editor.setOption(AIChatPlugin, 'toolName', data.data);
      }
    },
  });

  // Keep the plugin's stored chat helpers in sync with the latest instance.
  useEffect(() => {
    setOption('chat', chat as UseChatHelpers<ChatMessage>);
  }, [chat, setOption]);

  return chat;
};

Combine the helper with `useEditorChat` to keep the floating menu anchored correctly:
import { useEditorChat } from '@platejs/ai/react';
 
useEditorChat({
  chat,
  onOpenChange: (open) => {
    if (!open) chat.stop?.();
  },
});

Now you can submit prompts programmatically:
import { AIChatPlugin } from '@platejs/ai/react';
 
editor.getApi(AIChatPlugin).aiChat.submit('', {
  prompt: {
    default: 'Continue the document after {block}',
    selecting: 'Rewrite {selection} with a clearer tone',
  },
  toolName: 'generate',
});

Prompt Templates
Client Prompting
- `api.aiChat.submit` accepts an `EditorPrompt`. Provide a string, an object with `default`/`selecting`/`blockSelecting`, or a function that receives `{ editor, isSelecting, isBlockSelecting }`. The helper `getEditorPrompt` in the client turns that value into the final string.
- Combine it with `replacePlaceholders(editor, template, { prompt })` to expand `{editor}`, `{block}`, `{blockSelection}`, and `{prompt}` using Markdown generated by `@platejs/ai`.
import { replacePlaceholders } from '@platejs/ai';
 
editor.getApi(AIChatPlugin).aiChat.submit('Improve tone', {
  prompt: ({ isSelecting }) =>
    isSelecting
      ? replacePlaceholders(editor, 'Rewrite {blockSelection} using a friendly tone.')
      : replacePlaceholders(editor, 'Continue {block} with two more sentences.'),
  toolName: 'generate',
});

Server Prompting
The demo backend in apps/www/src/app/api/ai/command reconstructs the editor from ctx and builds structured prompts:
- `getChooseToolPrompt` decides whether the request is `generate`, `edit`, or `comment`.
- `getGeneratePrompt`, `getEditPrompt`, and `getCommentPrompt` transform the current editor state into instructions tailored to each mode.
- Utility helpers like `getMarkdown`, `getMarkdownWithSelection`, and `buildStructuredPrompt` (see `apps/www/src/app/api/ai/command/prompts.ts`) make it easy to embed block ids, selections, and MDX tags into the LLM request.
Augment the payload you send from the client to fine-tune server prompts:
editor.setOption(aiChatPlugin, 'chatOptions', {
  api: '/api/ai/command',
  body: {
    model: 'openai/gpt-4o-mini',
    tone: 'playful',
    temperature: 0.4,
  },
});

Everything under `chatOptions.body` arrives in the route handler, letting you swap providers, pass user-specific metadata, or branch into different prompt templates.
Keyboard Shortcuts
| Key | Description | 
|---|---|
| Space | Open the AI menu in an empty block (cursor mode) | 
| Cmd + J | Show the AI menu (set via shortcuts.show) | 
| Escape | Hide the AI menu and stop streaming | 
Streaming
The streaming utilities keep complex layouts intact while responses arrive:
- `streamInsertChunk(editor, chunk, options)` deserializes Markdown chunks, updates the current block in place, and appends new blocks as needed. Use `textProps`/`elementProps` to tag streamed nodes (e.g., mark AI text).
- `streamDeserializeMd` and `streamDeserializeInlineMd` provide lower-level access if you need to control streaming for custom node types.
- `streamSerializeMd` mirrors the editor state so you can detect drift between streamed content and the response buffer.
Reset the internal _blockChunks, _blockPath, and _mdxName options when streaming finishes to start the next response from a clean slate.
Streaming Example
'use client';
import {
  type HTMLAttributes,
  useCallback,
  useReducer,
  useRef,
  useState,
} from 'react';
import { AIChatPlugin, streamInsertChunk } from '@platejs/ai/react';
import { deserializeMd } from '@platejs/markdown';
import {
  ChevronFirstIcon,
  ChevronLastIcon,
  PauseIcon,
  PlayIcon,
  RotateCcwIcon,
} from 'lucide-react';
import { getPluginType, KEYS } from 'platejs';
import { Plate, usePlateEditor, usePlateViewEditor } from 'platejs/react';
import { Button } from '@/components/ui/button';
import { cn } from '@/lib/utils';
import { EditorKit } from '@/components/editor/editor-kit';
import { CopilotKit } from '@/components/editor/plugins/copilot-kit';
import { MarkdownJoiner } from '@/lib/markdown-joiner-transform';
import { Editor, EditorContainer, EditorView } from '@/components/ui/editor';
import { BaseEditorKit } from '../components/editor/editor-base-kit';
const testScenarios = {
  // Basic markdown with complete elements
  columns: [
    'paragraph\n\n<column',
    '_group',
    '>\n',
    ' ',
    ' <',
    'column',
    ' width',
    '="',
    '33',
    '.',
    '333',
    '333',
    '333',
    '333',
    '336',
    '%">\n',
    '   ',
    ' ',
    '1',
    '\n',
    ' ',
    ' </',
    'column',
    '>\n',
    ' ',
    ' <',
    'column',
    ' width',
    '="',
    '33',
    '.',
    '333',
    '333',
    '333',
    '333',
    '336',
    '%">\n',
    '   ',
    ' ',
    '2',
    '\n',
    ' ',
    ' </',
    'column',
    '>\n',
    ' ',
    ' <',
    'column',
    ' width',
    '="',
    '33',
    '.',
    '333',
    '333',
    '333',
    '333',
    '336',
    '%">\n',
    '   ',
    ' ',
    '3',
    '\n',
    ' ',
    ' </',
    'column',
    '>\n',
    '</',
    'column',
    '_group',
    '>\n\nparagraph',
  ],
  links: [
    '[Link ',
    'to OpenA',
    'I](https://www.openai.com)\n\n',
    '[Link ',
    'to Google',
    'I](https://ww',
    'w.google.com/1',
    '11',
    '22',
    'xx',
    'yy',
    'zz',
    'aa',
    'bb',
    'cc',
    'dd',
    'ee',
    '33)\n\n',
    '[False Positive',
    '11',
    '22',
    '33',
    '44',
    '55',
    '66',
    '77',
    '88',
    '99',
    '100',
  ],
  lists: ['1.', ' number 1\n', '- ', 'List B\n', '-', ' [x] ', 'Task C'],
  listWithImage: [
    '## ',
    'Links ',
    'and ',
    'Images\n\n',
    '- [Link ',
    'to OpenA',
    'I](https://www.openai.com)\n',
    '- \n\n',
  ],
  nestedStructureBlock: [
    '```',
    'javascript',
    '\n',
    'import',
    ' React',
    ' from',
    " '",
    'react',
    "';\n",
    'import',
    ' {',
    ' Plate',
    ' }',
    ' from',
    " '@",
    'ud',
    'ecode',
    '/',
    'plate',
    "';\n\n",
    'const',
    ' Basic',
    'Editor',
    ' =',
    ' ()',
    ' =>',
    ' {\n',
    ' ',
    ' return',
    ' (\n',
    '   ',
    ' <',
    'Plate',
    '>\n',
    '     ',
    ' {/*',
    ' Add',
    ' your',
    ' plugins',
    ' and',
    ' components',
    ' here',
    ' */}\n',
    '   ',
    ' </',
    'Plate',
    '>\n',
    ' ',
    ' );\n',
    '};\n\n',
    'export',
    ' default',
    ' Basic',
    'Editor',
    ';\n',
    '```',
  ],
  table: [
    '| Feature          |',
    ' Plate',
    '.js',
    '                                     ',
    ' ',
    '| Slate.js                                     ',
    ' ',
    '|\n|------------------',
    '|--------------------------------',
    '---------------',
    '|--------------------------------',
    '---------------',
    '|\n| Purpose         ',
    ' ',
    '| Rich text editor framework',
    '                   ',
    ' ',
    '| Rich text editor framework',
    '                   ',
    ' ',
    '|\n| Flexibility     ',
    ' ',
    '| Highly customizable',
    ' with',
    ' plugins',
    '             ',
    ' ',
    '| Highly customizable',
    ' with',
    ' plugins',
    '             ',
    ' ',
    '|\n| Community       ',
    ' ',
    '| Growing community support',
    '                    ',
    ' ',
    '| Established community',
    ' support',
    '                ',
    ' ',
    '|\n| Documentation   ',
    ' ',
    '| Comprehensive documentation',
    ' available',
    '        ',
    ' ',
    '| Comprehensive documentation',
    ' available',
    '        ',
    ' ',
    '|\n| Performance     ',
    ' ',
    '| Optimized for performance',
    ' with',
    ' large',
    ' documents',
    '| Good performance, but',
    ' may',
    ' require',
    ' optimization',
    '|\n| Integration     ',
    ' ',
    '| Easy integration with',
    ' React',
    '                  ',
    ' ',
    '| Easy integration with',
    ' React',
    '                  ',
    ' ',
    '|\n| Use Cases       ',
    ' ',
    '| Suitable for complex',
    ' editing',
    ' needs',
    '           ',
    ' ',
    '| Suitable for complex',
    ' editing',
    ' needs',
    '           ',
    ' ',
    '\n\n',
    'Paragraph ',
    'should ',
    'exist ',
    'from ',
    'table',
  ],
};
export default function MarkdownStreamingDemo() {
  const [selectedScenario, setSelectedScenario] =
    useState<keyof typeof testScenarios>('columns');
  const [activeIndex, setActiveIndex] = useState<number>(0);
  const isPauseRef = useRef(false);
  const streamSessionRef = useRef(0);
  const [, forceUpdate] = useReducer((x) => x + 1, 0);
  const [streaming, setStreaming] = useState(false);
  const [isPlateStatic, setIsPlateStatic] = useState(false);
  const [speed, setSpeed] = useState<number | null>(null);
  const editor = usePlateEditor(
    {
      plugins: [...CopilotKit, ...EditorKit],
      value: [],
    },
    []
  );
  const editorStatic = usePlateViewEditor(
    {
      plugins: BaseEditorKit,
    },
    []
  );
  const currentChunks = testScenarios[selectedScenario];
  const transformedCurrentChunks = transformedChunks(currentChunks);
  const onStreaming = useCallback(async () => {
    setStreaming(true);
    streamSessionRef.current += 1;
    const sessionId = streamSessionRef.current;
    isPauseRef.current = false;
    setActiveIndex(0);
    // editor.tf.setValue([]);
    editor.setOption(AIChatPlugin, 'streaming', false);
    editor.setOption(AIChatPlugin, '_blockChunks', '');
    editor.setOption(AIChatPlugin, '_blockPath', null);
    for (let i = 0; i < transformedCurrentChunks.length; i++) {
      while (isPauseRef.current) {
        if (sessionId !== streamSessionRef.current) return;
        await new Promise((resolve) => setTimeout(resolve, 100));
      }
      if (sessionId !== streamSessionRef.current) return;
      setActiveIndex(i + 1);
      const chunk = transformedCurrentChunks[i];
      streamInsertChunk(editor, chunk.chunk, {
        textProps: {
          [getPluginType(editor, KEYS.ai)]: true,
        },
      });
      await new Promise((resolve) =>
        setTimeout(resolve, speed ?? chunk.delayInMs)
      );
      if (sessionId !== streamSessionRef.current) return;
    }
    setStreaming(false);
  }, [editor, transformedCurrentChunks, speed]);
  const onStreamingStatic = useCallback(async () => {
    let output = '';
    setStreaming(true);
    streamSessionRef.current += 1;
    for (const chunk of transformedCurrentChunks) {
      output += chunk.chunk;
      editorStatic.children = deserializeMd(editorStatic, output);
      setActiveIndex((prev) => prev + 1);
      forceUpdate();
      await new Promise((resolve) =>
        setTimeout(resolve, speed ?? chunk.delayInMs)
      );
    }
    setStreaming(false);
  }, [editorStatic, speed, transformedCurrentChunks]);
  const onReset = useCallback(() => {
    setActiveIndex(0);
    if (isPlateStatic) {
      editorStatic.children = [];
      forceUpdate();
    } else {
      editor.tf.setValue([]);
      editor.setOption(AIChatPlugin, 'streaming', false);
      editor.setOption(AIChatPlugin, '_blockChunks', '');
      editor.setOption(AIChatPlugin, '_blockPath', null);
    }
  }, [editor, editorStatic, isPlateStatic]);
  const onNavigate = useCallback(
    (targetIndex: number) => {
      // Check if navigation is possible
      if (targetIndex < 0 || targetIndex > transformedCurrentChunks.length)
        return;
      if (isPlateStatic) {
        let output = '';
        for (const chunk of transformedCurrentChunks.slice(0, targetIndex)) {
          output += chunk.chunk;
        }
        editorStatic.children = deserializeMd(editorStatic, output);
        setActiveIndex(targetIndex);
        forceUpdate();
      } else {
        editor.tf.setValue([]);
        editor.setOption(AIChatPlugin, 'streaming', false);
        editor.setOption(AIChatPlugin, '_blockChunks', '');
        editor.setOption(AIChatPlugin, '_blockPath', null);
        for (const chunk of transformedCurrentChunks.slice(0, targetIndex)) {
          streamInsertChunk(editor, chunk.chunk, {
            textProps: {
              [getPluginType(editor, KEYS.ai)]: true,
            },
          });
        }
        setActiveIndex(targetIndex);
      }
    },
    [editor, editorStatic, isPlateStatic, transformedCurrentChunks]
  );
  const onPrev = useCallback(
    () => onNavigate(activeIndex - 1),
    [onNavigate, activeIndex]
  );
  const onNext = useCallback(
    () => onNavigate(activeIndex + 1),
    [onNavigate, activeIndex]
  );
  return (
    <section className="h-full overflow-y-auto p-20">
      <div className="mb-10 rounded bg-gray-100 p-4">
        {/* Scenario Selection */}
        <div className="mb-4">
          <span className="mb-2 block text-sm font-medium">Test Scenario:</span>
          <select
            className="w-64 rounded border px-3 py-2"
            value={selectedScenario}
            onChange={(e) => {
              setSelectedScenario(e.target.value as keyof typeof testScenarios);
              setActiveIndex(0);
              editor.tf.setValue([]);
            }}
          >
            {Object.entries(testScenarios).map(([key]) => (
              <option key={key} value={key}>
                {key
                  .replace(/([A-Z])/g, ' $1')
                  .replace(/^./, (str) => str.toUpperCase())}
              </option>
            ))}
          </select>
        </div>
        {/* Control Buttons */}
        <div className="mb-4 flex items-center gap-2">
          <Button onClick={onPrev}>
            <ChevronFirstIcon />
          </Button>
          <Button
            onClick={() => {
              if (streaming) {
                isPauseRef.current = !isPauseRef.current;
                forceUpdate();
              } else {
                if (isPlateStatic) {
                  onStreamingStatic();
                } else {
                  onStreaming();
                }
              }
            }}
          >
            {isPauseRef.current || !streaming ? <PlayIcon /> : <PauseIcon />}
          </Button>
          <Button onClick={onNext}>
            <ChevronLastIcon />
          </Button>
          <Button onClick={() => onReset()}>
            <RotateCcwIcon />
          </Button>
          <Button
            onClick={() => {
              setIsPlateStatic(!isPlateStatic);
              onReset();
            }}
          >
            Switch to {isPlateStatic ? 'Plate' : 'PlateStatic'}
          </Button>
        </div>
        <div className="mb-4 flex items-center gap-2">
          <span className="block text-sm font-medium">Speed:</span>
          <select
            className="rounded border px-2 py-1"
            value={speed ?? 'default'}
            onChange={(e) =>
              setSpeed(
                e.target.value === 'default' ? null : Number(e.target.value)
              )
            }
          >
            {['default', 10, 100, 200].map((ms) => (
              <option key={ms} value={ms}>
                {ms === 'default'
                  ? 'Default'
                  : ms === 10
                    ? 'Fast(10ms)'
                    : ms === 100
                      ? 'Medium(100ms)'
                      : ms === 200
                        ? 'Slow(200ms)'
                        : `${ms}ms`}
              </option>
            ))}
          </select>
          <span className="text-sm text-muted-foreground">
            The default speed is 10ms, but it adjusts to 100ms when streaming a
            table or code block.
          </span>
        </div>
        <div className="my-4 h-2 w-full rounded bg-gray-200">
          <div
            className="h-2 rounded bg-primary transition-all duration-300"
            style={{
              width: `${(activeIndex / (transformedCurrentChunks.length || 1)) * 100}%`,
            }}
          />
        </div>
        <span className="text-sm text-muted-foreground">
          PlateStatic offers more robust and flawless performance.
        </span>
      </div>
      <div className="my-2 flex gap-10">
        <div className="w-1/2">
          <h3 className="mb-2 font-semibold">
            Transformed Chunks ({activeIndex}/{transformedCurrentChunks.length})
          </h3>
          <Tokens
            activeIndex={activeIndex}
            chunkClick={onNavigate}
            chunks={splitChunksByLinebreak(
              transformedCurrentChunks.map((c) => c.chunk)
            )}
          />
        </div>
        <div className="w-1/2">
          <h3 className="mb-2 font-semibold">Editor Output</h3>
          {isPlateStatic ? (
            <EditorView
              className="h-[500px] overflow-y-auto rounded border"
              editor={editorStatic}
            />
          ) : (
            <>
              <Plate editor={editor}>
                <EditorContainer className="h-[500px] overflow-y-auto rounded border">
                  <Editor
                    variant="demo"
                    className="pb-[20vh]"
                    placeholder="Type something..."
                    spellCheck={false}
                  />
                </EditorContainer>
              </Plate>
            </>
          )}
        </div>
      </div>
      <h2 className="mt-8 mb-4 text-xl font-semibold">Raw Token Comparison</h2>
      <div className="my-2 flex gap-10">
        <div className="w-1/2">
          <h3 className="mb-2 font-semibold">Original Chunks</h3>
          <Tokens
            activeIndex={0}
            chunks={splitChunksByLinebreak(currentChunks)}
          />
        </div>
        <div className="w-1/2">
          <h3 className="mb-2 font-semibold">Raw Markdown Text</h3>
          <textarea
            className={cn(
              'h-[500px] w-full overflow-y-auto rounded border p-4 font-mono text-sm'
            )}
            readOnly
            value={currentChunks.join('')}
          />
        </div>
      </div>
    </section>
  );
}
/** A group of streamed chunks, closed by a trailing run of newlines. */
type TChunks = {
  chunks: {
    index: number;
    text: string;
  }[];
  linebreaks: number;
};
/**
 * Groups a flat list of streamed text chunks into runs, closing a group
 * whenever a chunk ends with one or more newline characters.
 *
 * @param chunks - Raw chunk strings in stream order.
 * @returns Groups of chunks; `linebreaks` is the length of the newline run
 *   that closed the group (0 for a trailing group without one).
 */
function splitChunksByLinebreak(chunks: string[]): TChunks[] {
  const result: TChunks[] = [];
  let current: { index: number; text: string }[] = [];
  for (const [i, chunk] of chunks.entries()) {
    current.push({ index: i, text: chunk });
    // Match the run of newlines at the very end of this chunk, if any.
    const match = /(\n+)$/.exec(chunk);
    if (match) {
      result.push({
        chunks: current,
        linebreaks: match[1].length,
      });
      current = [];
    }
  }
  // Flush a trailing group that never ended in a linebreak.
  if (current.length > 0) {
    result.push({
      chunks: current,
      linebreaks: 0,
    });
  }
  return result;
}
// A processed chunk paired with the delay to apply before emitting it.
type TChunk = { chunk: string; delayInMs: number };
/** Runs raw chunks through MarkdownJoiner, keeping only non-empty output. */
const transformedChunks = (chunks: string[]): TChunk[] => {
  const joiner = new MarkdownJoiner();
  const output: TChunk[] = [];
  chunks.forEach((raw) => {
    const text = joiner.processText(raw);
    if (text) {
      output.push({ chunk: text, delayInMs: joiner.delayInMs });
    }
  });
  // Emit whatever the joiner is still buffering at end of stream.
  const buffered = joiner.flush();
  if (buffered) {
    output.push({ chunk: buffered, delayInMs: joiner.delayInMs });
  }
  return output;
};
/**
 * Renders chunk groups as inline tokens with whitespace made visible
 * (⤶ for newlines, ␣ for spaces). Tokens before `activeIndex` are
 * highlighted; clicking a token notifies `chunkClick` with the 1-based
 * chunk index so the caller can navigate the stream.
 */
const Tokens = ({
  activeIndex,
  chunkClick,
  chunks,
  ...props
}: {
  activeIndex: number;
  chunks: TChunks[];
  chunkClick?: (index: number) => void;
} & HTMLAttributes<HTMLDivElement>) => {
  return (
    <div
      className="my-1 h-[500px] overflow-y-auto rounded bg-gray-100 p-4 font-mono"
      {...props}
    >
      {chunks.map((chunk, index) => (
        <div key={index} className="py-1">
          {chunk.chunks.map((c, j) => {
            // Make whitespace visible: newlines become ⤶, spaces become ␣.
            const visible = c.text
              .replaceAll('\n', '⤶')
              .replaceAll(' ', '␣');
            return (
              <span
                key={j}
                className={cn(
                  'mx-1 inline-block rounded border p-1',
                  // Highlight tokens already consumed by the stream.
                  activeIndex > 0 && c.index < activeIndex && 'bg-amber-400'
                )}
                onClick={() => chunkClick?.(c.index + 1)}
              >
                {visible}
              </span>
            );
          })}
        </div>
      ))}
    </div>
  );
};
Plate Plus
- Combobox menu with free-form prompt input
- Additional trigger methods:
  - Block menu button
  - Slash command menu
- Beautifully crafted UI
Hooks
useAIChatEditor
Registers an auxiliary editor for chat previews and deserializes Markdown with block-level memoization.
import { usePlateEditor } from 'platejs/react';
import { MarkdownPlugin } from '@platejs/markdown';
import { AIChatPlugin, useAIChatEditor } from '@platejs/ai/react';
 
const aiPreviewEditor = usePlateEditor({
  plugins: [MarkdownPlugin, AIChatPlugin],
});
 
useAIChatEditor(aiPreviewEditor, responseMarkdown, {
  parser: { exclude: ['space'] },
});
useEditorChat
Connects UseChatHelpers to editor state so the AI menu knows whether to anchor to cursor, selection, or block selection.
useChatChunk
Streams chat responses chunk-by-chunk and gives you full control over insertion.
Utilities
withAIBatch
Groups editor operations into a single history batch and flags it as AI-generated so tf.ai.undo() removes it safely.
applyAISuggestions
Diffs AI output against stored chatNodes and writes transient suggestion nodes. Requires @platejs/suggestion.
Complementary helpers allow you to finalize or discard the diff:
- acceptAISuggestions(editor): Converts transient suggestion nodes into permanent suggestions.
- rejectAISuggestions(editor): Removes transient suggestion nodes and clears suggestion marks.
aiCommentToRange
Maps streamed comment metadata back to document ranges so comments can be inserted automatically.
findTextRangeInBlock
Fuzzy-search helper that uses LCS to find the closest match inside a block.
getEditorPrompt
Generates prompts that respect cursor, selection, or block selection states.
replacePlaceholders
Replaces placeholders like {editor}, {blockSelection}, and {prompt} with serialized Markdown.
Plugins
AIPlugin
Adds an ai mark to streamed text and exposes transforms to remove AI nodes or undo the last AI batch. Use .withComponent to render AI-marked text with a custom component.
AIChatPlugin
Main plugin that powers the AI menu, chat state, and transforms.
Configuration options include the trigger character (default ' '), a trigger pattern (default /^\s?$/), a query callback that can return false to cancel opening in specific contexts, chat options forwarded to useChat so API calls can access them, the default mode ('insert'), and open/streaming flags (both default false).
API
api.aiChat.submit(input, options?)
Submits a prompt to your model provider. When mode is omitted it defaults to 'insert' for a collapsed cursor and 'chat' otherwise.
api.aiChat.reset(options?)
Clears chat state, removes AI nodes, and optionally undoes the last AI batch.
api.aiChat.node(options?)
Retrieves the first AI node that matches the specified criteria.
api.aiChat.reload()
Replays the last prompt using the stored UseChatHelpers, restoring the original selection or block selection before resubmitting.
api.aiChat.stop()
Stops streaming and calls chat.stop.
api.aiChat.show()
Opens the AI menu, clears previous chat messages, and resets tool state.
api.aiChat.hide(options?)
Closes the AI menu, optionally undoing the last AI batch and refocusing the editor.
Transforms
tf.aiChat.accept()
Accepts the latest response. In insert mode it removes AI marks and places the caret at the end of the streamed content. In chat mode it applies the pending suggestions.
tf.aiChat.insertBelow(sourceEditor, options?)
Inserts the chat preview (sourceEditor) below the current selection or block selection.
tf.aiChat.replaceSelection(sourceEditor, options?)
Replaces the current selection or block selection with the chat preview.
tf.aiChat.removeAnchor(options?)
Removes the temporary anchor node used to position the AI menu.
tf.ai.insertNodes(nodes, options?)
Inserts nodes tagged with the AI mark at the current selection (or options.target).
tf.ai.removeMarks(options?)
Clears the AI mark from matching nodes.
tf.ai.removeNodes(options?)
Removes text nodes that are marked as AI-generated.
tf.ai.undo()
Undoes the latest history entry if it was created by withAIBatch and contained AI content. Clears the paired redo entry to avoid re-applying AI output.
Customization
Adding Custom AI Commands
'use client';
 
import * as React from 'react';
 
import {
  AIChatPlugin,
  AIPlugin,
  useEditorChat,
  useLastAssistantMessage,
} from '@platejs/ai/react';
import { getTransientCommentKey } from '@platejs/comment';
import { BlockSelectionPlugin, useIsSelecting } from '@platejs/selection/react';
import { getTransientSuggestionKey } from '@platejs/suggestion';
import { Command as CommandPrimitive } from 'cmdk';
import {
  Album,
  BadgeHelp,
  BookOpenCheck,
  Check,
  CornerUpLeft,
  FeatherIcon,
  ListEnd,
  ListMinus,
  ListPlus,
  Loader2Icon,
  PauseIcon,
  PenLine,
  SmileIcon,
  Wand,
  X,
} from 'lucide-react';
import {
  type NodeEntry,
  type SlateEditor,
  isHotkey,
  KEYS,
  NodeApi,
  TextApi,
} from 'platejs';
import {
  useEditorPlugin,
  useFocusedLast,
  useHotkeys,
  usePluginOption,
} from 'platejs/react';
import { type PlateEditor, useEditorRef } from 'platejs/react';
 
import { Button } from '@/components/ui/button';
import {
  Command,
  CommandGroup,
  CommandItem,
  CommandList,
} from '@/components/ui/command';
import {
  Popover,
  PopoverAnchor,
  PopoverContent,
} from '@/components/ui/popover';
import { cn } from '@/lib/utils';
import { commentPlugin } from '@/components/editor/plugins/comment-kit';
 
import { AIChatEditor } from './ai-chat-editor';
 
/**
 * Floating AI menu anchored to the editor. Depending on chat status, tool
 * name, and mode, it renders a streaming indicator, a free-form prompt
 * input, or the grouped command list.
 */
export function AIMenu() {
  const { api, editor } = useEditorPlugin(AIChatPlugin);
  const mode = usePluginOption(AIChatPlugin, 'mode');
  const toolName = usePluginOption(AIChatPlugin, 'toolName');

  const streaming = usePluginOption(AIChatPlugin, 'streaming');
  const isSelecting = useIsSelecting();
  const isFocusedLast = useFocusedLast();
  // Only open when this editor was the last one focused.
  const open = usePluginOption(AIChatPlugin, 'open') && isFocusedLast;
  const [value, setValue] = React.useState('');

  const [input, setInput] = React.useState('');

  const chat = usePluginOption(AIChatPlugin, 'chat');

  const { messages, status } = chat;
  const [anchorElement, setAnchorElement] = React.useState<HTMLElement | null>(
    null
  );

  // Text of the last assistant message, shown in the chat preview editor.
  const content = useLastAssistantMessage()?.parts.find(
    (part) => part.type === 'text'
  )?.text;

  React.useEffect(() => {
    if (streaming) {
      // Re-anchor the popover to the AI anchor node once streaming starts.
      const anchor = api.aiChat.node({ anchor: true });
      // Deferred so the anchor node's DOM element exists before lookup.
      setTimeout(() => {
        const anchorDom = editor.api.toDOMNode(anchor![0])!;
        setAnchorElement(anchorDom);
      }, 0);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [streaming]);

  const setOpen = (open: boolean) => {
    if (open) {
      api.aiChat.show();
    } else {
      api.aiChat.hide();
    }
  };

  // Anchor the popover to the given element and open the menu.
  const show = (anchorElement: HTMLElement) => {
    setAnchorElement(anchorElement);
    setOpen(true);
  };

  // Pick the anchor depending on how the menu was opened: block selection,
  // collapsed cursor, or text selection.
  useEditorChat({
    chat,
    onOpenBlockSelection: (blocks: NodeEntry[]) => {
      show(editor.api.toDOMNode(blocks.at(-1)![0])!);
    },
    onOpenChange: (open) => {
      if (!open) {
        setAnchorElement(null);
        setInput('');
      }
    },
    onOpenCursor: () => {
      const [ancestor] = editor.api.block({ highest: true })!;

      // Select the current block when the cursor sits mid-document in a
      // non-empty block, so transforms target it.
      if (!editor.api.isAt({ end: true }) && !editor.api.isEmpty(ancestor)) {
        editor
          .getApi(BlockSelectionPlugin)
          .blockSelection.set(ancestor.id as string);
      }

      show(editor.api.toDOMNode(ancestor)!);
    },
    onOpenSelection: () => {
      show(editor.api.toDOMNode(editor.api.blocks().at(-1)![0])!);
    },
  });

  useHotkeys('esc', () => {
    api.aiChat.stop();

    // remove when you implement the route /api/ai/command
    (chat as any)._abortFakeStream();
  });

  const isLoading = status === 'streaming' || status === 'submitted';

  React.useEffect(() => {
    // After an edit-in-chat finishes, re-anchor to the last transient
    // suggestion node, falling back to the last selected block.
    if (toolName === 'edit' && mode === 'chat' && !isLoading) {
      let anchorNode = editor.api.node({
        at: [],
        reverse: true,
        match: (n) => !!n[KEYS.suggestion] && !!n[getTransientSuggestionKey()],
      });

      if (!anchorNode) {
        anchorNode = editor
          .getApi(BlockSelectionPlugin)
          .blockSelection.getNodes({ selectionFallback: true, sort: true })
          .at(-1);
      }

      if (!anchorNode) return;

      const block = editor.api.block({ at: anchorNode[1] });
      setAnchorElement(editor.api.toDOMNode(block![0]!)!);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [isLoading]);

  // These states are rendered by AILoadingBar instead of this menu.
  if (isLoading && mode === 'insert') return null;

  if (toolName === 'comment') return null;

  if (toolName === 'edit' && mode === 'chat' && isLoading) return null;

  return (
    <Popover open={open} onOpenChange={setOpen} modal={false}>
      <PopoverAnchor virtualRef={{ current: anchorElement! }} />

      <PopoverContent
        className="border-none bg-transparent p-0 shadow-none"
        style={{
          width: anchorElement?.offsetWidth,
        }}
        onEscapeKeyDown={(e) => {
          e.preventDefault();

          api.aiChat.hide();
        }}
        align="center"
        side="bottom"
      >
        <Command
          className="w-full rounded-lg border shadow-md"
          value={value}
          onValueChange={setValue}
        >
          {mode === 'chat' &&
            isSelecting &&
            content &&
            toolName === 'generate' && <AIChatEditor content={content} />}

          {isLoading ? (
            <div className="flex grow items-center gap-2 p-2 text-sm text-muted-foreground select-none">
              <Loader2Icon className="size-4 animate-spin" />
              {messages.length > 1 ? 'Editing...' : 'Thinking...'}
            </div>
          ) : (
            <CommandPrimitive.Input
              className={cn(
                'flex h-9 w-full min-w-0 border-input bg-transparent px-3 py-1 text-base transition-[color,box-shadow] outline-none placeholder:text-muted-foreground md:text-sm dark:bg-input/30',
                'aria-invalid:border-destructive aria-invalid:ring-destructive/20 dark:aria-invalid:ring-destructive/40',
                'border-b focus-visible:ring-transparent'
              )}
              value={input}
              onKeyDown={(e) => {
                // Backspace on an empty input closes the menu.
                if (isHotkey('backspace')(e) && input.length === 0) {
                  e.preventDefault();
                  api.aiChat.hide();
                }
                // Enter submits unless a command item is highlighted.
                if (isHotkey('enter')(e) && !e.shiftKey && !value) {
                  e.preventDefault();
                  void api.aiChat.submit(input);
                  setInput('');
                }
              }}
              onValueChange={setInput}
              placeholder="Ask AI anything..."
              data-plate-focus
              autoFocus
            />
          )}

          {!isLoading && (
            <CommandList>
              <AIMenuItems
                input={input}
                setInput={setInput}
                setValue={setValue}
              />
            </CommandList>
          )}
        </Command>
      </PopoverContent>
    </Popover>
  );
}
 
/**
 * Which menu UI is shown: `*Command` before a response exists,
 * `*Suggestion` after; prefixed by whether text is selected.
 */
type EditorChatState =
  | 'cursorCommand'
  | 'cursorSuggestion'
  | 'selectionCommand'
  | 'selectionSuggestion';
 
// Inline SVG icon: a comment bubble with a sparkle badge (no lucide
// equivalent is imported here).
const AICommentIcon = () => (
  <svg
    fill="none"
    height="24"
    stroke="currentColor"
    strokeLinecap="round"
    strokeLinejoin="round"
    strokeWidth="2"
    viewBox="0 0 24 24"
    width="24"
    xmlns="http://www.w3.org/2000/svg"
  >
    <path d="M0 0h24v24H0z" fill="none" stroke="none" />
    <path d="M8 9h8" />
    <path d="M8 13h4.5" />
    <path d="M10 19l-1 -1h-3a3 3 0 0 1 -3 -3v-8a3 3 0 0 1 3 -3h12a3 3 0 0 1 3 3v4.5" />
    <path d="M17.8 20.817l-2.172 1.138a.392 .392 0 0 1 -.568 -.41l.415 -2.411l-1.757 -1.707a.389 .389 0 0 1 .217 -.665l2.428 -.352l1.086 -2.193a.392 .392 0 0 1 .702 0l1.086 2.193l2.428 .352a.39 .39 0 0 1 .217 .665l-1.757 1.707l.414 2.41a.39 .39 0 0 1 -.567 .411l-2.172 -1.138z" />
  </svg>
);
 
/**
 * Registry of AI menu commands. Each command's `onSelect` receives
 * `{ aiEditor, editor, input }` and either submits a prompt via
 * `api.aiChat.submit` or applies a transform to the editor.
 */
const aiChatItems = {
  accept: {
    icon: <Check />,
    label: 'Accept',
    value: 'accept',
    onSelect: ({ aiEditor, editor }) => {
      const { mode, toolName } = editor.getOptions(AIChatPlugin);

      // Chat-generated content replaces the selection with the preview.
      if (mode === 'chat' && toolName === 'generate') {
        return editor
          .getTransforms(AIChatPlugin)
          .aiChat.replaceSelection(aiEditor);
      }

      editor.getTransforms(AIChatPlugin).aiChat.accept();
      editor.tf.focus({ edge: 'end' });
    },
  },
  comment: {
    icon: <AICommentIcon />,
    label: 'Comment',
    value: 'comment',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        mode: 'insert',
        prompt:
          'Please comment on the following content and provide reasonable and meaningful feedback.',
        toolName: 'comment',
      });
    },
  },
  continueWrite: {
    icon: <PenLine />,
    label: 'Continue writing',
    value: 'continueWrite',
    onSelect: ({ editor, input }) => {
      const ancestorNode = editor.api.block({ highest: true });

      if (!ancestorNode) return;

      const isEmpty = NodeApi.string(ancestorNode[0]).trim().length === 0;

      // Empty block: seed from the whole document; otherwise continue the
      // current block.
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        mode: 'insert',
        prompt: isEmpty
          ? `<Document>
{editor}
</Document>
Start writing a new paragraph AFTER <Document> ONLY ONE SENTENCE`
          : 'Continue writing AFTER <Block> ONLY ONE SENTENCE. DONT REPEAT THE TEXT.',
        toolName: 'generate',
      });
    },
  },
  discard: {
    icon: <X />,
    label: 'Discard',
    shortcut: 'Escape',
    value: 'discard',
    onSelect: ({ editor }) => {
      // Undo the AI batch, then close the menu.
      editor.getTransforms(AIPlugin).ai.undo();
      editor.getApi(AIChatPlugin).aiChat.hide();
    },
  },
  emojify: {
    icon: <SmileIcon />,
    label: 'Emojify',
    value: 'emojify',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Emojify',
        toolName: 'edit',
      });
    },
  },
  explain: {
    icon: <BadgeHelp />,
    label: 'Explain',
    value: 'explain',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: {
          default: 'Explain {editor}',
          selecting: 'Explain',
        },
        toolName: 'generate',
      });
    },
  },
  fixSpelling: {
    icon: <Check />,
    label: 'Fix spelling & grammar',
    value: 'fixSpelling',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Fix spelling and grammar',
        toolName: 'edit',
      });
    },
  },
  generateMarkdownSample: {
    icon: <BookOpenCheck />,
    label: 'Generate Markdown sample',
    value: 'generateMarkdownSample',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Generate a markdown sample',
        toolName: 'generate',
      });
    },
  },
  generateMdxSample: {
    icon: <BookOpenCheck />,
    label: 'Generate MDX sample',
    value: 'generateMdxSample',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Generate a mdx sample',
        toolName: 'generate',
      });
    },
  },
  improveWriting: {
    icon: <Wand />,
    label: 'Improve writing',
    value: 'improveWriting',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Improve the writing',
        toolName: 'edit',
      });
    },
  },
  insertBelow: {
    icon: <ListEnd />,
    label: 'Insert below',
    value: 'insertBelow',
    onSelect: ({ aiEditor, editor }) => {
      // format: 'none' works around an issue when inserting tables.
      void editor
        .getTransforms(AIChatPlugin)
        .aiChat.insertBelow(aiEditor, { format: 'none' });
    },
  },
  makeLonger: {
    icon: <ListPlus />,
    label: 'Make longer',
    value: 'makeLonger',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Make longer',
        toolName: 'edit',
      });
    },
  },
  makeShorter: {
    icon: <ListMinus />,
    label: 'Make shorter',
    value: 'makeShorter',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Make shorter',
        toolName: 'edit',
      });
    },
  },
  replace: {
    icon: <Check />,
    label: 'Replace selection',
    value: 'replace',
    onSelect: ({ aiEditor, editor }) => {
      void editor.getTransforms(AIChatPlugin).aiChat.replaceSelection(aiEditor);
    },
  },
  simplifyLanguage: {
    icon: <FeatherIcon />,
    label: 'Simplify language',
    value: 'simplifyLanguage',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        prompt: 'Simplify the language',
        toolName: 'edit',
      });
    },
  },
  summarize: {
    icon: <Album />,
    label: 'Add a summary',
    value: 'summarize',
    onSelect: ({ editor, input }) => {
      void editor.getApi(AIChatPlugin).aiChat.submit(input, {
        mode: 'insert',
        prompt: {
          default: 'Summarize {editor}',
          selecting: 'Summarize',
        },
        toolName: 'generate',
      });
    },
  },
  tryAgain: {
    icon: <CornerUpLeft />,
    label: 'Try again',
    value: 'tryAgain',
    onSelect: ({ editor }) => {
      void editor.getApi(AIChatPlugin).aiChat.reload();
    },
  },
} satisfies Record<
  string,
  {
    icon: React.ReactNode;
    label: string;
    value: string;
    component?: React.ComponentType<{ menuState: EditorChatState }>;
    filterItems?: boolean;
    items?: { label: string; value: string }[];
    shortcut?: string;
    onSelect?: ({
      aiEditor,
      editor,
      input,
    }: {
      aiEditor: SlateEditor;
      editor: PlateEditor;
      input: string;
    }) => void;
  }
>;
 
// Command groups offered for each menu state. Array order here is the
// render order in the menu.
const menuStateItems: Record<
  EditorChatState,
  {
    items: (typeof aiChatItems)[keyof typeof aiChatItems][];
    heading?: string;
  }[]
> = {
  cursorCommand: [
    {
      items: [
        aiChatItems.comment,
        aiChatItems.generateMdxSample,
        aiChatItems.generateMarkdownSample,
        aiChatItems.continueWrite,
        aiChatItems.summarize,
        aiChatItems.explain,
      ],
    },
  ],
  cursorSuggestion: [
    {
      items: [aiChatItems.accept, aiChatItems.discard, aiChatItems.tryAgain],
    },
  ],
  selectionCommand: [
    {
      items: [
        aiChatItems.improveWriting,
        aiChatItems.comment,
        aiChatItems.emojify,
        aiChatItems.makeLonger,
        aiChatItems.makeShorter,
        aiChatItems.fixSpelling,
        aiChatItems.simplifyLanguage,
      ],
    },
  ],
  selectionSuggestion: [
    {
      items: [
        aiChatItems.accept,
        aiChatItems.discard,
        aiChatItems.insertBelow,
        aiChatItems.tryAgain,
      ],
    },
  ],
};
 
/**
 * Renders the grouped AI command list for the current menu state and keeps
 * the command palette's highlighted value in sync with the first item.
 */
export const AIMenuItems = ({
  input,
  setInput,
  setValue,
}: {
  input: string;
  setInput: (value: string) => void;
  setValue: (value: string) => void;
}) => {
  const editor = useEditorRef();
  const { messages } = usePluginOption(AIChatPlugin, 'chat');
  const aiEditor = usePluginOption(AIChatPlugin, 'aiEditor')!;
  const isSelecting = useIsSelecting();

  // Suggestion states once a response exists; command states before that.
  const menuState = React.useMemo(() => {
    if (messages && messages.length > 0) {
      return isSelecting ? 'selectionSuggestion' : 'cursorSuggestion';
    }

    return isSelecting ? 'selectionCommand' : 'cursorCommand';
  }, [isSelecting, messages]);

  // Lookup into the module-level constant is already referentially stable
  // per state, so no memoization is needed.
  const menuGroups = menuStateItems[menuState];

  // Preselect the first item whenever the visible groups change.
  React.useEffect(() => {
    if (menuGroups.length > 0 && menuGroups[0].items.length > 0) {
      setValue(menuGroups[0].items[0].value);
    }
  }, [menuGroups, setValue]);

  return (
    <>
      {menuGroups.map((group, index) => (
        <CommandGroup key={index} heading={group.heading}>
          {group.items.map((menuItem) => (
            <CommandItem
              key={menuItem.value}
              className="[&_svg]:text-muted-foreground"
              value={menuItem.value}
              onSelect={() => {
                menuItem.onSelect?.({
                  aiEditor,
                  editor: editor,
                  input,
                });
                setInput('');
              }}
            >
              {menuItem.icon}
              <span>{menuItem.label}</span>
            </CommandItem>
          ))}
        </CommandGroup>
      ))}
    </>
  );
};
 
/**
 * Bottom-center status bar shown while the AI is streaming (with a Stop
 * button) and, for the comment tool, an Accept/Reject bar once the
 * response is ready.
 */
export function AILoadingBar() {
  const editor = useEditorRef();

  const toolName = usePluginOption(AIChatPlugin, 'toolName');
  const chat = usePluginOption(AIChatPlugin, 'chat');
  const mode = usePluginOption(AIChatPlugin, 'mode');

  const { status } = chat;

  const { api } = useEditorPlugin(AIChatPlugin);

  const isLoading = status === 'streaming' || status === 'submitted';

  // Finalize or roll back AI-generated comments, then close the menu.
  const handleComments = (type: 'accept' | 'reject') => {
    if (type === 'accept') {
      // Drop the transient flag so comments become permanent.
      editor.tf.unsetNodes([getTransientCommentKey()], {
        at: [],
        match: (n) => TextApi.isText(n) && !!n[KEYS.comment],
      });
    }

    if (type === 'reject') {
      editor
        .getTransforms(commentPlugin)
        .comment.unsetMark({ transient: true });
    }

    api.aiChat.hide();
  };

  useHotkeys('esc', () => {
    api.aiChat.stop();

    // remove when you implement the route /api/ai/command
    (chat as any)._abortFakeStream();
  });

  // Streaming states that AIMenu does not render are shown here instead.
  if (
    isLoading &&
    (mode === 'insert' ||
      toolName === 'comment' ||
      (toolName === 'edit' && mode === 'chat'))
  ) {
    return (
      <div
        className={cn(
          'absolute bottom-4 left-1/2 z-20 flex -translate-x-1/2 items-center gap-3 rounded-md border border-border bg-muted px-3 py-1.5 text-sm text-muted-foreground shadow-md transition-all duration-300'
        )}
      >
        <span className="h-4 w-4 animate-spin rounded-full border-2 border-muted-foreground border-t-transparent" />
        <span>{status === 'submitted' ? 'Thinking...' : 'Writing...'}</span>
        <Button
          size="sm"
          variant="ghost"
          className="flex items-center gap-1 text-xs"
          onClick={() => api.aiChat.stop()}
        >
          <PauseIcon className="h-4 w-4" />
          Stop
          <kbd className="ml-1 rounded bg-border px-1 font-mono text-[10px] text-muted-foreground shadow-sm">
            Esc
          </kbd>
        </Button>
      </div>
    );
  }

  // Comment tool finished: offer Accept/Reject for the streamed comments.
  if (toolName === 'comment' && status === 'ready') {
    return (
      <div
        className={cn(
          'absolute bottom-4 left-1/2 z-50 flex -translate-x-1/2 flex-col items-center gap-0 rounded-xl border border-border/50 bg-popover p-1 text-sm text-muted-foreground shadow-xl backdrop-blur-sm',
          'p-3'
        )}
      >
        {/* Header with controls */}
        <div className="flex w-full items-center justify-between gap-3">
          <div className="flex items-center gap-5">
            <Button
              size="sm"
              disabled={isLoading}
              onClick={() => handleComments('accept')}
            >
              Accept
            </Button>

            <Button
              size="sm"
              disabled={isLoading}
              onClick={() => handleComments('reject')}
            >
              Reject
            </Button>
          </div>
        </div>
      </div>
    );
  }

  return null;
}Extend the aiChatItems map to add new commands. Each command receives { aiEditor, editor, input } and can dispatch api.aiChat.submit with custom prompts or transforms.
Simple Custom Command
summarizeInBullets: {
  icon: <ListIcon />,
  label: 'Summarize in bullets',
  value: 'summarizeInBullets',
  onSelect: ({ editor }) => {
    void editor.getApi(AIChatPlugin).aiChat.submit('', {
      prompt: 'Summarize the current selection using bullet points',
      toolName: 'generate',
    });
  },
},
Command with Complex Logic
generateTOC: {
  icon: <BookIcon />,
  label: 'Generate table of contents',
  value: 'generateTOC',
  onSelect: ({ editor }) => {
    const headings = editor.api.nodes({
      match: (n) => ['h1', 'h2', 'h3'].includes(n.type as string),
    });
 
    const prompt =
      headings.length === 0
        ? 'Create a realistic table of contents for this document'
        : 'Generate a table of contents that reflects the existing headings';
 
    void editor.getApi(AIChatPlugin).aiChat.submit('', {
      mode: 'insert',
      prompt,
      toolName: 'generate',
    });
  },
},
The menu automatically switches between command and suggestion states:
- cursorCommand: Cursor is collapsed and no response yet.
- selectionCommand: Text is selected and no response yet.
- cursorSuggestion/selectionSuggestion: A response exists, so actions like Accept, Try Again, or Insert Below are shown.
Use toolName ('generate' | 'edit' | 'comment') to control how streaming hooks process the response. For example, 'edit' enables diff-based suggestions, and 'comment' allows you to convert streamed comments into discussion threads with aiCommentToRange.
On This Page
FeaturesKit UsageInstallationAdd KitAdd API RouteConfigure EnvironmentManual UsageInstallationAdd PluginsConfigure AIChatPluginBuild API RouteConnect useChatPrompt TemplatesClient PromptingServer PromptingKeyboard ShortcutsStreamingStreaming ExamplePlate PlusHooksuseAIChatEditoruseEditorChatuseChatChunkUtilitieswithAIBatchapplyAISuggestionsaiCommentToRangefindTextRangeInBlockgetEditorPromptreplacePlaceholdersPluginsAIPluginAIChatPluginAPIapi.aiChat.submit(input, options?)api.aiChat.reset(options?)api.aiChat.node(options?)api.aiChat.reload()api.aiChat.stop()api.aiChat.show()api.aiChat.hide(options?)Transformstf.aiChat.accept()tf.aiChat.insertBelow(sourceEditor, options?)tf.aiChat.replaceSelection(sourceEditor, options?)tf.aiChat.removeAnchor(options?)tf.ai.insertNodes(nodes, options?)tf.ai.removeMarks(options?)tf.ai.removeNodes(options?)tf.ai.undo()CustomizationAdding Custom AI CommandsSimple Custom CommandCommand with Complex Logic