15 changes: 15 additions & 0 deletions src/app-bridge.test.ts
@@ -215,6 +215,21 @@ describe("App <-> AppBridge integration", () => {
expect(receivedCancellations[0]).toEqual({});
});

it("tool notifications work with default no-op handlers", async () => {
// Don't set any custom handlers - use defaults
await app.connect(appTransport);

// These should not throw (default handlers silently accept them)
// Just verify they complete without error
await bridge.sendToolInput({ arguments: {} });
await bridge.sendToolInputPartial({ arguments: {} });
await bridge.sendToolResult({ content: [{ type: "text", text: "ok" }] });
await bridge.sendToolCancelled({});

// If we got here without throwing, the test passes
expect(true).toBe(true);
});

it("setHostContext triggers app.onhostcontextchanged", async () => {
const receivedContexts: unknown[] = [];
app.onhostcontextchanged = (params) => {
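The test above relies on the default no-op handlers that the `App` constructor now installs. As a rough usage sketch (not part of the diff), an embedded app would override them like this, assuming the handler parameters mirror the notification payloads sent in the test (`{ arguments }` for tool input, `{ content }` for tool results) and that `App` is imported from this package's entry point:

```typescript
const app = new App({ name: "MyApp", version: "1.0.0" }, {});

// Override the default no-op notification handlers before connecting.
app.ontoolinput = (params) => console.log("tool input", params);
app.ontoolinputpartial = (params) => console.log("partial tool input", params);
app.ontoolresult = (params) => console.log("tool result", params);
app.ontoolcancelled = () => console.log("tool call cancelled");

await app.connect();
```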
97 changes: 74 additions & 23 deletions src/app.ts
@@ -16,7 +16,6 @@ import {
PingRequestSchema,
} from "@modelcontextprotocol/sdk/types.js";
import { AppNotification, AppRequest, AppResult } from "./types";
import { PostMessageTransport } from "./message-transport";
import {
LATEST_PROTOCOL_VERSION,
McpUiAppCapabilities,
@@ -47,8 +46,12 @@ import {
McpUiRequestDisplayModeResultSchema,
} from "./types";
import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
import { PostMessageTransport } from "./message-transport";
import { OpenAITransport, isOpenAIEnvironment } from "./openai/transport.js";

export { PostMessageTransport } from "./message-transport";
export { OpenAITransport, isOpenAIEnvironment } from "./openai/transport";
export * from "./openai/types";
export * from "./types";
export {
applyHostStyleVariables,
@@ -101,7 +104,7 @@ export const RESOURCE_MIME_TYPE = "text/html;profile=mcp-app";
*
* @see ProtocolOptions from @modelcontextprotocol/sdk for inherited options
*/
type AppOptions = ProtocolOptions & {
export type AppOptions = ProtocolOptions & {
/**
* Automatically report size changes to the host using ResizeObserver.
*
@@ -112,6 +115,19 @@ type AppOptions = ProtocolOptions & {
* @default true
*/
autoResize?: boolean;

/**
* Enable experimental OpenAI compatibility.
*
* When enabled (default), the App will auto-detect the environment:
* - If `window.openai` exists → use OpenAI Apps SDK
* - Otherwise → use MCP Apps protocol via PostMessageTransport
*
* Set to `false` to force MCP-only mode.
*
* @default true
*/
experimentalOAICompatibility?: boolean;
};

type RequestHandlerExtra = Parameters<
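As a usage sketch (not part of the diff), opting out of the OpenAI auto-detection would look roughly like this, assuming `App` and `PostMessageTransport` are imported from this package's entry point:

```typescript
// Force MCP-only mode: skip window.openai detection entirely.
const app = new App(
  { name: "MyApp", version: "1.0.0" },
  {},
  { autoResize: true, experimentalOAICompatibility: false },
);

// With the flag disabled, a bare connect() always falls back to
// PostMessageTransport; passing one explicitly has the same effect.
await app.connect(new PostMessageTransport(window.parent, window.parent));
```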
@@ -220,7 +236,10 @@ export class App extends Protocol<AppRequest, AppNotification, AppResult> {
constructor(
private _appInfo: Implementation,
private _capabilities: McpUiAppCapabilities = {},
private options: AppOptions = { autoResize: true },
private options: AppOptions = {
autoResize: true,
experimentalOAICompatibility: true,
},
) {
super(options);

@@ -229,9 +248,13 @@ export class App extends Protocol<AppRequest, AppNotification, AppResult> {
return {};
});

// Set up default handler to update _hostContext when notifications arrive.
// Users can override this by setting onhostcontextchanged.
// Set up default handlers for notifications.
// Users can override these by setting the corresponding on* properties.
this.onhostcontextchanged = () => {};
this.ontoolinput = () => {};
this.ontoolinputpartial = () => {};
this.ontoolresult = () => {};
this.ontoolcancelled = () => {};
}

/**
@@ -989,50 +1012,73 @@ export class App extends Protocol<AppRequest, AppNotification, AppResult> {
return () => resizeObserver.disconnect();
}

/**
* Create the default transport based on detected platform.
* @internal
*/
private createDefaultTransport(): Transport {
const experimentalOAI = this.options?.experimentalOAICompatibility ?? true;
if (experimentalOAI && isOpenAIEnvironment()) {
return new OpenAITransport();
}
return new PostMessageTransport(window.parent, window.parent);
}

/**
* Establish connection with the host and perform initialization handshake.
*
* This method performs the following steps:
* 1. Connects the transport layer
* 2. Sends `ui/initialize` request with app info and capabilities
* 3. Receives host capabilities and context in response
* 4. Sends `ui/notifications/initialized` notification
* 5. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default)
* 1. Auto-detects platform if no transport is provided
* 2. Connects the transport layer
* 3. Sends `ui/initialize` request with app info and capabilities
* 4. Receives host capabilities and context in response
* 5. Sends `ui/notifications/initialized` notification
* 6. Sets up auto-resize using {@link setupSizeChangedNotifications} if enabled (default)
* 7. For OpenAI mode: delivers initial tool input/result from window.openai
*
* If initialization fails, the connection is automatically closed and an error
* is thrown.
*
* @param transport - Transport layer (typically PostMessageTransport)
* @param transport - Optional transport layer. If not provided, a default is
* created based on the environment and the `experimentalOAICompatibility` option:
* - option enabled (default) and `window.openai` exists → uses {@link OpenAITransport}
* - otherwise → uses {@link PostMessageTransport}
* @param options - Request options for the initialize request
*
* @throws {Error} If initialization fails or connection is lost
*
* @example Connect with PostMessageTransport
* @example Auto-detect platform (recommended)
* ```typescript
* const app = new App(
* { name: "MyApp", version: "1.0.0" },
* {}
* );
*
* try {
* await app.connect(new PostMessageTransport(window.parent));
* console.log("Connected successfully!");
* } catch (error) {
* console.error("Failed to connect:", error);
* }
* // Auto-detects: OpenAI if window.openai exists, MCP otherwise
* await app.connect();
* ```
*
* @example Explicit MCP transport
* ```typescript
* await app.connect(new PostMessageTransport(window.parent));
* ```
*
* @example Explicit OpenAI transport
* ```typescript
* await app.connect(new OpenAITransport());
* ```
*
* @see {@link McpUiInitializeRequest} for the initialization request structure
* @see {@link McpUiInitializedNotification} for the initialized notification
* @see {@link PostMessageTransport} for the typical transport implementation
* @see {@link PostMessageTransport} for MCP-compatible hosts
* @see {@link OpenAITransport} for OpenAI/ChatGPT hosts
*/
override async connect(
transport: Transport = new PostMessageTransport(
window.parent,
window.parent,
),
transport?: Transport,
options?: RequestOptions,
): Promise<void> {
transport ??= this.createDefaultTransport();

await super.connect(transport);

try {
@@ -1064,6 +1110,11 @@ export class App extends Protocol<AppRequest, AppNotification, AppResult> {
if (this.options?.autoResize) {
this.setupSizeChangedNotifications();
}

// For OpenAI mode: deliver initial state from window.openai
if (transport instanceof OpenAITransport) {
transport.deliverInitialState();
}
} catch (error) {
// Disconnect if initialization fails.
void this.close();
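Taken together, the connection flow on an OpenAI host would look roughly like the sketch below (not part of the diff). It assumes that handlers registered before `connect()` receive the initial tool input/result replayed by `deliverInitialState()`; `renderResult` is a hypothetical app function:

```typescript
const app = new App({ name: "MyApp", version: "1.0.0" }, {});

// Register handlers first so the initial state replay is not missed.
app.ontoolinput = (params) => console.log("initial or streamed input", params);
app.ontoolresult = (params) => renderResult(params); // renderResult is hypothetical

// No transport argument: window.openai is detected, OpenAITransport is used,
// and deliverInitialState() runs after the ui/initialize handshake completes.
await app.connect();
```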