Replace with correct return type for streamUI (#1911)
jeremyphilemon committed Jun 11, 2024
1 parent a4baf34 commit 4d5fbdb
Showing 1 changed file with 265 additions and 1 deletion.
266 changes: 265 additions & 1 deletion content/docs/07-reference/ai-sdk-rsc/01-stream-ui.mdx
@@ -338,7 +338,271 @@ A helper function to create a streamable UI from LLM providers. This function is

## Returns

It can return any valid ReactNode.
<PropertiesTable
content={[
{
name: 'value',
type: 'ReactNode',
description: 'The user interface based on the stream output.',
},
{
name: 'text',
type: 'Promise<string>',
description:
'The full text that has been generated. Resolved when the response is finished.',
},
{
name: 'toolCalls',
type: 'Promise<ToolCall[]>',
description:
'The tool calls that have been executed. Resolved when the response is finished.',
},
{
name: 'toolResults',
type: 'Promise<ToolResult[]>',
description:
'The tool results that have been generated. Resolved when all tool executions are finished.',
},
{
name: 'finishReason',
type: "Promise<'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other' | 'unknown'>",
description:
'The reason why the generation finished. Resolved when the response is finished.',
},
{
name: 'usage',
type: 'Promise<TokenUsage>',
description:
'The token usage of the generated text. Resolved when the response is finished.',
properties: [
{
type: 'TokenUsage',
parameters: [
{
name: 'promptTokens',
type: 'number',
description: 'The total number of tokens in the prompt.',
},
{
name: 'completionTokens',
type: 'number',
description: 'The total number of tokens in the completion.',
},
{
name: 'totalTokens',
type: 'number',
description: 'The total number of tokens used (promptTokens + completionTokens).',
},
],
},
],
},
{
name: 'rawResponse',
type: 'RawResponse',
optional: true,
description: 'Optional raw response data.',
properties: [
{
type: 'RawResponse',
parameters: [
{
name: 'headers',
optional: true,
type: 'Record<string, string>',
description: 'Response headers.',
},
],
},
],
},
{
name: 'warnings',
type: 'Warning[] | undefined',
description:
'Warnings from the model provider (e.g. unsupported settings).',
},
{
name: 'textStream',
type: 'AsyncIterable<string> & ReadableStream<string>',
description:
'A text stream that returns only the generated text deltas. You can use it as either an AsyncIterable or a ReadableStream. When an error occurs, the stream will throw the error.',
},
{
name: 'fullStream',
type: 'AsyncIterable<TextStreamPart> & ReadableStream<TextStreamPart>',
description:
'A stream with all events, including text deltas, tool calls, tool results, and errors. You can use it as either an AsyncIterable or a ReadableStream. When an error occurs, the stream will throw the error.',
properties: [
{
type: 'TextStreamPart',
parameters: [
{
name: 'type',
type: "'text-delta'",
description: 'The type to identify the object as a text delta.',
},
{
name: 'textDelta',
type: 'string',
description: 'The text delta.',
},
],
},
{
type: 'TextStreamPart',
parameters: [
{
name: 'type',
type: "'tool-call'",
description: 'The type to identify the object as a tool call.',
},
{
name: 'toolCallId',
type: 'string',
description: 'The id of the tool call.',
},
{
name: 'toolName',
type: 'string',
description:
'The name of the tool, which typically would be the name of the function.',
},
{
name: 'args',
type: 'object based on zod schema',
description:
'Parameters generated by the model to be used by the tool.',
},
],
},
{
type: 'TextStreamPart',
description: 'The result of a tool call execution.',
parameters: [
{
name: 'type',
type: "'tool-result'",
description: 'The type to identify the object as a tool result.',
},
{
name: 'toolCallId',
type: 'string',
description: 'The id of the tool call.',
},
{
name: 'toolName',
type: 'string',
description:
'The name of the tool, which typically would be the name of the function.',
},
{
name: 'args',
type: 'object based on zod schema',
description:
'Parameters generated by the model to be used by the tool.',
},
{
name: 'result',
type: 'any',
description:
'The result returned by the tool after execution has completed.',
},
],
},
{
type: 'TextStreamPart',
parameters: [
{
name: 'type',
type: "'error'",
description: 'The type to identify the object as an error.',
},
{
name: 'error',
type: 'Error',
description:
'Describes the error that may have occurred during execution.',
},
],
},
{
type: 'TextStreamPart',
parameters: [
{
name: 'type',
type: "'finish'",
description: 'The type to identify the object as a finish event.',
},
{
name: 'finishReason',
type: "'stop' | 'length' | 'content-filter' | 'tool-calls' | 'error' | 'other' | 'unknown'",
description: 'The reason the model finished generating the text.',
},
{
name: 'usage',
type: 'TokenUsage',
description: 'The token usage of the generated text.',
properties: [
{
type: 'TokenUsage',
parameters: [
{
name: 'promptTokens',
type: 'number',
description: 'The total number of tokens in the prompt.',
},
{
name: 'completionTokens',
type: 'number',
description:
'The total number of tokens in the completion.',
},
{
name: 'totalTokens',
type: 'number',
description: 'The total number of tokens used (promptTokens + completionTokens).',
},
],
},
],
},
],
},
],
},
{
name: 'toAIStream',
type: '(callbacks?: AIStreamCallbacksAndOptions) => AIStream',
description:
'Converts the result to an `AIStream` object that is compatible with `StreamingTextResponse`. It can be used with the `useChat` and `useCompletion` hooks.',
},
{
name: 'pipeAIStreamToResponse',
type: '(response: ServerResponse, init?: { headers?: Record<string, string>; status?: number }) => void',
description:
'Writes stream data output to a Node.js response-like object. It sets a `Content-Type` header to `text/plain; charset=utf-8` and writes each stream data part as a separate chunk.',
},
{
name: 'pipeTextStreamToResponse',
type: '(response: ServerResponse, init?: { headers?: Record<string, string>; status?: number }) => void',
description:
'Writes text delta output to a Node.js response-like object. It sets a `Content-Type` header to `text/plain; charset=utf-8` and writes each text delta as a separate chunk.',
},
{
name: 'toAIStreamResponse',
type: '(init?: ResponseInit) => Response',
description:
'Converts the result to a streamed response object with a stream data part stream. It can be used with the `useChat` and `useCompletion` hooks.',
},
{
name: 'toTextStreamResponse',
type: '(init?: ResponseInit) => Response',
description:
'Creates a simple text stream response. Each text delta is encoded as UTF-8 and sent as a separate chunk. Non-text-delta events are ignored.',
},
]}
/>
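
The promises and streams above can be consumed alongside the rendered `value`. Below is a minimal sketch of a server action doing so, based only on the fields listed in this table; the provider import (`@ai-sdk/openai`), model id, prompt, and the `getAnswerUI` name are illustrative placeholders rather than part of this reference.

```tsx
'use server';

import { streamUI } from 'ai/rsc';
// Assumed provider for this sketch; any AI SDK language model could be used.
import { openai } from '@ai-sdk/openai';

export async function getAnswerUI() {
  const result = await streamUI({
    model: openai('gpt-4-turbo'),
    prompt: 'Explain React Server Components in one paragraph.',
    text: ({ content }) => <p>{content}</p>,
  });

  // These promises resolve once the response has finished streaming,
  // so log them in the background instead of awaiting them here.
  Promise.all([result.usage, result.finishReason]).then(
    ([usage, finishReason]) => {
      console.log({ ...usage, finishReason });
    },
  );

  // `value` is the ReactNode that streams to the client as it is generated.
  return result.value;
}
```

Because the promises only settle when the model finishes, awaiting them before returning `value` would hold back the UI until generation completes instead of streaming it incrementally.

Similarly, `fullStream` yields the `TextStreamPart` shapes documented above as a discriminated union, so a `switch` on `part.type` narrows each event. A sketch of draining it:

```tsx
// Sketch: consuming fullStream as an AsyncIterable. Draining it before
// returning `value` would delay the streamed UI, so in practice this would
// run in the background or in a context that does not return UI.
for await (const part of result.fullStream) {
  switch (part.type) {
    case 'text-delta':
      process.stdout.write(part.textDelta);
      break;
    case 'tool-call':
      console.log('tool call:', part.toolName, part.args);
      break;
    case 'tool-result':
      console.log('tool result:', part.toolName, part.result);
      break;
    case 'error':
      console.error('stream error:', part.error);
      break;
    case 'finish':
      console.log('finished:', part.finishReason, part.usage);
      break;
  }
}
```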

## Examples
