diff --git a/src/data/nav/aitransport.ts b/src/data/nav/aitransport.ts
index 1f8dfa12f3..c1806d1b65 100644
--- a/src/data/nav/aitransport.ts
+++ b/src/data/nav/aitransport.ts
@@ -92,6 +92,10 @@ export default {
name: 'OpenAI token streaming - message per response',
link: '/docs/guides/ai-transport/openai-message-per-response',
},
+ {
+ name: 'OpenAI citations',
+ link: '/docs/guides/ai-transport/openai-citations',
+ },
{
name: 'Anthropic token streaming - message per token',
link: '/docs/guides/ai-transport/anthropic-message-per-token',
@@ -100,6 +104,10 @@ export default {
name: 'Anthropic token streaming - message per response',
link: '/docs/guides/ai-transport/anthropic-message-per-response',
},
+ {
+ name: 'Anthropic citations',
+ link: '/docs/guides/ai-transport/anthropic-citations',
+ },
],
},
],
diff --git a/src/pages/docs/guides/ai-transport/anthropic-citations.mdx b/src/pages/docs/guides/ai-transport/anthropic-citations.mdx
new file mode 100644
index 0000000000..39a3d9a233
--- /dev/null
+++ b/src/pages/docs/guides/ai-transport/anthropic-citations.mdx
@@ -0,0 +1,588 @@
+---
+title: "Guide: Attach citations to Anthropic responses using message annotations"
+meta_description: "Attach source citations to AI responses from the Anthropic Messages API using Ably message annotations."
+meta_keywords: "AI, citations, Anthropic, Claude, Messages API, AI transport, Ably, realtime, message annotations, source attribution"
+---
+
+This guide shows you how to attach source citations to AI responses from Anthropic's [Messages API](https://docs.anthropic.com/en/api/messages) using Ably [message annotations](/docs/messages/annotations). When Anthropic provides citations from documents or search results, you can publish them as annotations on Ably messages, enabling clients to display source references alongside AI responses in realtime.
+
+Using Ably to distribute citations enables you to separate citation metadata from response content, display citation summaries updated in realtime, and retrieve detailed citation data on demand.
+
+
+
+## Prerequisites
+
+To follow this guide, you need:
+- Node.js 20 or higher
+- An Anthropic API key
+- An Ably API key
+
+Useful links:
+- [Anthropic Citations documentation](https://docs.anthropic.com/en/docs/build-with-claude/citations)
+- [Ably JavaScript SDK getting started](/docs/getting-started/javascript)
+
+Create a new NPM package, which will contain the publisher and subscriber code:
+
+
+```shell
+mkdir ably-anthropic-citations && cd ably-anthropic-citations
+npm init -y
+```
+
+
+Install the required packages using NPM:
+
+
+```shell
+npm install @anthropic-ai/sdk@^0.71 ably@^2
+```
+
+
+
+
+Export your Anthropic API key to the environment, which will be used later in the guide by the Anthropic SDK:
+
+
+```shell
+export ANTHROPIC_API_KEY="your_api_key_here"
+```
+
+
+## Step 1: Enable message annotations
+
+Message annotations require "Message annotations, updates, deletes and appends" to be enabled in a [channel rule](/docs/channels#rules) associated with the channel.
+
+
+
+To enable the channel rule:
+
+1. Go to the [Ably dashboard](https://www.ably.com/dashboard) and select your app.
+2. Navigate to the "Configuration" > "Rules" section from the left-hand navigation bar.
+3. Choose "Add new rule".
+4. Enter a channel name or namespace pattern (e.g. `ai` for all channels starting with `ai:`).
+5. Select the "Message annotations, updates, deletes and appends" option from the list.
+6. Click "Create channel rule".
+
+The examples in this guide use the `ai:` namespace prefix, which assumes you have configured the rule for `ai:*`.
+
+
+
+## Step 2: Get a response with citations from Anthropic
+
+Initialize an Anthropic client and use the [Messages API](https://docs.anthropic.com/en/api/messages) with citations enabled. Anthropic supports citations from documents, PDFs, and search results.
+
+Create a new file `publisher.mjs` with the following contents:
+
+
+```javascript
+import Ably from 'ably';
+import Anthropic from '@anthropic-ai/sdk';
+
+// Initialize Anthropic client
+const anthropic = new Anthropic();
+
+// Process each response
+async function processResponse(response) {
+ console.log(JSON.stringify(response, null, 2));
+ // This function is updated in the next sections
+}
+
+// Create a response with citations enabled
+async function getAnthropicResponseWithCitations(question, documentContent) {
+ const response = await anthropic.messages.create({
+ model: "claude-sonnet-4-5",
+ max_tokens: 1024,
+ messages: [
+ {
+ role: "user",
+ content: [
+ {
+ type: "document",
+ source: {
+ type: "text",
+ media_type: "text/plain",
+ data: documentContent
+ },
+ title: "Source Document",
+ citations: { enabled: true }
+ },
+ {
+ type: "text",
+ text: question
+ }
+ ]
+ }
+ ]
+ });
+
+ await processResponse(response);
+}
+
+// Usage example
+const document = `The James Webb Space Telescope (JWST) launched on December 25, 2021.
+It is the largest optical telescope in space and is designed to conduct infrared astronomy.
+The telescope's first full-color images were released on July 12, 2022, revealing unprecedented
+details of distant galaxies, nebulae, and exoplanet atmospheres.`;
+
+getAnthropicResponseWithCitations(
+ "What are the latest discoveries from the James Webb Space Telescope?",
+ document
+);
+```
+
+
+### Understand Anthropic citation responses
+
+When citations are enabled, Anthropic's Messages API returns responses with multiple text blocks. Each text block can include a `citations` array containing references to specific locations in the source documents.
+
+The following example shows the response structure when citations are included:
+
+
+```json
+{
+ "content": [
+ {
+ "type": "text",
+ "text": "The James Webb Space Telescope launched on "
+ },
+ {
+ "type": "text",
+ "text": "December 25, 2021",
+ "citations": [{
+ "type": "char_location",
+ "cited_text": "The James Webb Space Telescope (JWST) launched on December 25, 2021.",
+ "document_index": 0,
+ "document_title": "Source Document",
+ "start_char_index": 0,
+ "end_char_index": 68
+ }]
+ },
+ {
+ "type": "text",
+ "text": ". Its first full-color images were released on "
+ },
+ {
+ "type": "text",
+ "text": "July 12, 2022",
+ "citations": [{
+ "type": "char_location",
+ "cited_text": "The telescope's first full-color images were released on July 12, 2022",
+ "document_index": 0,
+ "document_title": "Source Document",
+ "start_char_index": 185,
+ "end_char_index": 255
+ }]
+ }
+ ]
+}
+```
+
+
+Each citation includes:
+
+- `type`: The citation type (`char_location` for plain text, `page_location` for PDFs, `content_block_location` for custom content, or `search_result_location` for search results).
+- `cited_text`: The exact text being cited from the source.
+- `document_index`: The index of the source document (0-indexed).
+- `document_title`: The title of the source document.
+- Location fields: Character indices, page numbers, or block indices depending on the citation type.
+
+
+
+## Step 3: Publish response and citations to Ably
+
+Publish the AI response as an Ably message, then publish each citation as a message annotation referencing the response message's `serial`.
+
+### Initialize the Ably client
+
+Add the Ably import and client initialization to your `publisher.mjs` file:
+
+
+```javascript
+// Initialize Ably Realtime client
+const realtime = new Ably.Realtime({
+ key: '{{API_KEY}}',
+ echoMessages: false
+});
+
+// Create a channel for publishing AI responses
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}');
+```
+
+
+The Ably Realtime client maintains a persistent connection to the Ably service, which allows you to publish messages with low latency.
+
+
+
+### Publish response and citations
+
+Update the `processResponse` function to extract the full text and citations from the Anthropic response, then publish them to Ably:
+
+
+```javascript
+// Process response and publish to Ably
+async function processResponse(response) {
+ let fullText = '';
+ const citations = [];
+ let currentOffset = 0;
+
+ // Extract text and citations from response
+ for (const block of response.content) {
+ if (block.type === 'text') {
+ const text = block.text;
+
+ if (block.citations) {
+ for (const citation of block.citations) {
+ citations.push({
+ ...citation,
+ // Track position in the full response text
+ responseStartOffset: currentOffset,
+ responseEndOffset: currentOffset + text.length
+ });
+ }
+ }
+
+ fullText += text;
+ currentOffset += text.length;
+ }
+ }
+
+ // Publish the AI response message
+ const { serials: [msgSerial] } = await channel.publish('response', fullText);
+ console.log('Published response with serial:', msgSerial);
+
+ // Publish each citation as an annotation
+ for (const citation of citations) {
+ let sourceDomain;
+ try {
+ sourceDomain = citation.source ? new URL(citation.source).hostname : citation.document_title;
+ } catch {
+ sourceDomain = citation.document_title || 'document';
+ }
+
+ await channel.annotations.publish(msgSerial, {
+ type: 'citations:multiple.v1',
+ name: sourceDomain,
+ data: {
+ title: citation.document_title,
+ citedText: citation.cited_text,
+ citationType: citation.type,
+ startOffset: citation.responseStartOffset,
+ endOffset: citation.responseEndOffset,
+ documentIndex: citation.document_index,
+ ...(citation.start_char_index !== undefined && {
+ startCharIndex: citation.start_char_index,
+ endCharIndex: citation.end_char_index
+ }),
+ ...(citation.start_page_number !== undefined && {
+ startPageNumber: citation.start_page_number,
+ endPageNumber: citation.end_page_number
+ })
+ }
+ });
+ }
+
+ console.log(`Published ${citations.length} citation(s)`);
+}
+```
+
+
+This implementation:
+
+- Extracts the full response text by concatenating all text blocks
+- Tracks the position of each citation within the full response
+- Publishes the response as a single Ably message and captures its `serial`
+- Publishes each citation as an annotation using the [`multiple.v1`](/docs/messages/annotations#multiple) summarization method
+- Uses the source domain as the annotation `name` for grouping in summaries
+
+
+
+Run the publisher to see responses and citations published to Ably:
+
+
+```shell
+node publisher.mjs
+```
+
+
+## Step 4: Subscribe to citation summaries
+
+Create a subscriber that receives AI responses and citation summaries in realtime.
+
+Create a new file `subscriber.mjs` with the following contents:
+
+
+```javascript
+import Ably from 'ably';
+
+// Initialize Ably Realtime client
+const realtime = new Ably.Realtime({ key: '{{API_KEY}}' });
+
+// Get the same channel used by the publisher
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}');
+
+// Track responses
+const responses = new Map();
+
+// Subscribe to receive messages and summary updates. Citation summaries
+// arrive as 'message.summary' events on the message subscription.
+await channel.subscribe((message) => {
+  switch (message.action) {
+    case 'message.create':
+      console.log('\n[New response]');
+      console.log('Serial:', message.serial);
+      console.log('Content:', message.data);
+      responses.set(message.serial, { content: message.data, citations: [] });
+      break;
+
+    case 'message.update': {
+      // Handle streamed content updates
+      const response = responses.get(message.serial);
+      if (response) {
+        response.content = message.data;
+        console.log('\n[Response updated]');
+        console.log('Content:', message.data);
+      }
+      break;
+    }
+
+    case 'message.summary': {
+      // Citation counts grouped by annotation name (source)
+      const citationsSummary = message.annotations?.summary?.['citations:multiple.v1'];
+      if (citationsSummary) {
+        console.log('\n[Citation summary updated]');
+        for (const [source, data] of Object.entries(citationsSummary)) {
+          console.log(`  ${source}: ${data.total} citation(s)`);
+        }
+      }
+      break;
+    }
+  }
+});
+
+// Note: 'message.summary' events only contain per-source citation
+// counts, not the full citation payloads. To access the complete
+// citation data for each annotation (such as the cited text and
+// offsets), see Step 5, which subscribes to the individual
+// annotation events instead.
+
+console.log('Subscriber ready, waiting for responses and citations...');
+```
+
+
+Run the subscriber in a separate terminal:
+
+
+```shell
+node subscriber.mjs
+```
+
+
+With the subscriber running, run the publisher in another terminal. You'll see the response appear followed by citation summary updates showing counts grouped by source.
+
+## Step 5: Subscribe to individual citations
+
+To access the full citation data for rendering source links or inline markers, subscribe to individual annotation events.
+
+Create a new file `citation-subscriber.mjs` with the following contents:
+
+
+```javascript
+import Ably from 'ably';
+
+// Initialize Ably Realtime client
+const realtime = new Ably.Realtime({ key: '{{API_KEY}}' });
+
+// Get the channel with annotation subscription enabled
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}', {
+ modes: ['SUBSCRIBE', 'ANNOTATION_SUBSCRIBE']
+});
+
+// Track responses and their citations
+const responses = new Map();
+
+// Subscribe to messages
+await channel.subscribe((message) => {
+ if (message.action === 'message.create') {
+ console.log('\n[New response]');
+ console.log('Serial:', message.serial);
+ console.log('Content:', message.data);
+ responses.set(message.serial, { content: message.data, citations: [] });
+ }
+});
+
+// Subscribe to individual citation annotations
+await channel.annotations.subscribe((annotation) => {
+ if (annotation.action === 'annotation.create' &&
+ annotation.type === 'citations:multiple.v1') {
+ const { title, citedText, citationType, documentIndex } = annotation.data;
+
+ console.log('\n[Citation received]');
+ console.log(` Source: ${title}`);
+ console.log(` Type: ${citationType}`);
+ console.log(` Document index: ${documentIndex}`);
+ console.log(` Cited text: "${citedText}"`);
+
+ // Store citation for the response
+ const response = responses.get(annotation.messageSerial);
+ if (response) {
+ response.citations.push(annotation.data);
+ }
+ }
+});
+
+console.log('Subscriber ready, waiting for responses and citations...');
+```
+
+
+Run the citation subscriber:
+
+
+```shell
+node citation-subscriber.mjs
+```
+
+
+This subscriber receives the full citation data as each annotation arrives, enabling you to:
+
+- Display the source document title
+- Show the exact text that was cited from each source
+- Highlight cited portions of the response text using the offset positions
+
+## Step 6: Combine with streaming responses
+
+You can combine citations with the [message-per-response](/docs/ai-transport/token-streaming/message-per-response) streaming pattern. Since Anthropic includes `citations_delta` events when streaming, you can publish citations as annotations while the response is still being streamed.
+
+
+```javascript
+import Anthropic from '@anthropic-ai/sdk';
+import Ably from 'ably';
+
+const anthropic = new Anthropic();
+const realtime = new Ably.Realtime({
+ key: '{{API_KEY}}',
+ echoMessages: false
+});
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}');
+
+// Track state for streaming
+let msgSerial = null;
+let currentBlockIndex = null;
+let currentOffset = 0;
+
+// Process streaming events
+async function processStreamEvent(event) {
+ switch (event.type) {
+ case 'message_start':
+ // Publish initial empty message
+ const result = await channel.publish({ name: 'response', data: '' });
+ msgSerial = result.serials[0];
+ currentOffset = 0;
+ break;
+
+ case 'content_block_start':
+ if (event.content_block.type === 'text') {
+ currentBlockIndex = event.index;
+ }
+ break;
+
+ case 'content_block_delta':
+ if (event.index === currentBlockIndex) {
+ if (event.delta.type === 'text_delta') {
+ // Append text token
+ channel.appendMessage({ serial: msgSerial, data: event.delta.text });
+ currentOffset += event.delta.text.length;
+ } else if (event.delta.type === 'citations_delta') {
+ // Publish citation annotation
+ const citation = event.delta.citation;
+ let sourceDomain;
+ try {
+ sourceDomain = new URL(citation.source || '').hostname;
+ } catch {
+ sourceDomain = citation.document_title || 'document';
+ }
+
+ await channel.annotations.publish(msgSerial, {
+ type: 'citations:multiple.v1',
+ name: sourceDomain,
+ data: {
+ title: citation.document_title,
+ citedText: citation.cited_text,
+ citationType: citation.type,
+ documentIndex: citation.document_index
+ }
+ });
+ }
+ }
+ break;
+
+ case 'message_stop':
+ console.log('Stream completed!');
+ break;
+ }
+}
+
+// Stream response with citations
+async function streamWithCitations(question, documentContent) {
+ const stream = await anthropic.messages.create({
+ model: "claude-sonnet-4-5",
+ max_tokens: 1024,
+ stream: true,
+ messages: [
+ {
+ role: "user",
+ content: [
+ {
+ type: "document",
+ source: {
+ type: "text",
+ media_type: "text/plain",
+ data: documentContent
+ },
+ title: "Source Document",
+ citations: { enabled: true }
+ },
+ {
+ type: "text",
+ text: question
+ }
+ ]
+ }
+ ]
+ });
+
+ for await (const event of stream) {
+ await processStreamEvent(event);
+ }
+}
+
+// Example usage
+const document = "The James Webb Space Telescope (JWST) launched on December 25, 2021. It is the largest optical telescope in space and is designed to conduct infrared astronomy. The telescope's first full-color images were released on July 12, 2022, revealing unprecedented details of distant galaxies, nebulae, and exoplanet atmospheres.";
+
+await streamWithCitations("What are the latest discoveries from the James Webb Space Telescope?", document);
+```
+
+
+
+
+## Next steps
+
+- Learn more about [citations and message annotations](/docs/ai-transport/messaging/citations)
+- Explore [annotation summaries](/docs/messages/annotations#annotation-summaries) for displaying citation counts
+- Understand how to [retrieve annotations on demand](/docs/messages/annotations#rest-api) via the REST API
+- Combine with [message-per-response streaming](/docs/ai-transport/token-streaming/message-per-response) for live token delivery
diff --git a/src/pages/docs/guides/ai-transport/openai-citations.mdx b/src/pages/docs/guides/ai-transport/openai-citations.mdx
new file mode 100644
index 0000000000..9930e8d371
--- /dev/null
+++ b/src/pages/docs/guides/ai-transport/openai-citations.mdx
@@ -0,0 +1,547 @@
+---
+title: "Guide: Attach citations to OpenAI responses using message annotations"
+meta_description: "Attach source citations to AI responses from the OpenAI Responses API using Ably message annotations."
+meta_keywords: "AI, citations, OpenAI, Responses API, AI transport, Ably, realtime, message annotations, source attribution, web search"
+---
+
+This guide shows you how to attach source citations to AI responses from OpenAI's [Responses API](https://platform.openai.com/docs/api-reference/responses) using Ably [message annotations](/docs/messages/annotations). When OpenAI provides citations from web search results, you can publish them as annotations on Ably messages, enabling clients to display source references alongside AI responses in realtime.
+
+Using Ably to distribute citations enables you to separate citation metadata from response content, display citation summaries updated in realtime, and retrieve detailed citation data on demand.
+
+
+
+## Prerequisites
+
+To follow this guide, you need:
+- Node.js 20 or higher
+- An OpenAI API key
+- An Ably API key
+
+Useful links:
+- [OpenAI Web Search documentation](https://platform.openai.com/docs/guides/tools-web-search)
+- [Ably JavaScript SDK getting started](/docs/getting-started/javascript)
+
+Create a new NPM package, which will contain the publisher and subscriber code:
+
+
+```shell
+mkdir ably-openai-citations && cd ably-openai-citations
+npm init -y
+```
+
+
+Install the required packages using NPM:
+
+
+```shell
+npm install openai@^4 ably@^2
+```
+
+
+
+
+Export your OpenAI API key to the environment, which will be used later in the guide by the OpenAI SDK:
+
+
+```shell
+export OPENAI_API_KEY="your_api_key_here"
+```
+
+
+## Step 1: Enable message annotations
+
+Message annotations require "Message annotations, updates, deletes and appends" to be enabled in a [channel rule](/docs/channels#rules) associated with the channel.
+
+
+
+To enable the channel rule:
+
+1. Go to the [Ably dashboard](https://www.ably.com/dashboard) and select your app.
+2. Navigate to the "Configuration" > "Rules" section from the left-hand navigation bar.
+3. Choose "Add new rule".
+4. Enter a channel name or namespace pattern (e.g. `ai` for all channels starting with `ai:`).
+5. Select the "Message annotations, updates, deletes and appends" option from the list.
+6. Click "Create channel rule".
+
+The examples in this guide use the `ai:` namespace prefix, which assumes you have configured the rule for `ai:*`.
+
+
+
+## Step 2: Get a response with citations from OpenAI
+
+Initialize an OpenAI client and use the [Responses API](https://platform.openai.com/docs/api-reference/responses) with web search enabled. When web search is used, OpenAI includes `url_citation` annotations in the response.
+
+Create a new file `publisher.mjs` with the following contents:
+
+
+```javascript
+import Ably from 'ably';
+import OpenAI from 'openai';
+
+// Initialize OpenAI client
+const openai = new OpenAI();
+
+// Process each response event
+async function processResponse(response) {
+ console.log(JSON.stringify(response, null, 2));
+ // This function is updated in the next sections
+}
+
+// Create response with web search enabled
+async function getOpenAIResponseWithCitations(question) {
+ const response = await openai.responses.create({
+ model: "gpt-4.1",
+ input: question,
+ tools: [{ type: "web_search_preview" }]
+ });
+
+ await processResponse(response);
+}
+
+// Usage example
+getOpenAIResponseWithCitations(
+ "What are the latest discoveries from the James Webb Space Telescope in 2025?"
+);
+```
+
+
+
+
+### Understand OpenAI citation responses
+
+When web search is enabled, OpenAI's Responses API returns responses with `url_citation` annotations embedded in the output. The response includes both the web search call and the message with citations.
+
+The following example shows the response structure when citations are included:
+
+
+```json
+{
+ "id": "resp_abc123",
+ "status": "completed",
+ "output": [
+ {
+ "type": "web_search_call",
+ "id": "ws_456",
+ "status": "completed"
+ },
+ {
+ "type": "message",
+ "id": "msg_789",
+ "role": "assistant",
+ "content": [
+ {
+ "type": "output_text",
+ "text": "The James Webb Space Telescope launched on December 25, 2021 [1]. Its first full-color images were released on July 12, 2022 [2].",
+ "annotations": [
+ {
+ "type": "url_citation",
+ "start_index": 51,
+ "end_index": 54,
+ "url": "https://science.nasa.gov/mission/webb/",
+ "title": "James Webb Space Telescope - NASA Science"
+ },
+ {
+ "type": "url_citation",
+ "start_index": 110,
+ "end_index": 113,
+ "url": "https://en.wikipedia.org/wiki/James_Webb_Space_Telescope",
+ "title": "James Webb Space Telescope - Wikipedia"
+ }
+ ]
+ }
+ ]
+ }
+ ]
+}
+```
+
+
+Each `url_citation` annotation includes:
+
+- `type`: Always `"url_citation"` for web search citations.
+- `start_index`: The character position in the response text where the citation marker begins.
+- `end_index`: The character position where the citation marker ends.
+- `url`: The source URL being cited.
+- `title`: The title of the source page.
+
+
+
+## Step 3: Publish response and citations to Ably
+
+Publish the AI response as an Ably message, then publish each citation as a message annotation referencing the response message's `serial`.
+
+### Initialize the Ably client
+
+Add the Ably import and client initialization to your `publisher.mjs` file:
+
+
+```javascript
+// Initialize Ably Realtime client
+const realtime = new Ably.Realtime({
+ key: '{{API_KEY}}',
+ echoMessages: false
+});
+
+// Create a channel for publishing AI responses
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}');
+```
+
+
+The Ably Realtime client maintains a persistent connection to the Ably service, which allows you to publish messages with low latency.
+
+
+
+### Publish response and citations
+
+Update the `processResponse` function to extract the text and citations from the OpenAI response, then publish them to Ably:
+
+
+```javascript
+// Process response and publish to Ably
+async function processResponse(response) {
+ let fullText = '';
+ const citations = [];
+
+ // Extract text and citations from response
+ for (const item of response.output) {
+ if (item.type === 'message') {
+ for (const content of item.content) {
+ if (content.type === 'output_text') {
+ fullText = content.text;
+
+ if (content.annotations) {
+ for (const annotation of content.annotations) {
+ if (annotation.type === 'url_citation') {
+ citations.push({
+ url: annotation.url,
+ title: annotation.title,
+ startIndex: annotation.start_index,
+ endIndex: annotation.end_index
+ });
+ }
+ }
+ }
+ }
+ }
+ }
+ }
+
+ // Publish the AI response message
+ const { serials: [msgSerial] } = await channel.publish('response', fullText);
+ console.log('Published response with serial:', msgSerial);
+
+ // Publish each citation as an annotation
+ for (const citation of citations) {
+ const sourceDomain = new URL(citation.url).hostname;
+
+ await channel.annotations.publish(msgSerial, {
+ type: 'citations:multiple.v1',
+ name: sourceDomain,
+ data: {
+ url: citation.url,
+ title: citation.title,
+ startIndex: citation.startIndex,
+ endIndex: citation.endIndex
+ }
+ });
+ }
+
+ console.log(`Published ${citations.length} citation(s)`);
+}
+```
+
+
+This implementation:
+
+- Extracts the response text from the `output_text` content block
+- Collects all `url_citation` annotations with their URLs, titles, and positions
+- Publishes the response as a single Ably message and captures its `serial`
+- Publishes each citation as an annotation using the [`multiple.v1`](/docs/messages/annotations#multiple) summarization method
+- Uses the source domain as the annotation `name` for grouping in summaries
+
+
+
+Run the publisher to see responses and citations published to Ably:
+
+
+```shell
+node publisher.mjs
+```
+
+
+## Step 4: Subscribe to citation summaries
+
+Create a subscriber that receives AI responses and citation summaries in realtime.
+
+Create a new file `subscriber.mjs` with the following contents:
+
+
+```javascript
+import Ably from 'ably';
+
+// Initialize Ably Realtime client
+const realtime = new Ably.Realtime({ key: '{{API_KEY}}' });
+
+// Get the same channel used by the publisher
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}');
+
+// Track responses
+const responses = new Map();
+
+// Subscribe to receive messages and summaries
+await channel.subscribe((message) => {
+ switch (message.action) {
+ case 'message.create':
+ console.log('\n[New response]');
+ console.log('Serial:', message.serial);
+ console.log('Content:', message.data);
+ responses.set(message.serial, { content: message.data, citations: {} });
+ break;
+
+ case 'message.summary':
+      const citationsSummary = message.annotations?.summary?.['citations:multiple.v1'];
+ if (citationsSummary) {
+ console.log('\n[Citation summary updated]');
+ for (const [source, data] of Object.entries(citationsSummary)) {
+ console.log(` ${source}: ${data.total} citation(s)`);
+ }
+ }
+ break;
+ }
+});
+
+console.log('Subscriber ready, waiting for responses and citations...');
+```
+
+
+Run the subscriber in a separate terminal:
+
+
+```shell
+node subscriber.mjs
+```
+
+
+With the subscriber running, run the publisher in another terminal. You'll see the response appear followed by citation summary updates showing counts grouped by source domain.
+
+## Step 5: Subscribe to individual citations
+
+To access the full citation data for rendering source links or inline markers, subscribe to individual annotation events.
+
+Create a new file `citation-subscriber.mjs` with the following contents:
+
+
+```javascript
+import Ably from 'ably';
+
+// Initialize Ably Realtime client
+const realtime = new Ably.Realtime({ key: '{{API_KEY}}' });
+
+// Get the channel with annotation subscription enabled
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}', {
+ modes: ['SUBSCRIBE', 'ANNOTATION_SUBSCRIBE']
+});
+
+// Track responses and their citations
+const responses = new Map();
+
+// Subscribe to messages
+await channel.subscribe((message) => {
+ if (message.action === 'message.create') {
+ console.log('\n[New response]');
+ console.log('Serial:', message.serial);
+ console.log('Content:', message.data);
+ responses.set(message.serial, { content: message.data, citations: [] });
+ }
+});
+
+// Subscribe to individual citation annotations
+await channel.annotations.subscribe((annotation) => {
+ if (annotation.action === 'annotation.create' &&
+ annotation.type === 'citations:multiple.v1') {
+ const { url, title, startIndex, endIndex } = annotation.data;
+
+ console.log('\n[Citation received]');
+ console.log(` Title: ${title}`);
+ console.log(` URL: ${url}`);
+ console.log(` Position: ${startIndex}-${endIndex}`);
+
+ // Store citation for the response
+ const response = responses.get(annotation.messageSerial);
+ if (response) {
+ response.citations.push(annotation.data);
+ }
+ }
+});
+
+console.log('Subscriber ready, waiting for responses and citations...');
+```
+
+
+Run the citation subscriber:
+
+
+```shell
+node citation-subscriber.mjs
+```
+
+
+This subscriber receives the full citation data as each annotation arrives, enabling you to:
+
+- Display clickable source links with titles and URLs
+- Link inline citation markers (like `[1]`) to their sources using the position indices
+- Build a references section with all cited sources
+
+## Step 6: Combine with streaming responses
+
+You can combine citations with the [message-per-response](/docs/ai-transport/token-streaming/message-per-response) streaming pattern. OpenAI's streaming responses include citation annotations in the final `response.output_text.done` event.
+
+
+```javascript
+import OpenAI from 'openai';
+import Ably from 'ably';
+
+const openai = new OpenAI();
+const realtime = new Ably.Realtime({
+ key: '{{API_KEY}}',
+ echoMessages: false
+});
+const channel = realtime.channels.get('ai:{{RANDOM_CHANNEL_NAME}}');
+
+// Track state for streaming
+let msgSerial = null;
+let messageItemId = null;
+
+// Process streaming events
+async function processStreamEvent(event) {
+ switch (event.type) {
+ case 'response.created':
+ // Publish initial empty message
+ const result = await channel.publish({ name: 'response', data: '' });
+ msgSerial = result.serials[0];
+ break;
+
+ case 'response.output_item.added':
+ if (event.item.type === 'message') {
+ messageItemId = event.item.id;
+ }
+ break;
+
+ case 'response.output_text.delta':
+ // Append text token
+ if (event.item_id === messageItemId && msgSerial) {
+ channel.appendMessage({ serial: msgSerial, data: event.delta });
+ }
+ break;
+
+ case 'response.output_text.done':
+ // Process citations when text output is complete
+ if (event.item_id === messageItemId && event.annotations) {
+ for (const annotation of event.annotations) {
+ if (annotation.type === 'url_citation') {
+ const sourceDomain = new URL(annotation.url).hostname;
+
+ await channel.annotations.publish(msgSerial, {
+ type: 'citations:multiple.v1',
+ name: sourceDomain,
+ data: {
+ url: annotation.url,
+ title: annotation.title,
+ startIndex: annotation.start_index,
+ endIndex: annotation.end_index
+ }
+ });
+ }
+ }
+ }
+ break;
+
+ case 'response.completed':
+ console.log('Stream completed!');
+ break;
+ }
+}
+
+// Stream response with web search
+async function streamWithCitations(question) {
+ const stream = await openai.responses.create({
+ model: "gpt-4.1",
+ input: question,
+ tools: [{ type: "web_search_preview" }],
+ stream: true
+ });
+
+ for await (const event of stream) {
+ await processStreamEvent(event);
+ }
+}
+
+// Example usage
+await streamWithCitations(
+ "What are the latest discoveries from the James Webb Space Telescope in 2025?"
+);
+```
+
+
+
+
+## Step 7: Customize search behavior
+
+OpenAI's web search tool supports configuration options to customize search behavior:
+
+
+```javascript
+// Customize search context size
+const response = await openai.responses.create({
+ model: "gpt-4.1",
+ input: "What are the latest AI developments?",
+ tools: [{
+ type: "web_search_preview",
+ search_context_size: "high" // Options: "low", "medium", "high"
+ }]
+});
+
+// Tailor results using an approximate user location
+const localizedResponse = await openai.responses.create({
+ model: "gpt-4.1",
+ input: "Find information about the James Webb Space Telescope",
+ tools: [{
+ type: "web_search_preview",
+ user_location: {
+ type: "approximate",
+ country: "US"
+ }
+ }]
+});
+```
+
+
+The `search_context_size` option controls how much context from search results is provided to the model:
+- `low`: Faster responses with less context
+- `medium`: Balanced approach (default)
+- `high`: More comprehensive context, potentially more citations
+
+## Next steps
+
+- Learn more about [citations and message annotations](/docs/ai-transport/messaging/citations)
+- Explore [annotation summaries](/docs/messages/annotations#annotation-summaries) for displaying citation counts
+- Understand how to [retrieve annotations on demand](/docs/messages/annotations#rest-api) via the REST API
+- Combine with [message-per-response streaming](/docs/ai-transport/token-streaming/message-per-response) for live token delivery