Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
229 changes: 59 additions & 170 deletions assets/scripts/components/conversational-search/index.js
Original file line number Diff line number Diff line change
@@ -1,63 +1,68 @@
import { getConfig } from '../../helpers/getConfig';
import { initializeFeatureFlags, getBooleanFlag, isDatadogEmployee, fetchDatadogUserStatus } from 'scripts/helpers/feature-flags';
import { initializeFeatureFlags, getBooleanFlag, fetchDatadogUserStatus } from 'scripts/helpers/feature-flags';

import { logAction, logError } from './logger';
import { parseMarkdown, inlineRefChips, extractSources, renderMessageWithSources } from './markdown';
import { attachTooltips, buildSourceCards, showSourceTooltip, closeAllSourceTooltips, repositionTooltip } from './sources';
import { addMessageActions, injectCodeCopyButtons } from './actions';
import { streamConversation, fetchConversation, resetTypesenseClient } from './streaming';
import { streamDocsAiChat } from './docsai-client';
import { pickQuestions } from './suggested-questions';

const { env } = document.documentElement.dataset;
const docsConfig = getConfig(env);
const typesenseConfig = docsConfig.typesense;
const docsAiConfig = docsConfig.docsAi;

// --- Providers & feature flags ---------------------------------------------------
const DOCS_AI_ENABLED_FLAG_KEY = 'docs-ai-enabled';

const PROVIDER_INTERNAL = 'internal';
const PROVIDER_TYPESENSE = 'typesense';
// Optimistic render: default on so the UI mounts at DOMContentLoaded — no
// layout shift. If the flag resolves to false we tear the UI down. Right
// tradeoff for a default-on permanent kill switch: degraded UX during a rare
// incident is fine; everyday layout shift is not.
let IS_DOCS_AI_ENABLED = true;

let IS_CONVERSATIONAL_SEARCH_ENABLED = false;
const CONVERSATIONAL_SEARCH_FLAG_KEY = 'docs_conversational_search';

// When true, disables the internal docs-ai provider and uses the external Typesense provider.
// Default: false (internal docs-ai is the primary provider).
const USE_EXTERNAL_PROVIDER_FLAG_KEY = 'docs-ai-use-external-provider';

// Only relevant when provider is typesense.
const DISABLE_STREAMING_FLAG_KEY = 'docs-ai-disable-streaming';

const EXTERNAL_CONV_MODEL_DOCS_STABLE = 'docs-ai-conv-model-v1-stable';
const INTERNAL_CONVERSATION_ID_PREFIX = 'dd_docsai_';

const RENDER_THROTTLE = 50;

// Auto-rotated client-side messages shown while we wait for the first server `thinking` event.
// Aligned with the mapped server messages below so the user sees consistent copy either way.
const LOADING_MESSAGES = [
'Searching documentation…',
'Reviewing relevant pages…',
'Analyzing content…',
'Generating answer…'
'Understanding your question…',
'Searching the docs…',
'Reading the most relevant pages…',
'Drafting your response…'
];

// Maps backend `thinking` event content to user-facing copy.
// `null` hides the event entirely (used for internal optimization steps the user shouldn't see).
// Unknown keys fall through to the original message so new backend stages still surface something.
// Translates raw backend `thinking` event strings into polished user-facing copy.
// A `null` mapping suppresses the event entirely (used for internal optimization
// steps the user shouldn't see). Keys not present here pass through unchanged so
// new backend stages still surface something.
const THINKING_MESSAGES = {
'Rewriting query...': 'Understanding your question…',
'Searching documentation...': 'Searching the docs…',
'Reviewing relevant pages': 'Reading the most relevant pages…',
'Generating answer...': 'Drafting your response…',
'Something went wrong. Please try again.': 'Something went wrong. Try asking again.'
};

/**
 * Resolve the user-facing copy for a backend `thinking` event.
 * @param {string} serverMessage - Raw message text from the server event.
 * @returns {string|null} Mapped copy, `null` when the event should be hidden,
 *   or the original message when no mapping exists.
 */
function mapThinkingMessage(serverMessage) {
const hasMapping = Object.prototype.hasOwnProperty.call(THINKING_MESSAGES, serverMessage);
return hasMapping ? THINKING_MESSAGES[serverMessage] : serverMessage;
}

let isDatadogUser = false;

initializeFeatureFlags().then(async (client) => {
IS_CONVERSATIONAL_SEARCH_ENABLED = getBooleanFlag(client, CONVERSATIONAL_SEARCH_FLAG_KEY);

if (!IS_CONVERSATIONAL_SEARCH_ENABLED && isDatadogEmployee()) {
IS_CONVERSATIONAL_SEARCH_ENABLED = true;
}

IS_DOCS_AI_ENABLED = getBooleanFlag(client, DOCS_AI_ENABLED_FLAG_KEY, true);
isDatadogUser = await fetchDatadogUserStatus();

if (IS_CONVERSATIONAL_SEARCH_ENABLED) {
document.body.classList.add('conv-search-enabled');
if (IS_DOCS_AI_ENABLED) {
logAction('Conversational Search Impression', {
conversational_search: { action: 'impression', page: window.location.pathname }
});
initConversationalSearch();
} else {
teardownConversationalSearch();
}
});

Expand All @@ -74,9 +79,6 @@ class ConversationalSearch {
this.hasLoggedFirstOpen = false;
this.isSuggestionQuery = false;

this.provider = PROVIDER_INTERNAL;
this.selectedModelId = EXTERNAL_CONV_MODEL_DOCS_STABLE;
this.typesenseStreamingDisabled = true;
// Rewrites the initial user query for better retrieval before answering.
// Only applied on the first message (no history). Follow-ups use history context instead.
this.shouldRewriteQuery = true;
Expand All @@ -87,14 +89,11 @@ class ConversationalSearch {

if (!this.createElements()) return;
this.bindEvents();
this.resolveFlags();
this.ready = true;
}

get ctx() {
return {
provider: this.provider,
selectedModelId: this.selectedModelId,
conversationId: this.conversationId,
isDatadogUser
};
Expand All @@ -111,17 +110,6 @@ class ConversationalSearch {
return `${INTERNAL_CONVERSATION_ID_PREFIX}${Date.now()}_${Math.random().toString(36).slice(2, 11)}`;
}

resolveFlags() {
initializeFeatureFlags().then((client) => {
const useTypesense = getBooleanFlag(client, USE_EXTERNAL_PROVIDER_FLAG_KEY);
this.provider = useTypesense ? PROVIDER_TYPESENSE : PROVIDER_INTERNAL;

if (useTypesense) {
this.typesenseStreamingDisabled = getBooleanFlag(client, DISABLE_STREAMING_FLAG_KEY);
}
}).catch(() => {});
}

// --- DOM setup ---------------------------------------------------------------

createElements() {
Expand Down Expand Up @@ -325,7 +313,7 @@ class ConversationalSearch {
open(trigger = 'entry_button') {
if (!this.hasLoggedFirstOpen) {
this.log('Conversational Search Open', {
conversational_search: { action: 'open_first_time', trigger, provider: this.provider }
conversational_search: { action: 'open_first_time', trigger }
});
this.hasLoggedFirstOpen = true;
}
Expand Down Expand Up @@ -475,19 +463,16 @@ class ConversationalSearch {
this.log('Conversational Search Interaction', {
conversational_search: {
action,
provider: this.provider,
...(this.conversationId && { conversation_id: this.conversationId }),
...extra
}
});
}

logResponse({ query, answer, startTime, streaming = true }) {
logResponse({ query, answer, startTime }) {
this.log('Conversational Search Response', {
conversational_search: {
action: 'response_received',
provider: this.provider,
streaming,
response_length: answer.length,
...(this.conversationId && { conversation_id: this.conversationId }),
latency_ms: Date.now() - startTime
Expand All @@ -510,19 +495,12 @@ class ConversationalSearch {
this.addMessage('user', query);

try {
if (this.provider === PROVIDER_INTERNAL) {
await this.runInternalStream(query);
} else if (this.typesenseStreamingDisabled) {
await this.runTypesenseFetch(query);
} else {
await this.runTypesenseStream(query);
}
await this.runInternalStream(query);
} catch (error) {
if (error.name === 'AbortError' && this.userCancelledRequest) {
this.addResponseContainer().textContent = 'Request cancelled.';
} else {
this.logErr('Conversational Search Response Error', error);
if (this.provider === PROVIDER_TYPESENSE) resetTypesenseClient();
this.addResponseContainer().textContent = 'Sorry, something went wrong. Please try again.';
}
} finally {
Expand All @@ -535,7 +513,7 @@ class ConversationalSearch {
}
}

// --- Internal docs-ai provider -----------------------------------------------
// --- Docs AI streaming -------------------------------------------------------

async runInternalStream(query) {
this.abortController = new AbortController();
Expand Down Expand Up @@ -563,7 +541,8 @@ class ConversationalSearch {
rewriteQuery: isFirstMessage && this.shouldRewriteQuery && !isSuggestion,
signal: this.abortController.signal,
onThinking: (message) => {
loadingIndicator.updateStatus(message);
const mapped = mapThinkingMessage(message);
if (mapped) loadingIndicator.updateStatus(mapped);
},
onToken: (_token, fullMessage) => {
if (!responseContainer) {
Expand Down Expand Up @@ -602,120 +581,17 @@ class ConversationalSearch {
}
}

// --- Typesense provider (streaming) ------------------------------------------

async runTypesenseStream(query) {
this.abortController = new AbortController();
const startTime = Date.now();
const loadingIndicator = this.showLoadingIndicator();

let responseContainer = null;
let accumulatedMessage = '';
let lastRenderTime = 0;

let response;
try {
response = await streamConversation({
typesenseConfig,
query,
modelId: this.selectedModelId,
conversationId: this.conversationId,
signal: this.abortController.signal,
onChunk: (chunk) => {
if (chunk?.conversation_id && !this.conversationId) {
this.conversationId = chunk.conversation_id;
}

if (chunk?.message !== undefined) {
if (!responseContainer) {
loadingIndicator.stop();
responseContainer = this.addResponseContainer();
}

accumulatedMessage += chunk.message;

const now = Date.now();
if (now - lastRenderTime > RENDER_THROTTLE) {
const { displayMarkdown, sources } = extractSources(accumulatedMessage);
responseContainer.innerHTML = inlineRefChips(parseMarkdown(displayMarkdown));
if (sources.length > 0) {
responseContainer.appendChild(buildSourceCards(sources));
}
lastRenderTime = now;
this.scrollToBottom();
}
}
},
onError: (error) => {
this.logErr('Typesense Streaming Error', error);
}
});
} finally {
loadingIndicator.stop();
}

const finalConversationId = response?.results?.[0]?.conversation?.conversation_id;
if (finalConversationId) {
this.conversationId = finalConversationId;
}

if (!responseContainer) {
responseContainer = this.addResponseContainer();
}

if (accumulatedMessage) {
this.finalizeResponse(responseContainer, query, accumulatedMessage, startTime);
} else {
responseContainer.textContent = 'No response received. Please try again.';
}
}

// --- Typesense provider (non-streaming) --------------------------------------

async runTypesenseFetch(query) {
this.abortController = new AbortController();
const startTime = Date.now();
const loadingIndicator = this.showLoadingIndicator();

let response;
try {
response = await fetchConversation({
typesenseConfig,
query,
modelId: this.selectedModelId,
conversationId: this.conversationId,
signal: this.abortController.signal
});
} finally {
loadingIndicator.stop();
}

const conversation = response?.conversation || response?.results?.[0]?.conversation;
if (conversation?.conversation_id) {
this.conversationId = conversation.conversation_id;
}

const answer = conversation?.answer || '';
const responseContainer = this.addResponseContainer();

if (answer) {
this.finalizeResponse(responseContainer, query, answer, startTime, false);
} else {
responseContainer.textContent = 'No response received. Please try again.';
}
}

// --- Shared finalization -----------------------------------------------------

finalizeResponse(container, query, answer, startTime, streaming = true) {
finalizeResponse(container, query, answer, startTime) {
container.innerHTML = renderMessageWithSources(answer, {
attachTooltips,
buildSourceCards
});
injectCodeCopyButtons(container, this.ctx);
addMessageActions(container.parentElement, query, answer, this.ctx);
this.scrollToBottom();
this.logResponse({ query, answer, startTime, streaming });
this.logResponse({ query, answer, startTime });
}
}

Expand All @@ -724,7 +600,8 @@ class ConversationalSearch {
let conversationalSearchInstance = null;

function initConversationalSearch() {
if (!IS_CONVERSATIONAL_SEARCH_ENABLED || conversationalSearchInstance) return;
if (!IS_DOCS_AI_ENABLED || conversationalSearchInstance) return;

const instance = new ConversationalSearch();
if (instance.ready) conversationalSearchInstance = instance;

Expand All @@ -738,12 +615,24 @@ function initConversationalSearch() {
}
}

// Removes every conversational-search UI element from the page and clears the
// singleton instance. Safe to call when no instance was ever created: the
// standalone entry-point elements are removed regardless.
function teardownConversationalSearch() {
if (conversationalSearchInstance) {
const { floatButton, overlay, sidebar } = conversationalSearchInstance;
floatButton?.remove();
overlay?.remove();
sidebar?.remove();
conversationalSearchInstance = null;
}
['.home-ai-btn', '.home-ai-divider'].forEach((selector) => {
document.querySelector(selector)?.remove();
});
}

// Minimum query length required to auto-submit the conversation;
// avoids submitting short queries that are not meaningful.
const AUTO_SUBMIT_MIN_LENGTH = 10;

function askDocsAI(query, options = {}) {
if (!IS_CONVERSATIONAL_SEARCH_ENABLED || !conversationalSearchInstance) return;
if (!IS_DOCS_AI_ENABLED || !conversationalSearchInstance) return;

const trimmed = (query || '').trim();
const inst = conversationalSearchInstance;
Expand All @@ -767,4 +656,4 @@ if (document.readyState === 'loading') {
initConversationalSearch();
}

export { ConversationalSearch, askDocsAI, CONVERSATIONAL_SEARCH_FLAG_KEY };
export { ConversationalSearch, askDocsAI, DOCS_AI_ENABLED_FLAG_KEY };
6 changes: 2 additions & 4 deletions assets/scripts/components/conversational-search/logger.js
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
export function logAction(message, data, { selectedModelId, isDatadogUser } = {}) {
export function logAction(message, data, { isDatadogUser } = {}) {
const conversationalSearchData = {
docs_ai: true,
model_id: selectedModelId,
is_datadog_user: isDatadogUser ?? false,
...(data?.conversational_search || {})
};
Expand All @@ -19,10 +18,9 @@ export function logAction(message, data, { selectedModelId, isDatadogUser } = {}
}
}

export function logError(message, error, { selectedModelId, conversationId, isDatadogUser } = {}) {
export function logError(message, error, { conversationId, isDatadogUser } = {}) {
const errorData = {
docs_ai: true,
model_id: selectedModelId,
conversation_id: conversationId,
is_datadog_user: isDatadogUser ?? false,
error_message: error?.message || String(error),
Expand Down
Loading
Loading