diff --git a/.changeset/fresh-doodles-relax.md b/.changeset/fresh-doodles-relax.md
new file mode 100644
index 0000000000000..dd696bc0ceec4
--- /dev/null
+++ b/.changeset/fresh-doodles-relax.md
@@ -0,0 +1,11 @@
+---
+'@directus/ai': minor
+'@directus/api': minor
+'@directus/app': minor
+---
+
+Attached prompts, content items, and visual editor elements to AI Assistant Context
+
+:::notice
+To use this feature, update [@directus/visual-editing](https://github.com/directus/visual-editing) to v1.2.0+ on your website.
+:::
diff --git a/.changeset/olive-trains-eat.md b/.changeset/olive-trains-eat.md
new file mode 100644
index 0000000000000..d7c156520e93f
--- /dev/null
+++ b/.changeset/olive-trains-eat.md
@@ -0,0 +1,5 @@
+---
+'@directus/env': patch
+---
+
+Fixed LDAP DN properties being cast as arrays
diff --git a/.changeset/solid-ghosts-see.md b/.changeset/solid-ghosts-see.md
new file mode 100644
index 0000000000000..be6d7a59b4c74
--- /dev/null
+++ b/.changeset/solid-ghosts-see.md
@@ -0,0 +1,5 @@
+---
+'@directus/app': minor
+---
+
+Changed permission-blocked fields from a disabled to a non-editable appearance
diff --git a/api/src/ai/chat/controllers/chat.post.ts b/api/src/ai/chat/controllers/chat.post.ts
index b8ff8c155b469..eecc28b5f80e2 100644
--- a/api/src/ai/chat/controllers/chat.post.ts
+++ b/api/src/ai/chat/controllers/chat.post.ts
@@ -19,7 +19,7 @@ export const aiChatPostHandler: RequestHandler = async (req, res, _next) => {
 		throw new InvalidPayloadError({ reason: fromZodError(parseResult.error).message });
 	}
 
-	const { provider, model, messages: rawMessages, tools: requestedTools, toolApprovals } = parseResult.data;
+	const { provider, model, messages: rawMessages, tools: requestedTools, toolApprovals, context } = parseResult.data;
 
 	const aiSettings = res.locals['ai'].settings;
 
@@ -68,9 +68,10 @@ export const aiChatPostHandler: RequestHandler = async (req, res, _next) => {
 	const stream = await createUiStream(validationResult.data, {
 		provider,
 		model,
-		tools: tools,
+		tools,
 		aiSettings,
 		systemPrompt: res.locals['ai'].systemPrompt,
+		...(context && { context }),
 		onUsage: (usage) => {
 			res.write(`data: ${JSON.stringify({ type: 'data-usage', data: usage })}\n\n`);
 		},
diff --git a/api/src/ai/chat/lib/create-ui-stream.ts b/api/src/ai/chat/lib/create-ui-stream.ts
index 068b7fb509693..d304b725546a5 100644
--- a/api/src/ai/chat/lib/create-ui-stream.ts
+++ b/api/src/ai/chat/lib/create-ui-stream.ts
@@ -16,6 +16,8 @@ import {
 	getProviderOptions,
 } from '../../providers/index.js';
 import { SYSTEM_PROMPT } from '../constants/system-prompt.js';
+import type { ChatContext } from '../models/chat-request.js';
+import { formatContextForSystemPrompt } from '../utils/format-context.js';
 
 export interface CreateUiStreamOptions {
 	provider: ProviderType;
@@ -23,12 +25,13 @@ export interface CreateUiStreamOptions {
 	tools: { [x: string]: Tool };
 	aiSettings: AISettings;
 	systemPrompt?: string;
+	context?: ChatContext;
 	onUsage?: (usage: Pick) => void | Promise;
 }
 
 export const createUiStream = async (
 	messages: UIMessage[],
-	{ provider, model, tools, aiSettings, systemPrompt, onUsage }: CreateUiStreamOptions,
+	{ provider, model, tools, aiSettings, systemPrompt, context, onUsage }: CreateUiStreamOptions,
 ): Promise>, any>> => {
 	const configs = buildProviderConfigs(aiSettings);
 	const providerConfig = configs.find((c) => c.type === provider);
@@ -39,17 +42,32 @@ export const createUiStream = async (
 	const registry = createAIProviderRegistry(configs, aiSettings);
 
-	systemPrompt ||= SYSTEM_PROMPT;
-
+	const baseSystemPrompt = systemPrompt || SYSTEM_PROMPT;
+	const contextBlock = context ? formatContextForSystemPrompt(context) : null;
 
 	const providerOptions = getProviderOptions(provider, model, aiSettings);
 
+	// Compute the full system prompt once to avoid re-computing on each step
+	const fullSystemPrompt = contextBlock ? baseSystemPrompt + contextBlock : baseSystemPrompt;
+
 	const stream = streamText({
-		system: systemPrompt,
+		system: baseSystemPrompt,
 		model: registry.languageModel(`${provider}:${model}`),
 		messages: await convertToModelMessages(messages),
 		stopWhen: [stepCountIs(10)],
 		providerOptions,
 		tools,
+		/**
+		 * prepareStep is called before each AI step to prepare the system prompt.
+		 * When context exists, we override the system prompt to include context attachments.
+		 * This allows the initial system prompt to be simple while ensuring all steps
+		 * (including tool continuation steps) receive the full context.
+		 */
+		prepareStep: () => {
+			if (contextBlock) {
+				return { system: fullSystemPrompt };
+			}
+
+			return {};
+		},
 		onFinish({ usage }) {
 			if (onUsage) {
 				const { inputTokens, outputTokens, totalTokens } = usage;
diff --git a/api/src/ai/chat/models/chat-request.ts b/api/src/ai/chat/models/chat-request.ts
index 5ca59ab39f73e..edf9b6815e630 100644
--- a/api/src/ai/chat/models/chat-request.ts
+++ b/api/src/ai/chat/models/chat-request.ts
@@ -27,12 +27,78 @@ export type ChatRequestTool = z.infer<typeof ChatRequestTool>;
 
 export const ToolApprovalMode = z.enum(['always', 'ask', 'disabled']);
 export type ToolApprovalMode = z.infer<typeof ToolApprovalMode>;
 
+const ItemContextData = z.object({
+	collection: z.string(),
+	key: z.union([z.string(), z.number()]),
+});
+
+const VisualElementContextData = z.object({
+	key: z.string(),
+	collection: z.string(),
+	item: z.union([z.string(), z.number()]),
+	fields: z.array(z.string()).optional(),
+	rect: z
+		.object({
+			top: z.number(),
+			left: z.number(),
+			width: z.number(),
+			height: z.number(),
+		})
+		.optional(),
+});
+
+const PromptContextData = z.object({
+	text: z.string(),
+	prompt: z.record(z.string(), z.unknown()),
+	values: z.record(z.string(), z.string()),
+});
+
+export const ContextAttachment = z.discriminatedUnion('type', [
+	z.object({
+		type: z.literal('item'),
+		display: z.string(),
+		data: ItemContextData,
+		snapshot: z.record(z.string(), z.unknown()),
+	}),
+	z.object({
+		type: z.literal('visual-element'),
+		display: z.string(),
+		data: VisualElementContextData,
+		snapshot: z.record(z.string(), z.unknown()),
+	}),
+	z.object({
+		type: z.literal('prompt'),
+		display: z.string(),
+		data: PromptContextData,
+		snapshot: z.record(z.string(), z.unknown()),
+	}),
+]);
+
+export type ContextAttachment = z.infer<typeof ContextAttachment>;
+
+export const PageContext = z.object({
+	path: z.string(),
+	collection: z.string().optional(),
+	item: z.union([z.string(), z.number()]).optional(),
+	module: z.string().optional(),
+});
+
+export type PageContext = z.infer<typeof PageContext>;
+
+export const ChatContext = z.object({
+	attachments: z.array(ContextAttachment).max(10).optional(),
+	page: PageContext.optional(),
+});
+
+export type ChatContext = z.infer<typeof ChatContext>;
+
 export const ChatRequest = z.intersection(
 	z.discriminatedUnion('provider', [ProviderOpenAi, ProviderAnthropic, ProviderGoogle, ProviderOpenAiCompatible]),
 	z.object({
 		tools: z.array(ChatRequestTool),
 		messages: z.array(z.looseObject({})),
 		toolApprovals: z.record(z.string(), ToolApprovalMode).optional(),
+		context: ChatContext.optional(),
 	}),
 );
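For reference, a payload that exercises the new optional `context` property might look roughly like the following sketch. The concrete collection names, keys, and snapshot values are invented for illustration; only the shape follows the `ChatContext` schema above, and the import path assumes the snippet sits next to `chat-request.ts`.

```ts
import { ChatContext } from './chat-request.js';

// Illustrative only: the collection, keys, and snapshot values are invented.
const context = ChatContext.parse({
	page: { path: '/content/posts/123', collection: 'posts', item: '123', module: 'content' },
	attachments: [
		{
			type: 'item',
			display: 'My Post',
			data: { collection: 'posts', key: '123' },
			snapshot: { title: 'Hello World', status: 'draft' },
		},
		{
			type: 'visual-element',
			display: 'Hero Section',
			data: { key: 'hero-1', collection: 'sections', item: '456', fields: ['title', 'subtitle'] },
			snapshot: { title: 'Welcome' },
		},
	],
});
```

On the API side the same schema validates this object as the optional `context` field of `ChatRequest` when the chat request body is parsed.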
diff --git a/api/src/ai/chat/utils/format-context.test.ts b/api/src/ai/chat/utils/format-context.test.ts
new file mode 100644
index 0000000000000..e25b7f94bc994
--- /dev/null
+++ b/api/src/ai/chat/utils/format-context.test.ts
@@ -0,0 +1,306 @@
+import { describe, expect, test } from 'vitest';
+import { formatContextForSystemPrompt } from './format-context.js';
+
+describe('formatContextForSystemPrompt', () => {
+	test('includes current date even with empty context', () => {
+		const result = formatContextForSystemPrompt({});
+		expect(result).toContain('');
+		expect(result).toContain('## Current Date');
+		expect(result).toMatch(/\d{4}-\d{2}-\d{2}/);
+	});
+
+	test('includes current date with empty attachments', () => {
+		const result = formatContextForSystemPrompt({ attachments: [] });
+		expect(result).toContain('');
+		expect(result).toContain('## Current Date');
+	});
+
+	test('formats page context correctly', () => {
+		const result = formatContextForSystemPrompt({
+			page: {
+				path: '/content/posts/123',
+				collection: 'posts',
+				item: '123',
+				module: 'content',
+			},
+		});
+
+		expect(result).toContain('');
+		expect(result).toContain('Path: /content/posts/123');
+		expect(result).toContain('Collection: posts');
+		expect(result).toContain('Item: 123');
+		expect(result).toContain('Module: content');
+		expect(result).toContain('');
+	});
+
+	test('formats prompt attachments in custom_instructions block', () => {
+		const result = formatContextForSystemPrompt({
+			attachments: [
+				{
+					type: 'prompt',
+					display: 'Test Prompt',
+					data: {
+						text: 'Be helpful',
+						prompt: {},
+						values: {},
+					},
+					snapshot: {
+						text: 'Be helpful',
+						messages: [{ role: 'user', text: 'Hello' }],
+					},
+				},
+			],
+		});
+
+		expect(result).toContain('');
+		expect(result).toContain('### Test Prompt');
+		expect(result).toContain('Be helpful');
+		expect(result).toContain('**user**: Hello');
+		expect(result).toContain('');
+	});
+
+	test('formats item attachments in user_context section', () => {
+		const result = formatContextForSystemPrompt({
+			attachments: [
+				{
+					type: 'item',
+					display: 'My Post',
+					data: {
+						collection: 'posts',
+						key: '123',
+					},
+					snapshot: {
+						title: 'Hello World',
+						body: 'Content here',
+					},
+				},
+			],
+		});
+
+		expect(result).toContain('');
+		expect(result).toContain('[Item: My Post (posts) — key: 123]');
+		expect(result).toContain('"title": "Hello World"');
+		expect(result).toContain('');
+	});
+
+	test('formats visual elements in visual_editing block', () => {
+		const result = formatContextForSystemPrompt({
+			attachments: [
+				{
+					type: 'visual-element',
+					display: 'Hero Section',
+					data: {
+						key: 'hero-1',
+						collection: 'sections',
+						item: '456',
+						fields: ['title', 'subtitle'],
+					},
+					snapshot: {
+						title: 'Welcome',
+						subtitle: 'Hello there',
+					},
+				},
+			],
+		});
+
+		expect(result).toContain('');
+		expect(result).toContain('### sections/456 — "Hero Section"');
+		expect(result).toContain('Editable fields: title, subtitle');
+		expect(result).toContain('"title": "Welcome"');
+		expect(result).toContain('');
+	});
+
+	test('escapes XML tags in user-controlled display strings', () => {
+		const result = formatContextForSystemPrompt({
+			attachments: [
+				{
+					type: 'item',
+					display: 'Injected
diff --git a/app/src/ai/components/ai-context-menu.vue b/app/src/ai/components/ai-context-menu.vue
new file mode 100644
index 0000000000000..3f1bf6caa166b
--- /dev/null
+++ b/app/src/ai/components/ai-context-menu.vue
@@ -0,0 +1,358 @@
diff --git a/app/src/ai/components/ai-context-menu/context-menu-item.vue b/app/src/ai/components/ai-context-menu/context-menu-item.vue
new file mode 100644
index 0000000000000..72a95f3f54675
--- /dev/null
+++ b/app/src/ai/components/ai-context-menu/context-menu-item.vue
@@ -0,0 +1,76 @@
diff --git a/app/src/ai/components/ai-context-menu/empty-state.vue b/app/src/ai/components/ai-context-menu/empty-state.vue
new file mode 100644
index 0000000000000..da070b26d93cf
--- /dev/null
+++ b/app/src/ai/components/ai-context-menu/empty-state.vue
@@ -0,0 +1,26 @@
diff --git a/app/src/ai/components/ai-context-menu/list-view.vue b/app/src/ai/components/ai-context-menu/list-view.vue
new file mode 100644
index 0000000000000..cbbac6be14c43
--- /dev/null
+++ b/app/src/ai/components/ai-context-menu/list-view.vue
@@ -0,0 +1,36 @@
diff --git a/app/src/ai/components/ai-conversation.vue b/app/src/ai/components/ai-conversation.vue
index 5b3080877a99d..d069246c8a74d 100644
--- a/app/src/ai/components/ai-conversation.vue
+++ b/app/src/ai/components/ai-conversation.vue
@@ -75,6 +75,7 @@ function scrollToBottom(behavior: ScrollBehavior = 'smooth') {
diff --git a/app/src/views/private/components/file-preview.vue b/app/src/views/private/components/file-preview.vue
index a30cfbe400e49..f3f0eff66ab89 100644
--- a/app/src/views/private/components/file-preview.vue
+++ b/app/src/views/private/components/file-preview.vue
@@ -12,6 +12,8 @@ export interface Props {
 	inModal?: boolean;
 	disabled?: boolean;
 	nonEditable?: boolean;
+	/** Direct source URL, bypasses asset URL computation from file.id */
+	src?: string;
 }
 
 const props = withDefaults(defineProps<Props>(), { preset: 'system-large-contain' });
@@ -22,12 +24,14 @@ defineEmits<{
 const file = toRef(props, 'file');
 
-const src = computed(() =>
-	getAssetUrl(file.value.id, {
+const src = computed(() => {
+	if (props.src) return props.src;
+
+	return getAssetUrl(file.value.id, {
 		imageKey: props.preset ?? undefined,
 		cacheBuster: file.value.modified_on,
-	}),
-);
+	});
+});
 
 const type = computed<'image' | 'video' | 'audio' | string>(() => {
 	const mimeType = file.value.type;
diff --git a/app/src/views/private/components/live-preview.vue b/app/src/views/private/components/live-preview.vue
index a8385db7a5f04..20bdba1f5893e 100644
--- a/app/src/views/private/components/live-preview.vue
+++ b/app/src/views/private/components/live-preview.vue
@@ -1,5 +1,6 @@
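The `format-context.ts` util itself is not part of this excerpt, only its tests. A minimal sketch that would satisfy the expectations visible in `format-context.test.ts` above could look roughly like this; the wrapper tag names (`page_context`, `custom_instructions`, `user_context`, `visual_editing`), the escaping helper, and the exact layout are assumptions inferred from the test names rather than confirmed implementation details.

```ts
import type { ChatContext, ContextAttachment, PageContext } from '../models/chat-request.js';

// Hypothetical escaping helper; the real util likely differs in detail.
function escapeXmlTags(value: string): string {
	return value.replace(/</g, '&lt;').replace(/>/g, '&gt;');
}

export function formatContextForSystemPrompt(context: ChatContext): string {
	const lines: string[] = [];

	// The tests always expect the current date in YYYY-MM-DD form.
	lines.push('## Current Date', new Date().toISOString().slice(0, 10), '');

	if (context.page) {
		const page: PageContext = context.page;
		lines.push('<page_context>', `Path: ${page.path}`);
		if (page.collection) lines.push(`Collection: ${page.collection}`);
		if (page.item !== undefined) lines.push(`Item: ${page.item}`);
		if (page.module) lines.push(`Module: ${page.module}`);
		lines.push('</page_context>', '');
	}

	const attachments: ContextAttachment[] = context.attachments ?? [];

	for (const attachment of attachments) {
		const display = escapeXmlTags(attachment.display);

		if (attachment.type === 'prompt') {
			lines.push('<custom_instructions>', `### ${display}`, attachment.data.text);
			const messages = (attachment.snapshot['messages'] as { role: string; text: string }[] | undefined) ?? [];
			for (const message of messages) lines.push(`**${message.role}**: ${message.text}`);
			lines.push('</custom_instructions>', '');
		}

		if (attachment.type === 'item') {
			lines.push(
				'<user_context>',
				`[Item: ${display} (${attachment.data.collection}) — key: ${attachment.data.key}]`,
				JSON.stringify(attachment.snapshot, null, 2),
				'</user_context>',
				'',
			);
		}

		if (attachment.type === 'visual-element') {
			lines.push(
				'<visual_editing>',
				`### ${attachment.data.collection}/${attachment.data.item} — "${display}"`,
				`Editable fields: ${(attachment.data.fields ?? []).join(', ')}`,
				JSON.stringify(attachment.snapshot, null, 2),
				'</visual_editing>',
				'',
			);
		}
	}

	return '\n' + lines.join('\n');
}
```

In `create-ui-stream.ts` this string is appended to the base system prompt and handed to `prepareStep`, so every step of a multi-step tool run sees the same attachment context.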