- {revealed ? apiKey.api_key : maskApiKey(apiKey.api_key)}
-
-
-
-
- - UUID: {customMcpServer.uuid} -
- -
- Status:{' '}
-
- Created At:{' '} - {new Date(customMcpServer.created_at).toLocaleString()} -
- -- Description:{' '} - - {customMcpServer.description} - -
- -- {customMcpServer.additionalArgs.join(' ').trim().length > 0 - ? customMcpServer.additionalArgs.join(' ').trim() - : 'No additional arguments set'} --
- {Object.entries(customMcpServer.env).length > 0 - ? Object.entries(customMcpServer.env).map( - ([key, value]) => `${key}=${value}\n` - ) - : 'No environment variables set'} --
- {customMcpServer.codeFileName || 'No code file name set'} -
- {customMcpServer.code && ( -- {customMcpServer.code} -- )} -
- Manage your custom MCP server (python code based) configurations -
-- {flexRender( - header.column.columnDef.header, - header.getContext() - )} - {{ - asc: ' πŸ”Ό', - desc: ' πŸ”½', - }[header.column.getIsSorted() as string] ?? null} - | - ))} -
---|
- {flexRender(cell.column.columnDef.cell, cell.getContext())} - | - ))} -
- You can use the SSE mode to trigger the inspector directly from your UI. This method allows for a more integrated experience when inspecting tools. -
- -- {sseEndpoint} --
- {urlBasedSseEndpoint} --
- Configure your UI to connect to these endpoints and the inspector will be triggered automatically, providing real-time tool inspection capabilities. -
-- Because MetaMCP is a local proxy and we currently don't support cloud hosting of your MCPs, you can use MCP's official inspector to check exactly which tools you will have access to through MetaMCP. The command below starts the inspector tool. In the future we may support a better experience for checking inspection details directly on our platform. -
- -- {inspectorCommand} --
No notifications received yet.
- ) : ( -- {JSON.stringify(notification.params, null, 2)} --
- {flexRender( - header.column.columnDef.header, - header.getContext() - )} - | - ))} -
---|
- {flexRender(cell.column.columnDef.cell, cell.getContext())} - | - ))} -
- UUID: {mcpServer.uuid} -
- -
- Status:{' '}
-
- Connection Status:{' '} - - {getConnectionStatusText(connectionStatus)} - -
- -- Created At:{' '} - {new Date(mcpServer.created_at).toLocaleString()} -
- -- Type: {mcpServer.type} -
-- {mcpServer.command} --
- {mcpServer.args.join(' ')} --
- {Object.entries(mcpServer.env).length > 0 - ? Object.entries(mcpServer.env).map( - ([key, value]) => `${key}=${value}\n` - ) - : 'No environment variables set'} --
- {mcpServer.url} --
- {mcpServer.type === McpServerType.SSE ? 'SSE' : 'Streamable HTTP'} endpoint -
-- {flexRender( - header.column.columnDef.header, - header.getContext() - )} - {{ - asc: ' πŸ”Ό', - desc: ' πŸ”½', - }[header.column.getIsSorted() as string] ?? null} - | - ))} -
---|
- {flexRender(cell.column.columnDef.cell, cell.getContext())} - | - ))} -
- Package: {item.package_name} -
-- Command: {item.command} -
- {item.args && ( -- Example Args: {item.args.join(' ')} -
- )} - {item.envs.length > 0 && ( -- Once you delete a workspace, there is no going back. Please be - careful. -
- -- Once you delete a project, there is no going back. Please be - careful. -
- -- Install uv (uvx) globally -{' '} - - Installation Guide - -
-- Install Node.js (npx) globally -{' '} - - Download Node.js - -
-- You can access it directly via the SSE endpoint: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- Alternatively, if you cannot set headers, you can use this URL-based endpoint: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- Notice: you can manage your API Keys in the{' '} - - API Keys Page - -
-- For Claude Desktop, locate the configuration file at: -
-- {' '} - ~/Library/Application - Support/Claude/claude_desktop_config.json --
%APPDATA%\Claude\claude_desktop_config.json-
- Generally the JSON Configuration Template will look like this: -
-- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- For Cursor, you can configure MetaMCP directly in the settings: -
-- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- For Windows, you can use the following configuration options: -
- -- You can use the following command for Cursor: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- Or configure it using json: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- You can also use the following command to start a standalone SSE server: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- Then use the following JSON configuration: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- We recommend using Smithery to run MCPs in Docker in the cloud for maximum compatibility. To set up the Smithery CLI on Windows, see: https://smithery.ai/docs/smithery-cli. -
- -- Visit MetaMCP server listing directly on Smithery: https://smithery.ai/server/@metatool-ai/mcp-server-metamcp -
- -- For Smithery on Windows, you can also use the following configuration options: -
- -- You can run the Smithery command directly in your terminal: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- Or configure it in your Claude Desktop configuration file: -
- -- {tokens.map((line, i) => ( -- )} -- {line.map((token, key) => ( - - ))} -- ))} -
- View and analyze your tool execution history -
-- Tool Logs are currently disabled. Enable this feature to view tool execution history. -
-- {flexRender( - header.column.columnDef.header, - header.getContext() - )} - | - ))} -
---|
- {flexRender(cell.column.columnDef.cell, cell.getContext())} - | - ))} -
- Manage all tools across your active MCP servers -
- {hasToolsManagement && ( -- Tool Management is currently disabled. Enable it to manage your tools. -
-- {flexRender( - header.column.columnDef.header, - header.getContext() - )} - | - ))} -
---|
- {flexRender(cell.column.columnDef.cell, cell.getContext())} - | - ))} -
(`codes/${uuid}`, () =>
- getCode(uuid)
- );
-
- const debouncedUpdateCode = useCallback(() => {
- return debounce(async (value: string) => {
- if (!code) return;
- await updateCode(uuid, code.fileName, value);
- mutate();
- }, 500);
- }, [code, uuid, mutate])();
-
- // Cleanup debounced function on unmount
- useEffect(() => {
- return () => {
- debouncedUpdateCode.cancel();
- };
- }, [debouncedUpdateCode]);
-
- const handleEditorChange = (value: string | undefined) => {
- if (!value) return;
- debouncedUpdateCode(value);
- };
-
- if (!code) {
- return (
-
- Loading...
-
- );
- }
-
- return (
-
-
-
- );
-}
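For context on the editor page removed above: it debounces Monaco onChange events so that updateCode only fires after the user pauses typing, and it cancels any pending save on unmount. Below is a minimal standalone sketch of that save pattern, assuming lodash's debounce (which the original appears to use) and the updateCode action deleted further down in this diff; the hook name is illustrative.

```ts
import { useEffect, useMemo } from 'react';
import debounce from 'lodash/debounce';

import { updateCode } from '@/app/actions/code';

// Collapse rapid keystrokes into a single updateCode call per pause.
export function useDebouncedCodeSave(uuid: string, fileName: string, delayMs = 500) {
  const debouncedSave = useMemo(
    () =>
      debounce((value: string) => {
        void updateCode(uuid, fileName, value);
      }, delayMs),
    [uuid, fileName, delayMs]
  );

  // Cancel any pending save when the inputs change or the component unmounts.
  useEffect(() => () => debouncedSave.cancel(), [debouncedSave]);

  return debouncedSave; // wire this to the editor's onChange handler
}
```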
diff --git a/app/(sidebar-layout)/editor/page.tsx b/app/(sidebar-layout)/editor/page.tsx
deleted file mode 100644
index 431fe789..00000000
--- a/app/(sidebar-layout)/editor/page.tsx
+++ /dev/null
@@ -1,7 +0,0 @@
-export default function EditorPage() {
- return (
-
- Select or create a new file to get started
-
- );
-}
diff --git a/app/(sidebar-layout)/layout.tsx b/app/(sidebar-layout)/layout.tsx
deleted file mode 100644
index 8cca5adc..00000000
--- a/app/(sidebar-layout)/layout.tsx
+++ /dev/null
@@ -1,11 +0,0 @@
-'use client';
-
-import SidebarLayout from '@/components/sidebar-layout';
-
-export default function LoggedInLayout({
- children,
-}: Readonly<{
- children: React.ReactNode;
-}>) {
- return {children} ;
-}
diff --git a/app/actions/api-keys.ts b/app/actions/api-keys.ts
deleted file mode 100644
index 02c05c54..00000000
--- a/app/actions/api-keys.ts
+++ /dev/null
@@ -1,72 +0,0 @@
-'use server';
-
-import { and, eq } from 'drizzle-orm';
-import { customAlphabet } from 'nanoid';
-
-import { db } from '@/db';
-import { apiKeysTable } from '@/db/schema';
-import { ApiKey } from '@/types/api-key';
-
-const nanoid = customAlphabet(
- '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz',
- 64
-);
-
-export async function createApiKey(projectUuid: string, name?: string) {
- const newApiKey = `sk_mt_${nanoid(64)}`;
-
- const apiKey = await db
- .insert(apiKeysTable)
- .values({
- project_uuid: projectUuid,
- api_key: newApiKey,
- name,
- })
- .returning();
-
- return apiKey[0] as ApiKey;
-}
-
-export async function getFirstApiKey(projectUuid: string) {
- if (!projectUuid) {
- return null;
- }
-
- let apiKey = await db.query.apiKeysTable.findFirst({
- where: eq(apiKeysTable.project_uuid, projectUuid),
- });
-
- if (!apiKey) {
- const newApiKey = `sk_mt_${nanoid(64)}`;
- await db.insert(apiKeysTable).values({
- project_uuid: projectUuid,
- api_key: newApiKey,
- });
-
- apiKey = await db.query.apiKeysTable.findFirst({
- where: eq(apiKeysTable.project_uuid, projectUuid),
- });
- }
-
- return apiKey as ApiKey;
-}
-
-export async function getProjectApiKeys(projectUuid: string) {
- const apiKeys = await db
- .select()
- .from(apiKeysTable)
- .where(eq(apiKeysTable.project_uuid, projectUuid));
-
- return apiKeys as ApiKey[];
-}
-
-export async function deleteApiKey(projectUuid: string, apiKeyUuid: string) {
- await db
- .delete(apiKeysTable)
- .where(
- and(
- eq(apiKeysTable.uuid, apiKeyUuid),
- eq(apiKeysTable.project_uuid, projectUuid)
- )
- );
-}
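The removed api-keys actions above generate keys as `sk_mt_` plus a 64-character alphanumeric nanoid, and getFirstApiKey lazily provisions one per project. A minimal usage sketch; the import alias and call site are assumptions, not code from this diff.

```ts
import { createApiKey, deleteApiKey, getFirstApiKey } from '@/app/actions/api-keys';

async function demoApiKeys(projectUuid: string) {
  // Returns an existing key, or creates one if the project has none yet.
  const firstKey = await getFirstApiKey(projectUuid);
  console.log(firstKey?.api_key.startsWith('sk_mt_')); // true

  // Create an additional, named key for the same project.
  const ciKey = await createApiKey(projectUuid, 'ci-pipeline');

  // Deletion is scoped to the project, so a key UUID alone is not enough.
  await deleteApiKey(projectUuid, ciKey.uuid);
}
```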
diff --git a/app/actions/code.ts b/app/actions/code.ts
deleted file mode 100644
index 7e873071..00000000
--- a/app/actions/code.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-'use server';
-
-import { desc, eq } from 'drizzle-orm';
-
-import { db } from '@/db';
-import { codesTable } from '@/db/schema';
-
-export async function getCodes() {
- return await db
- .select()
- .from(codesTable)
- .orderBy(desc(codesTable.created_at));
-}
-
-export async function getCode(uuid: string) {
- const results = await db
- .select()
- .from(codesTable)
- .where(eq(codesTable.uuid, uuid));
- return results[0];
-}
-
-export async function createCode(fileName: string, code: string) {
- const results = await db
- .insert(codesTable)
- .values({
- fileName,
- code,
- })
- .returning();
- return results[0];
-}
-
-export async function updateCode(uuid: string, fileName: string, code: string) {
- const results = await db
- .update(codesTable)
- .set({
- fileName,
- code,
- })
- .where(eq(codesTable.uuid, uuid))
- .returning();
- return results[0];
-}
-
-export async function deleteCode(uuid: string) {
- const results = await db
- .delete(codesTable)
- .where(eq(codesTable.uuid, uuid))
- .returning();
- return results[0];
-}
diff --git a/app/actions/custom-mcp-servers.ts b/app/actions/custom-mcp-servers.ts
deleted file mode 100644
index 873fbfe5..00000000
--- a/app/actions/custom-mcp-servers.ts
+++ /dev/null
@@ -1,164 +0,0 @@
-'use server';
-
-import { and, desc, eq, or } from 'drizzle-orm';
-
-import { db } from '@/db';
-import {
- codesTable,
- customMcpServersTable,
- McpServerStatus,
-} from '@/db/schema';
-import { CustomMcpServer } from '@/types/custom-mcp-server';
-import {
- CreateCustomMcpServerData,
- UpdateCustomMcpServerData,
-} from '@/types/custom-mcp-server';
-
-export async function getCustomMcpServers(profileUuid: string) {
- const servers = await db
- .select({
- uuid: customMcpServersTable.uuid,
- name: customMcpServersTable.name,
- description: customMcpServersTable.description,
- code_uuid: customMcpServersTable.code_uuid,
- additionalArgs: customMcpServersTable.additionalArgs,
- env: customMcpServersTable.env,
- created_at: customMcpServersTable.created_at,
- profile_uuid: customMcpServersTable.profile_uuid,
- status: customMcpServersTable.status,
- code: codesTable.code,
- codeFileName: codesTable.fileName,
- })
- .from(customMcpServersTable)
- .leftJoin(codesTable, eq(customMcpServersTable.code_uuid, codesTable.uuid))
- .where(
- and(
- eq(customMcpServersTable.profile_uuid, profileUuid),
- or(
- eq(customMcpServersTable.status, McpServerStatus.ACTIVE),
- eq(customMcpServersTable.status, McpServerStatus.INACTIVE)
- )
- )
- )
- .orderBy(desc(customMcpServersTable.created_at));
-
- return servers as CustomMcpServer[];
-}
-
-export async function getCustomMcpServerByUuid(
- profileUuid: string,
- uuid: string
-): Promise<CustomMcpServer | null> {
- const server = await db
- .select({
- uuid: customMcpServersTable.uuid,
- name: customMcpServersTable.name,
- description: customMcpServersTable.description,
- code_uuid: customMcpServersTable.code_uuid,
- additionalArgs: customMcpServersTable.additionalArgs,
- env: customMcpServersTable.env,
- created_at: customMcpServersTable.created_at,
- profile_uuid: customMcpServersTable.profile_uuid,
- status: customMcpServersTable.status,
- code: codesTable.code,
- codeFileName: codesTable.fileName,
- })
- .from(customMcpServersTable)
- .leftJoin(codesTable, eq(customMcpServersTable.code_uuid, codesTable.uuid))
- .where(
- and(
- eq(customMcpServersTable.uuid, uuid),
- eq(customMcpServersTable.profile_uuid, profileUuid)
- )
- )
- .limit(1);
-
- if (server.length === 0) {
- return null;
- }
-
- return server[0] as CustomMcpServer;
-}
-
-export async function deleteCustomMcpServerByUuid(
- profileUuid: string,
- uuid: string
-): Promise<void> {
- // First get the code_uuid
- const server = await db
- .select({ code_uuid: customMcpServersTable.code_uuid })
- .from(customMcpServersTable)
- .where(
- and(
- eq(customMcpServersTable.uuid, uuid),
- eq(customMcpServersTable.profile_uuid, profileUuid)
- )
- )
- .limit(1);
-
- if (server.length > 0) {
- // Delete the custom MCP server first
- await db
- .delete(customMcpServersTable)
- .where(
- and(
- eq(customMcpServersTable.uuid, uuid),
- eq(customMcpServersTable.profile_uuid, profileUuid)
- )
- );
- }
-}
-
-export async function toggleCustomMcpServerStatus(
- profileUuid: string,
- uuid: string,
- newStatus: McpServerStatus
-): Promise<void> {
- await db
- .update(customMcpServersTable)
- .set({ status: newStatus })
- .where(
- and(
- eq(customMcpServersTable.uuid, uuid),
- eq(customMcpServersTable.profile_uuid, profileUuid)
- )
- );
-}
-
-export async function createCustomMcpServer(
- profileUuid: string,
- data: CreateCustomMcpServerData
-) {
- const [server] = await db
- .insert(customMcpServersTable)
- .values({
- profile_uuid: profileUuid,
- name: data.name,
- description: data.description || '',
- code_uuid: data.code_uuid,
- additionalArgs: data.additionalArgs || [],
- env: data.env || {},
- status: McpServerStatus.ACTIVE,
- })
- .returning();
-
- return server;
-}
-
-export async function updateCustomMcpServer(
- profileUuid: string,
- uuid: string,
- data: UpdateCustomMcpServerData
-): Promise<void> {
- await db
- .update(customMcpServersTable)
- .set({
- ...data,
- })
- .where(
- and(
- eq(customMcpServersTable.uuid, uuid),
- eq(customMcpServersTable.profile_uuid, profileUuid)
- )
- );
-}
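A sketch of how these custom-server actions fit together, assuming the CreateCustomMcpServerData shape implied by createCustomMcpServer above; all concrete values are illustrative.

```ts
import {
  createCustomMcpServer,
  toggleCustomMcpServerStatus,
} from '@/app/actions/custom-mcp-servers';
import { McpServerStatus } from '@/db/schema';

async function demoCustomServer(profileUuid: string, codeUuid: string) {
  // code_uuid must reference an existing row in codesTable; new servers start ACTIVE.
  const server = await createCustomMcpServer(profileUuid, {
    name: 'weather-tools',
    description: 'Custom Python-code-based weather server',
    code_uuid: codeUuid,
    additionalArgs: ['--verbose'],
    env: { WEATHER_API_KEY: 'illustrative-value' },
  });

  // Soft-disable the server instead of deleting it.
  await toggleCustomMcpServerStatus(profileUuid, server.uuid, McpServerStatus.INACTIVE);
}
```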
diff --git a/app/actions/mcp-servers.ts b/app/actions/mcp-servers.ts
deleted file mode 100644
index 23cd326e..00000000
--- a/app/actions/mcp-servers.ts
+++ /dev/null
@@ -1,171 +0,0 @@
-'use server';
-
-import { and, desc, eq, or } from 'drizzle-orm';
-
-import { db } from '@/db';
-import { mcpServersTable, McpServerStatus, McpServerType } from '@/db/schema';
-import { McpServer } from '@/types/mcp-server';
-
-export async function getMcpServers(
- profileUuid: string,
- status?: McpServerStatus
-) {
- // Return empty array if profile UUID is empty
- if (!profileUuid) {
- return [];
- }
-
- const servers = await db
- .select()
- .from(mcpServersTable)
- .where(
- and(
- eq(mcpServersTable.profile_uuid, profileUuid),
- status
- ? eq(mcpServersTable.status, status)
- : or(
- eq(mcpServersTable.status, McpServerStatus.ACTIVE),
- eq(mcpServersTable.status, McpServerStatus.INACTIVE)
- )
- )
- )
- .orderBy(desc(mcpServersTable.created_at));
-
- return servers as McpServer[];
-}
-
-export async function getMcpServerByUuid(
- profileUuid: string,
- uuid: string
-): Promise<McpServer | undefined> {
- const server = await db.query.mcpServersTable.findFirst({
- where: and(
- eq(mcpServersTable.uuid, uuid),
- eq(mcpServersTable.profile_uuid, profileUuid)
- ),
- });
- return server;
-}
-
-export async function deleteMcpServerByUuid(
- profileUuid: string,
- uuid: string
-): Promise<void> {
- await db
- .delete(mcpServersTable)
- .where(
- and(
- eq(mcpServersTable.uuid, uuid),
- eq(mcpServersTable.profile_uuid, profileUuid)
- )
- );
-}
-
-export async function toggleMcpServerStatus(
- profileUuid: string,
- uuid: string,
- newStatus: McpServerStatus
-): Promise<void> {
- await db
- .update(mcpServersTable)
- .set({ status: newStatus })
- .where(
- and(
- eq(mcpServersTable.uuid, uuid),
- eq(mcpServersTable.profile_uuid, profileUuid)
- )
- );
-}
-
-export async function updateMcpServer(
- profileUuid: string,
- uuid: string,
- data: {
- name?: string;
- description?: string;
- command?: string;
- args?: string[];
- env?: { [key: string]: string };
- url?: string;
- type?: McpServerType;
- }
-): Promise<void> {
- await db
- .update(mcpServersTable)
- .set({
- ...data,
- })
- .where(
- and(
- eq(mcpServersTable.uuid, uuid),
- eq(mcpServersTable.profile_uuid, profileUuid)
- )
- );
-}
-
-export async function createMcpServer(
- profileUuid: string,
- data: {
- uuid?: string;
- name: string;
- description: string;
- command?: string;
- args: string[];
- env: { [key: string]: string };
- url?: string;
- type?: McpServerType;
- }
-): Promise<McpServer> {
- const [server] = await db
- .insert(mcpServersTable)
- .values({
- ...data,
- profile_uuid: profileUuid,
- })
- .returning();
-
- return server as McpServer;
-}
-
-export async function bulkImportMcpServers(
- data: {
- mcpServers: {
- [name: string]: {
- command?: string;
- args?: string[];
- env?: { [key: string]: string };
- description?: string;
- url?: string;
- type?: McpServerType;
- };
- };
- },
- profileUuid?: string | null
-) {
- if (!profileUuid) {
- throw new Error('Current workspace not found');
- }
-
- const { mcpServers } = data;
-
- const serverEntries = Object.entries(mcpServers);
-
- for (const [name, serverConfig] of serverEntries) {
- const serverData = {
- name,
- description: serverConfig.description || '',
- command: serverConfig.command || null,
- args: serverConfig.args || [],
- env: serverConfig.env || {},
- url: serverConfig.url || null,
- type: serverConfig.type || McpServerType.STDIO,
- profile_uuid: profileUuid,
- status: McpServerStatus.ACTIVE,
- };
-
- // Insert the server into the database
- await db.insert(mcpServersTable).values(serverData);
- }
-
- return { success: true, count: serverEntries.length };
-}
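bulkImportMcpServers expects the same `mcpServers` map shape that Claude Desktop configs use, keyed by server name, and a server with no `type` falls back to STDIO. A hedged usage sketch; the server names, command, and URL are examples, not values from this diff.

```ts
import { bulkImportMcpServers } from '@/app/actions/mcp-servers';
import { McpServerType } from '@/db/schema';

async function demoBulkImport(profileUuid: string) {
  const result = await bulkImportMcpServers(
    {
      mcpServers: {
        // STDIO server: type omitted, so it defaults to McpServerType.STDIO.
        sqlite: {
          command: 'uvx',
          args: ['mcp-server-sqlite', '--db-path', '/tmp/example.db'],
          env: { LOG_LEVEL: 'info' },
        },
        // Remote server: identified by URL and an explicit type.
        remote: {
          type: McpServerType.SSE,
          url: 'https://example.com/sse',
        },
      },
    },
    profileUuid
  );
  console.log(result); // { success: true, count: 2 }
}
```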
diff --git a/app/actions/oauth.ts b/app/actions/oauth.ts
deleted file mode 100644
index 889102e5..00000000
--- a/app/actions/oauth.ts
+++ /dev/null
@@ -1,54 +0,0 @@
-'use server';
-
-import {
- OAuthClientInformation,
- OAuthTokens,
-} from '@modelcontextprotocol/sdk/shared/auth.js';
-import { eq } from 'drizzle-orm';
-
-import { db } from '@/db';
-import { oauthSessionsTable } from '@/db/schema';
-
-export async function saveOAuthSession({
- mcpServerUuid,
- clientInformation,
- tokens,
- codeVerifier,
-}: {
- mcpServerUuid: string;
- clientInformation?: OAuthClientInformation;
- tokens?: OAuthTokens;
- codeVerifier?: string;
-}) {
- // Check if session exists
- const existingSession = await db.query.oauthSessionsTable.findFirst({
- where: eq(oauthSessionsTable.mcp_server_uuid, mcpServerUuid),
- });
-
- if (existingSession) {
- // Update existing session
- await db
- .update(oauthSessionsTable)
- .set({
- ...(clientInformation && { client_information: clientInformation }),
- ...(tokens && { tokens }),
- ...(codeVerifier && { code_verifier: codeVerifier }),
- updated_at: new Date(),
- })
- .where(eq(oauthSessionsTable.mcp_server_uuid, mcpServerUuid));
- } else if (clientInformation) {
- // Create new session (require client_information for creation)
- await db.insert(oauthSessionsTable).values({
- mcp_server_uuid: mcpServerUuid,
- client_information: clientInformation,
- ...(tokens && { tokens }),
- ...(codeVerifier && { code_verifier: codeVerifier }),
- });
- }
-}
-
-export async function getOAuthSession(mcpServerUuid: string) {
- return await db.query.oauthSessionsTable.findFirst({
- where: eq(oauthSessionsTable.mcp_server_uuid, mcpServerUuid),
- });
-}
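saveOAuthSession is an upsert keyed on mcp_server_uuid: the first call must carry clientInformation, and later calls patch only the fields they provide. A short sketch using the MCP SDK types imported above; the client id, verifier, and token values are placeholders.

```ts
import { getOAuthSession, saveOAuthSession } from '@/app/actions/oauth';

async function demoOAuthSession(mcpServerUuid: string) {
  // Initial registration: without clientInformation nothing would be created.
  await saveOAuthSession({
    mcpServerUuid,
    clientInformation: { client_id: 'example-client-id' },
    codeVerifier: 'example-pkce-verifier',
  });

  // After the authorization code exchange, store only the issued tokens.
  await saveOAuthSession({
    mcpServerUuid,
    tokens: { access_token: 'example-access-token', token_type: 'bearer' },
  });

  const session = await getOAuthSession(mcpServerUuid);
  console.log(session?.tokens);
}
```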
diff --git a/app/actions/profiles.ts b/app/actions/profiles.ts
deleted file mode 100644
index 8a2eb20e..00000000
--- a/app/actions/profiles.ts
+++ /dev/null
@@ -1,224 +0,0 @@
-'use server';
-
-import { eq } from 'drizzle-orm';
-
-import { db } from '@/db';
-import { ProfileCapability, profilesTable, WorkspaceMode } from '@/db/schema';
-import { projectsTable } from '@/db/schema';
-
-export async function createProfile(
- currentProjectUuid: string,
- name: string,
- mode: string = 'default'
-) {
- // Set capabilities based on mode
- const capabilities =
- mode === 'compatibility'
- ? [ProfileCapability.TOOLS_MANAGEMENT, ProfileCapability.TOOL_LOGS]
- : [];
-
- // Map the mode string to WorkspaceMode enum
- const workspaceMode =
- mode === 'compatibility' ? WorkspaceMode.LOCAL : WorkspaceMode.REMOTE;
-
- const profile = await db
- .insert(profilesTable)
- .values({
- name,
- project_uuid: currentProjectUuid,
- enabled_capabilities: capabilities,
- workspace_mode: workspaceMode,
- })
- .returning();
-
- return profile[0];
-}
-
-export async function getProfile(profileUuid: string) {
- const profile = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.uuid, profileUuid))
- .limit(1);
-
- if (profile.length === 0) {
- throw new Error('Profile not found');
- }
-
- return profile[0];
-}
-
-export async function getProfiles(currentProjectUuid: string) {
- const profiles = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.project_uuid, currentProjectUuid));
-
- return profiles;
-}
-
-export async function getProjectActiveProfile(currentProjectUuid: string) {
- const project = await db
- .select()
- .from(projectsTable)
- .where(eq(projectsTable.uuid, currentProjectUuid))
- .limit(1);
-
- if (project.length === 0) {
- throw new Error('Project not found');
- }
-
- const currentProject = project[0];
-
- // Try to get active profile if set
- if (currentProject.active_profile_uuid) {
- const activeProfile = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.uuid, currentProject.active_profile_uuid))
- .limit(1);
-
- if (activeProfile.length > 0) {
- return activeProfile[0];
- }
- }
-
- // If no active profile or not found, get all profiles
- const profiles = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.project_uuid, currentProjectUuid));
-
- // If there are profiles, use the first one and set it as active
- if (profiles.length > 0) {
- await db
- .update(projectsTable)
- .set({ active_profile_uuid: profiles[0].uuid })
- .where(eq(projectsTable.uuid, currentProjectUuid));
-
- return profiles[0];
- }
-
- // If no profiles exist, create a default one
- const defaultProfile = await db
- .insert(profilesTable)
- .values({
- name: 'Default Workspace',
- project_uuid: currentProjectUuid,
- enabled_capabilities: [], // Default mode has no special capabilities
- })
- .returning();
-
- // Set it as active
- await db
- .update(projectsTable)
- .set({ active_profile_uuid: defaultProfile[0].uuid })
- .where(eq(projectsTable.uuid, currentProjectUuid));
-
- return defaultProfile[0];
-}
-
-export async function setProfileActive(
- projectUuid: string,
- profileUuid: string
-) {
- const project = await db
- .select()
- .from(projectsTable)
- .where(eq(projectsTable.uuid, projectUuid))
- .limit(1);
-
- if (project.length === 0) {
- throw new Error('Project not found');
- }
-
- const updatedProject = await db
- .update(projectsTable)
- .set({ active_profile_uuid: profileUuid })
- .where(eq(projectsTable.uuid, projectUuid))
- .returning();
-
- if (updatedProject.length === 0) {
- throw new Error('Project not found');
- }
-}
-
-export async function updateProfileName(profileUuid: string, newName: string) {
- const profile = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.uuid, profileUuid))
- .limit(1);
-
- if (profile.length === 0) {
- throw new Error('Profile not found');
- }
-
- const updatedProfile = await db
- .update(profilesTable)
- .set({ name: newName })
- .where(eq(profilesTable.uuid, profileUuid))
- .returning();
-
- return updatedProfile[0];
-}
-
-export async function deleteProfile(profileUuid: string) {
- const profile = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.uuid, profileUuid))
- .limit(1);
-
- if (profile.length === 0) {
- throw new Error('Profile not found');
- }
-
- // Check if this is the last profile
- const profileCount = await db.select().from(profilesTable);
-
- if (profileCount.length === 1) {
- throw new Error('Cannot delete the last profile');
- }
-
- await db.delete(profilesTable).where(eq(profilesTable.uuid, profileUuid));
-
- return { success: true };
-}
-
-export async function setActiveProfile(profileUuid: string) {
- const profile = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.uuid, profileUuid))
- .limit(1);
-
- if (profile.length === 0) {
- throw new Error('Profile not found');
- }
-
- return profile[0];
-}
-
-export async function updateProfileCapabilities(
- profileUuid: string,
- capabilities: ProfileCapability[]
-) {
- const profile = await db
- .select()
- .from(profilesTable)
- .where(eq(profilesTable.uuid, profileUuid))
- .limit(1);
-
- if (profile.length === 0) {
- throw new Error('Profile not found');
- }
-
- const updatedProfile = await db
- .update(profilesTable)
- .set({ enabled_capabilities: capabilities })
- .where(eq(profilesTable.uuid, profileUuid))
- .returning();
-
- return updatedProfile[0];
-}
diff --git a/app/actions/projects.ts b/app/actions/projects.ts
deleted file mode 100644
index 7998accf..00000000
--- a/app/actions/projects.ts
+++ /dev/null
@@ -1,120 +0,0 @@
-'use server';
-
-import { eq } from 'drizzle-orm';
-
-import { db } from '@/db';
-import { profilesTable, projectsTable } from '@/db/schema';
-
-export async function createProject(name: string) {
- return await db.transaction(async (tx) => {
- // First create the project with a temporary self-referential UUID
- const [project] = await tx
- .insert(projectsTable)
- .values({
- name,
- active_profile_uuid: null,
- })
- .returning();
-
- // Create the profile with the actual project UUID
- const [profile] = await tx
- .insert(profilesTable)
- .values({
- name: 'Default Workspace',
- project_uuid: project.uuid,
- enabled_capabilities: [], // Default mode has no special capabilities
- })
- .returning();
-
- // Update the project with the correct profile UUID
- const [updatedProject] = await tx
- .update(projectsTable)
- .set({ active_profile_uuid: profile.uuid })
- .where(eq(projectsTable.uuid, project.uuid))
- .returning();
-
- return updatedProject;
- });
-}
-
-export async function getProject(projectUuid: string) {
- const project = await db
- .select()
- .from(projectsTable)
- .where(eq(projectsTable.uuid, projectUuid))
- .limit(1);
-
- if (project.length === 0) {
- throw new Error('Project not found');
- }
-
- return project[0];
-}
-
-export async function getProjects() {
- let projects = await db.select().from(projectsTable);
-
- if (projects.length === 0) {
- const defaultProject = await createProject('Default Project');
- projects = [defaultProject];
- }
-
- return projects;
-}
-
-export async function updateProjectName(projectUuid: string, newName: string) {
- const project = await db
- .select()
- .from(projectsTable)
- .where(eq(projectsTable.uuid, projectUuid))
- .limit(1);
-
- if (project.length === 0) {
- throw new Error('Project not found');
- }
-
- const updatedProject = await db
- .update(projectsTable)
- .set({ name: newName })
- .where(eq(projectsTable.uuid, projectUuid))
- .returning();
-
- return updatedProject[0];
-}
-
-export async function deleteProject(projectUuid: string) {
- const project = await db
- .select()
- .from(projectsTable)
- .where(eq(projectsTable.uuid, projectUuid))
- .limit(1);
-
- if (project.length === 0) {
- throw new Error('Project not found');
- }
-
- // Check if this is the last project
- const projectCount = await db.select().from(projectsTable);
-
- if (projectCount.length === 1) {
- throw new Error('Cannot delete the last project');
- }
-
- await db.delete(projectsTable).where(eq(projectsTable.uuid, projectUuid));
-
- return { success: true };
-}
-
-export async function setActiveProject(projectUuid: string) {
- const project = await db
- .select()
- .from(projectsTable)
- .where(eq(projectsTable.uuid, projectUuid))
- .limit(1);
-
- if (project.length === 0) {
- throw new Error('Project not found');
- }
-
- return project[0];
-}
diff --git a/app/actions/tool-execution-logs.ts b/app/actions/tool-execution-logs.ts
deleted file mode 100644
index e987abe5..00000000
--- a/app/actions/tool-execution-logs.ts
+++ /dev/null
@@ -1,155 +0,0 @@
-'use server';
-
-import { and, desc, eq, inArray, sql } from 'drizzle-orm';
-
-import { db } from '@/db';
-import {
- mcpServersTable,
- toolExecutionLogsTable,
- ToolExecutionStatus,
-} from '@/db/schema';
-
-export type ToolExecutionLog = {
- id: number;
- mcp_server_uuid: string | null;
- tool_name: string;
- payload: Record<string, any>;
- result: any;
- status: ToolExecutionStatus;
- error_message: string | null;
- execution_time_ms: string | null;
- created_at: Date;
- mcp_server_name?: string;
-};
-
-type GetToolExecutionLogsOptions = {
- limit?: number;
- offset?: number;
- mcpServerUuids?: string[];
- toolNames?: string[];
- statuses?: ToolExecutionStatus[];
- currentProfileUuid: string;
-};
-
-export async function getToolExecutionLogs({
- limit = 50,
- offset = 0,
- mcpServerUuids,
- toolNames,
- statuses,
- currentProfileUuid,
-}: GetToolExecutionLogsOptions): Promise<{
- logs: ToolExecutionLog[];
- total: number;
-}> {
- // Return early if no profile UUID is provided
- if (!currentProfileUuid) {
- return { logs: [], total: 0 };
- }
-
- // Build the where conditions
- const whereConditions = [];
-
- // Filter by MCP servers that belong to the current profile
- const allowedMcpServers = await db
- .select({ uuid: mcpServersTable.uuid })
- .from(mcpServersTable)
- .where(eq(mcpServersTable.profile_uuid, currentProfileUuid));
-
- const allowedMcpServerUuids = allowedMcpServers.map((server) => server.uuid);
-
- if (allowedMcpServerUuids.length > 0) {
- whereConditions.push(
- inArray(toolExecutionLogsTable.mcp_server_uuid, allowedMcpServerUuids)
- );
- }
-
- // Apply additional filters if provided
- if (mcpServerUuids && mcpServerUuids.length > 0) {
- whereConditions.push(
- inArray(toolExecutionLogsTable.mcp_server_uuid, mcpServerUuids)
- );
- }
-
- if (toolNames && toolNames.length > 0) {
- whereConditions.push(inArray(toolExecutionLogsTable.tool_name, toolNames));
- }
-
- if (statuses && statuses.length > 0) {
- whereConditions.push(inArray(toolExecutionLogsTable.status, statuses));
- }
-
- // Combine all conditions with AND
- const whereClause =
- whereConditions.length > 0 ? and(...whereConditions) : undefined;
-
- // Get total count
- const [{ count }] = await db
- .select({ count: sql<number>`count(*)` })
- .from(toolExecutionLogsTable)
- .where(whereClause);
-
- // Get logs with joined MCP server names
- const logs = await db
- .select({
- id: toolExecutionLogsTable.id,
- mcp_server_uuid: toolExecutionLogsTable.mcp_server_uuid,
- tool_name: toolExecutionLogsTable.tool_name,
- payload: toolExecutionLogsTable.payload,
- result: toolExecutionLogsTable.result,
- status: toolExecutionLogsTable.status,
- error_message: toolExecutionLogsTable.error_message,
- execution_time_ms: toolExecutionLogsTable.execution_time_ms,
- created_at: toolExecutionLogsTable.created_at,
- mcp_server_name: mcpServersTable.name,
- })
- .from(toolExecutionLogsTable)
- .leftJoin(
- mcpServersTable,
- eq(toolExecutionLogsTable.mcp_server_uuid, mcpServersTable.uuid)
- )
- .where(whereClause)
- .orderBy(desc(toolExecutionLogsTable.id))
- .limit(limit)
- .offset(offset);
-
- return {
- logs: logs.map((log) => ({
- ...log,
- mcp_server_name: log.mcp_server_name || 'Unknown Server',
- })) as ToolExecutionLog[],
- total: count,
- };
-}
-
-export async function getToolNames(
- currentProfileUuid: string
-): Promise<string[]> {
- // Return empty array if profile UUID is empty
- if (!currentProfileUuid) {
- return [];
- }
-
- // Get allowed MCP servers
- const allowedMcpServers = await db
- .select({ uuid: mcpServersTable.uuid })
- .from(mcpServersTable)
- .where(eq(mcpServersTable.profile_uuid, currentProfileUuid));
-
- const allowedMcpServerUuids = allowedMcpServers.map((server) => server.uuid);
-
- if (allowedMcpServerUuids.length === 0) {
- return [];
- }
-
- // Get unique tool names
- const result = await db
- .selectDistinct({ tool_name: toolExecutionLogsTable.tool_name })
- .from(toolExecutionLogsTable)
- .where(
- inArray(toolExecutionLogsTable.mcp_server_uuid, allowedMcpServerUuids)
- )
- .orderBy(toolExecutionLogsTable.tool_name);
-
- return result.map((r) => r.tool_name);
-}
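A usage sketch for the log queries above; both calls are scoped to the servers owned by the given profile, and the status filter assumes the ToolExecutionStatus enum exposes the PENDING member used elsewhere in this diff.

```ts
import { getToolExecutionLogs, getToolNames } from '@/app/actions/tool-execution-logs';
import { ToolExecutionStatus } from '@/db/schema';

async function demoToolLogs(currentProfileUuid: string) {
  // First page of still-pending executions, newest first.
  const { logs, total } = await getToolExecutionLogs({
    currentProfileUuid,
    limit: 50,
    offset: 0,
    statuses: [ToolExecutionStatus.PENDING],
  });
  console.log(`showing ${logs.length} of ${total} pending executions`);

  // Distinct tool names for the same profile, e.g. to populate a filter dropdown.
  const toolNames = await getToolNames(currentProfileUuid);
  console.log(toolNames);
}
```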
diff --git a/app/actions/tools.ts b/app/actions/tools.ts
deleted file mode 100644
index ceb813f3..00000000
--- a/app/actions/tools.ts
+++ /dev/null
@@ -1,68 +0,0 @@
-'use server';
-
-import { eq, sql } from 'drizzle-orm';
-
-import { db } from '@/db';
-import { ToggleStatus, toolsTable } from '@/db/schema';
-import { Tool } from '@/types/tool';
-
-export async function getToolsByMcpServerUuid(
- mcpServerUuid: string
-): Promise<Tool[]> {
- const tools = await db
- .select()
- .from(toolsTable)
- .where(eq(toolsTable.mcp_server_uuid, mcpServerUuid))
- .orderBy(toolsTable.name);
-
- return tools as Tool[];
-}
-
-export async function toggleToolStatus(
- toolUuid: string,
- status: ToggleStatus
-): Promise<void> {
- await db
- .update(toolsTable)
- .set({ status: status })
- .where(eq(toolsTable.uuid, toolUuid));
-}
-
-export async function saveToolsToDatabase(
- mcpServerUuid: string,
- tools: Array<{
- name: string;
- description?: string;
- inputSchema: Record<string, any>;
- }>
-): Promise<{ success: boolean; count: number }> {
- if (!tools || tools.length === 0) {
- return { success: true, count: 0 };
- }
-
- // Format tools for database insertion
- const toolsToInsert = tools.map((tool) => ({
- name: tool.name,
- description: tool.description || '',
- toolSchema: {
- type: 'object' as const,
- ...tool.inputSchema,
- },
- mcp_server_uuid: mcpServerUuid,
- }));
-
- // Batch insert all tools with upsert
- const results = await db
- .insert(toolsTable)
- .values(toolsToInsert)
- .onConflictDoUpdate({
- target: [toolsTable.mcp_server_uuid, toolsTable.name],
- set: {
- description: sql`excluded.description`,
- toolSchema: sql`excluded.tool_schema`,
- },
- })
- .returning();
-
- return { success: true, count: results.length };
-}
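saveToolsToDatabase takes the tool list as reported by an MCP server's tools/list response and upserts on the (mcp_server_uuid, name) pair. A short sketch; the example tool and the ToggleStatus member are assumptions, not values from this diff.

```ts
import {
  getToolsByMcpServerUuid,
  saveToolsToDatabase,
  toggleToolStatus,
} from '@/app/actions/tools';
import { ToggleStatus } from '@/db/schema';

async function demoTools(mcpServerUuid: string) {
  // Upsert one tool; inputSchema is merged into a { type: 'object', ... } toolSchema.
  await saveToolsToDatabase(mcpServerUuid, [
    {
      name: 'get_weather',
      description: 'Fetch the current weather for a city',
      inputSchema: { properties: { city: { type: 'string' } }, required: ['city'] },
    },
  ]);

  const tools = await getToolsByMcpServerUuid(mcpServerUuid);
  // Disable a tool without deleting it (assumes ToggleStatus has an INACTIVE member).
  await toggleToolStatus(tools[0].uuid, ToggleStatus.INACTIVE);
}
```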
diff --git a/app/api/auth.ts b/app/api/auth.ts
deleted file mode 100644
index eb67f487..00000000
--- a/app/api/auth.ts
+++ /dev/null
@@ -1,50 +0,0 @@
-import { eq } from 'drizzle-orm';
-import { NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import { apiKeysTable } from '@/db/schema';
-
-import { getProjectActiveProfile } from '../actions/profiles';
-
-export async function authenticateApiKey(request: Request) {
- const authHeader = request.headers.get('authorization');
- if (!authHeader || !authHeader.startsWith('Bearer ')) {
- return {
- error: NextResponse.json(
- { error: 'Authorization header with Bearer token is required' },
- { status: 401 }
- ),
- };
- }
-
- const apiKey = authHeader.substring(7).trim(); // Remove 'Bearer ' prefix
- const apiKeyRecord = await db
- .select()
- .from(apiKeysTable)
- .where(eq(apiKeysTable.api_key, apiKey))
- .limit(1);
-
- if (apiKeyRecord.length === 0) {
- return {
- error: NextResponse.json({ error: 'Invalid API key' }, { status: 401 }),
- };
- }
-
- const activeProfile = await getProjectActiveProfile(
- apiKeyRecord[0].project_uuid
- );
- if (!activeProfile) {
- return {
- error: NextResponse.json(
- { error: 'No active profile found for this API key' },
- { status: 401 }
- ),
- };
- }
-
- return {
- success: true,
- apiKey: apiKeyRecord[0],
- activeProfile,
- };
-}
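Every API route removed in this diff follows the same pattern around authenticateApiKey: bail out with auth.error before touching the database, then scope all queries to auth.activeProfile. A minimal illustrative handler; the import alias is an assumption (the routes here import it via a relative path).

```ts
import { NextResponse } from 'next/server';

import { authenticateApiKey } from '@/app/api/auth';

export async function GET(request: Request) {
  // Rejects missing or invalid `Authorization: Bearer sk_mt_...` headers with a 401.
  const auth = await authenticateApiKey(request);
  if (auth.error) return auth.error;

  // Everything below runs only for a valid key with an active profile.
  return NextResponse.json({
    profileUuid: auth.activeProfile.uuid,
    project: auth.apiKey.project_uuid,
  });
}
```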
diff --git a/app/api/custom-mcp-servers/route.ts b/app/api/custom-mcp-servers/route.ts
deleted file mode 100644
index da9e6171..00000000
--- a/app/api/custom-mcp-servers/route.ts
+++ /dev/null
@@ -1,87 +0,0 @@
-import { and, desc, eq, or } from 'drizzle-orm';
-import { NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import {
- codesTable,
- customMcpServersTable,
- McpServerStatus,
-} from '@/db/schema';
-
-import { authenticateApiKey } from '../auth';
-
-export async function GET(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const customMcpServers = await db
- .select({
- uuid: customMcpServersTable.uuid,
- name: customMcpServersTable.name,
- description: customMcpServersTable.description,
- code_uuid: customMcpServersTable.code_uuid,
- additionalArgs: customMcpServersTable.additionalArgs,
- env: customMcpServersTable.env,
- created_at: customMcpServersTable.created_at,
- profile_uuid: customMcpServersTable.profile_uuid,
- status: customMcpServersTable.status,
- code: codesTable.code,
- codeFileName: codesTable.fileName,
- })
- .from(customMcpServersTable)
- .leftJoin(
- codesTable,
- eq(customMcpServersTable.code_uuid, codesTable.uuid)
- )
- .where(
- and(
- eq(customMcpServersTable.profile_uuid, auth.activeProfile.uuid),
- or(
- eq(customMcpServersTable.status, McpServerStatus.ACTIVE),
- eq(customMcpServersTable.status, McpServerStatus.INACTIVE)
- )
- )
- )
- .orderBy(desc(customMcpServersTable.created_at));
-
- return NextResponse.json(customMcpServers);
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to fetch custom MCP servers' },
- { status: 500 }
- );
- }
-}
-
-export async function POST(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const body = await request.json();
- const { name, description, code_uuid, additionalArgs, env } = body;
-
- const [newCustomMcpServer] = await db
- .insert(customMcpServersTable)
- .values({
- name,
- description,
- code_uuid,
- additionalArgs,
- env,
- status: McpServerStatus.ACTIVE,
- profile_uuid: auth.activeProfile.uuid,
- })
- .returning();
-
- return NextResponse.json(newCustomMcpServer);
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to create custom MCP server' },
- { status: 500 }
- );
- }
-}
diff --git a/app/api/mcp-servers/route.ts b/app/api/mcp-servers/route.ts
deleted file mode 100644
index 3f2cc561..00000000
--- a/app/api/mcp-servers/route.ts
+++ /dev/null
@@ -1,81 +0,0 @@
-import { and, eq } from 'drizzle-orm';
-import { NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import {
- mcpServersTable,
- McpServerStatus,
- oauthSessionsTable,
-} from '@/db/schema';
-
-import { authenticateApiKey } from '../auth';
-
-export async function GET(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const activeMcpServers = await db
- .select({
- server: mcpServersTable,
- tokens: oauthSessionsTable.tokens,
- })
- .from(mcpServersTable)
- .leftJoin(
- oauthSessionsTable,
- eq(mcpServersTable.uuid, oauthSessionsTable.mcp_server_uuid)
- )
- .where(
- and(
- eq(mcpServersTable.status, McpServerStatus.ACTIVE),
- eq(mcpServersTable.profile_uuid, auth.activeProfile.uuid)
- )
- );
-
- // Map the result to include tokens if they exist
- const result = activeMcpServers.map(({ server, tokens }) => ({
- ...server,
- oauth_tokens: tokens || null,
- }));
-
- return NextResponse.json(result);
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to fetch active MCP servers' },
- { status: 500 }
- );
- }
-}
-
-export async function POST(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const body = await request.json();
- const { uuid, name, description, command, args, env, status } = body;
-
- const newMcpServer = await db
- .insert(mcpServersTable)
- .values({
- uuid,
- name,
- description,
- command,
- args,
- env,
- status,
- profile_uuid: auth.activeProfile.uuid,
- })
- .returning();
-
- return NextResponse.json(newMcpServer[0]);
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to create MCP server' },
- { status: 500 }
- );
- }
-}
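The removed /api/mcp-servers route authenticates with the same Bearer scheme and returns each active server joined with its OAuth tokens. A hedged client sketch; the base URL and environment variable names are assumptions, not values from this diff.

```ts
const BASE_URL = process.env.METAMCP_URL ?? 'http://localhost:3000';
const API_KEY = process.env.METAMCP_API_KEY ?? '';

// List active MCP servers for the API key's active profile.
async function listActiveMcpServers() {
  const res = await fetch(`${BASE_URL}/api/mcp-servers`, {
    headers: { Authorization: `Bearer ${API_KEY}` },
  });
  if (!res.ok) throw new Error(`GET /api/mcp-servers failed with ${res.status}`);
  // Each entry is the server row plus oauth_tokens (null when no OAuth session exists).
  return res.json();
}
```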
diff --git a/app/api/profile-capabilities/route.ts b/app/api/profile-capabilities/route.ts
deleted file mode 100644
index 67564d55..00000000
--- a/app/api/profile-capabilities/route.ts
+++ /dev/null
@@ -1,36 +0,0 @@
-import { eq } from 'drizzle-orm';
-import { NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import { profilesTable } from '@/db/schema';
-
-import { authenticateApiKey } from '../auth';
-
-export async function GET(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const profile = await db
- .select({
- enabled_capabilities: profilesTable.enabled_capabilities,
- })
- .from(profilesTable)
- .where(eq(profilesTable.uuid, auth.activeProfile.uuid))
- .limit(1);
-
- if (profile.length === 0) {
- return NextResponse.json({ error: 'Profile not found' }, { status: 404 });
- }
-
- return NextResponse.json({
- profileCapabilities: profile[0].enabled_capabilities,
- });
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to fetch profile capabilities' },
- { status: 500 }
- );
- }
-}
diff --git a/app/api/tool-execution-logs/[id]/route.ts b/app/api/tool-execution-logs/[id]/route.ts
deleted file mode 100644
index 7a4ad1d8..00000000
--- a/app/api/tool-execution-logs/[id]/route.ts
+++ /dev/null
@@ -1,68 +0,0 @@
-import { eq } from 'drizzle-orm';
-import { NextRequest, NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import { toolExecutionLogsTable } from '@/db/schema';
-
-import { authenticateApiKey } from '../../auth';
-
-export async function PUT(
- request: NextRequest,
- { params }: { params: Promise<{ id: string }> }
-) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const { id: logId } = await params;
-
- if (!logId || isNaN(parseInt(logId))) {
- return NextResponse.json(
- { error: 'Valid log ID is required' },
- { status: 400 }
- );
- }
-
- const body = await request.json();
- const { result, status, error_message, execution_time_ms } = body;
-
- // Create update object with only the fields provided
- const updateData: Partial<typeof toolExecutionLogsTable.$inferInsert> = {};
-
- if (result !== undefined) updateData.result = result;
- if (status !== undefined) updateData.status = status;
- if (error_message !== undefined) updateData.error_message = error_message;
- if (execution_time_ms !== undefined)
- updateData.execution_time_ms = execution_time_ms;
-
- // If no fields to update, return error
- if (Object.keys(updateData).length === 0) {
- return NextResponse.json(
- { error: 'No fields to update' },
- { status: 400 }
- );
- }
-
- // Update the tool execution log entry
- const updatedLog = await db
- .update(toolExecutionLogsTable)
- .set(updateData)
- .where(eq(toolExecutionLogsTable.id, parseInt(logId)))
- .returning();
-
- if (updatedLog.length === 0) {
- return NextResponse.json(
- { error: 'Tool execution log not found' },
- { status: 404 }
- );
- }
-
- return NextResponse.json(updatedLog[0]);
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to update tool execution log' },
- { status: 500 }
- );
- }
-}
diff --git a/app/api/tool-execution-logs/route.ts b/app/api/tool-execution-logs/route.ts
deleted file mode 100644
index 57120655..00000000
--- a/app/api/tool-execution-logs/route.ts
+++ /dev/null
@@ -1,80 +0,0 @@
-import { and, eq } from 'drizzle-orm';
-import { NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import {
- mcpServersTable,
- toolExecutionLogsTable,
- ToolExecutionStatus,
-} from '@/db/schema';
-
-import { authenticateApiKey } from '../auth';
-
-export async function POST(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const body = await request.json();
- const {
- mcp_server_uuid,
- tool_name,
- payload,
- result,
- status,
- error_message,
- execution_time_ms,
- } = body;
-
- // Validate required fields
- if (!tool_name) {
- return NextResponse.json(
- { error: 'Tool name is required' },
- { status: 400 }
- );
- }
-
- // If mcp_server_uuid is provided, verify it belongs to the authenticated user's active profile
- if (mcp_server_uuid) {
- const mcpServer = await db
- .select()
- .from(mcpServersTable)
- .where(
- and(
- eq(mcpServersTable.uuid, mcp_server_uuid),
- eq(mcpServersTable.profile_uuid, auth.activeProfile.uuid)
- )
- )
- .limit(1);
-
- if (mcpServer.length === 0) {
- return NextResponse.json(
- { error: 'MCP server not found or does not belong to your profile' },
- { status: 404 }
- );
- }
- }
-
- // Create new tool execution log entry
- const newToolExecutionLog = await db
- .insert(toolExecutionLogsTable)
- .values({
- mcp_server_uuid: mcp_server_uuid || null,
- tool_name,
- payload: payload || {},
- result: result || null,
- status: status || ToolExecutionStatus.PENDING,
- error_message: error_message || null,
- execution_time_ms: execution_time_ms || null,
- })
- .returning();
-
- return NextResponse.json(newToolExecutionLog[0]);
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to create tool execution log' },
- { status: 500 }
- );
- }
-}
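Together with the [id] route above, this gives tool logging a two-step lifecycle: create a PENDING entry before the call, then PUT the outcome afterwards. A hedged end-to-end sketch; the base URL, environment variable, and the SUCCESS status value are assumptions.

```ts
const BASE = process.env.METAMCP_URL ?? 'http://localhost:3000';
const HEADERS = {
  Authorization: `Bearer ${process.env.METAMCP_API_KEY ?? ''}`,
  'Content-Type': 'application/json',
};

async function recordToolExecution(mcpServerUuid: string) {
  // 1. Register the execution as PENDING before invoking the tool.
  const createRes = await fetch(`${BASE}/api/tool-execution-logs`, {
    method: 'POST',
    headers: HEADERS,
    body: JSON.stringify({
      mcp_server_uuid: mcpServerUuid,
      tool_name: 'get_weather',
      payload: { city: 'Berlin' },
    }),
  });
  const log = await createRes.json();

  // 2. Once the tool returns, finalize the entry via /api/tool-execution-logs/[id].
  await fetch(`${BASE}/api/tool-execution-logs/${log.id}`, {
    method: 'PUT',
    headers: HEADERS,
    body: JSON.stringify({
      status: 'SUCCESS',
      result: { temperature_c: 21 },
      execution_time_ms: '123',
    }),
  });
}
```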
diff --git a/app/api/tools/route.ts b/app/api/tools/route.ts
deleted file mode 100644
index 2c2dbb03..00000000
--- a/app/api/tools/route.ts
+++ /dev/null
@@ -1,133 +0,0 @@
-import { sql } from 'drizzle-orm';
-import { NextResponse } from 'next/server';
-
-import { db } from '@/db';
-import { mcpServersTable, toolsTable } from '@/db/schema';
-
-import { authenticateApiKey } from '../auth';
-
-export async function POST(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const body = await request.json();
- const { tools } = body;
-
- // Validate that tools is an array
- if (!Array.isArray(tools) || tools.length === 0) {
- return NextResponse.json(
- { error: 'Request must include a non-empty array of tools' },
- { status: 400 }
- );
- }
-
- // Validate required fields for all tools and prepare for batch insertion
- const validTools = [];
- const errors = [];
-
- for (const tool of tools) {
- const { name, description, toolSchema, mcp_server_uuid } = tool;
-
- // Validate required fields for each tool
- if (!name || !toolSchema || !mcp_server_uuid) {
- errors.push({
- tool,
- error:
- 'Missing required fields: name, toolSchema, or mcp_server_uuid',
- });
- continue;
- }
-
- validTools.push({
- name,
- description,
- toolSchema,
- mcp_server_uuid,
- });
- }
-
- // Batch insert all valid tools with upsert
- let results: any[] = [];
- if (validTools.length > 0) {
- try {
- results = await db
- .insert(toolsTable)
- .values(validTools)
- .onConflictDoUpdate({
- target: [toolsTable.mcp_server_uuid, toolsTable.name],
- set: {
- description: sql`excluded.description`,
- toolSchema: sql`excluded.tool_schema`,
- },
- })
- .returning();
- } catch (error: any) {
- // Handle database errors for the batch operation
- console.error('Database error:', error);
- return NextResponse.json(
- {
- error: 'Failed to process tools request',
- details:
- error.code === '23503'
- ? 'One or more MCP servers not found or not associated with the active profile'
- : 'Database error occurred',
- },
- { status: 500 }
- );
- }
- }
-
- return NextResponse.json({
- results,
- errors,
- success: results.length > 0,
- failureCount: errors.length,
- successCount: results.length,
- });
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to process tools request' },
- { status: 500 }
- );
- }
-}
-
-export async function GET(request: Request) {
- try {
- const auth = await authenticateApiKey(request);
- if (auth.error) return auth.error;
-
- const { searchParams } = new URL(request.url);
- const status = searchParams.get('status');
-
- // Join with mcp_servers table to filter by profile_uuid
- const query = db
- .select({
- mcp_server_uuid: toolsTable.mcp_server_uuid,
- name: toolsTable.name,
- status: toolsTable.status,
- })
- .from(toolsTable)
- .innerJoin(
- mcpServersTable,
- sql`${toolsTable.mcp_server_uuid} = ${mcpServersTable.uuid}`
- )
- .where(
- sql`${mcpServersTable.profile_uuid} = ${auth.activeProfile.uuid}${
- status ? sql` AND ${toolsTable.status} = ${status}` : sql``
- }`
- );
-
- const results = await query;
-
- return NextResponse.json({ results });
- } catch (error) {
- console.error(error);
- return NextResponse.json(
- { error: 'Failed to fetch tools' },
- { status: 500 }
- );
- }
-}
diff --git a/app/globals.css b/app/globals.css
deleted file mode 100644
index d4528b52..00000000
--- a/app/globals.css
+++ /dev/null
@@ -1,88 +0,0 @@
-@tailwind base;
-@tailwind components;
-@tailwind utilities;
-
-body {
- font-family: Arial, Helvetica, sans-serif;
-}
-
-@layer base {
- :root {
- --background: 0 0% 100%;
- --foreground: 240 10% 3.9%;
- --card: 0 0% 100%;
- --card-foreground: 240 10% 3.9%;
- --popover: 0 0% 100%;
- --popover-foreground: 240 10% 3.9%;
- --primary: 240 5.9% 10%;
- --primary-foreground: 0 0% 98%;
- --secondary: 240 4.8% 95.9%;
- --secondary-foreground: 240 5.9% 10%;
- --muted: 240 4.8% 95.9%;
- --muted-foreground: 240 3.8% 46.1%;
- --accent: 240 4.8% 95.9%;
- --accent-foreground: 240 5.9% 10%;
- --destructive: 0 84.2% 60.2%;
- --destructive-foreground: 0 0% 98%;
- --border: 240 5.9% 90%;
- --input: 240 5.9% 90%;
- --ring: 240 10% 3.9%;
- --chart-1: 12 76% 61%;
- --chart-2: 173 58% 39%;
- --chart-3: 197 37% 24%;
- --chart-4: 43 74% 66%;
- --chart-5: 27 87% 67%;
- --radius: 0.5rem;
- --sidebar-background: 0 0% 98%;
- --sidebar-foreground: 240 5.3% 26.1%;
- --sidebar-primary: 240 5.9% 10%;
- --sidebar-primary-foreground: 0 0% 98%;
- --sidebar-accent: 240 4.8% 95.9%;
- --sidebar-accent-foreground: 240 5.9% 10%;
- --sidebar-border: 220 13% 91%;
- --sidebar-ring: 217.2 91.2% 59.8%;
- }
- .dark {
- --background: 240 10% 3.9%;
- --foreground: 0 0% 98%;
- --card: 240 10% 3.9%;
- --card-foreground: 0 0% 98%;
- --popover: 240 10% 3.9%;
- --popover-foreground: 0 0% 98%;
- --primary: 0 0% 98%;
- --primary-foreground: 240 5.9% 10%;
- --secondary: 240 3.7% 15.9%;
- --secondary-foreground: 0 0% 98%;
- --muted: 240 3.7% 15.9%;
- --muted-foreground: 240 5% 64.9%;
- --accent: 240 3.7% 15.9%;
- --accent-foreground: 0 0% 98%;
- --destructive: 0 62.8% 30.6%;
- --destructive-foreground: 0 0% 98%;
- --border: 240 3.7% 15.9%;
- --input: 240 3.7% 15.9%;
- --ring: 240 4.9% 83.9%;
- --chart-1: 220 70% 50%;
- --chart-2: 160 60% 45%;
- --chart-3: 30 80% 55%;
- --chart-4: 280 65% 60%;
- --chart-5: 340 75% 55%;
- --sidebar-background: 240 5.9% 10%;
- --sidebar-foreground: 240 4.8% 95.9%;
- --sidebar-primary: 224.3 76.3% 48%;
- --sidebar-primary-foreground: 0 0% 100%;
- --sidebar-accent: 240 3.7% 15.9%;
- --sidebar-accent-foreground: 240 4.8% 95.9%;
- --sidebar-border: 240 3.7% 15.9%;
- --sidebar-ring: 217.2 91.2% 59.8%;
- }
-}
-
-@layer base {
- * {
- @apply border-border;
- }
- body {
- @apply bg-background text-foreground;
- }
-}
diff --git a/app/layout.tsx b/app/layout.tsx
deleted file mode 100644
index 4e1f563d..00000000
--- a/app/layout.tsx
+++ /dev/null
@@ -1,37 +0,0 @@
-import './globals.css';
-
-import type { Metadata } from 'next';
-import { Geist, Geist_Mono } from 'next/font/google';
-
-import { Toaster } from '@/components/ui/toaster';
-
-const geistSans = Geist({
- variable: '--font-geist-sans',
- subsets: ['latin'],
-});
-
-const geistMono = Geist_Mono({
- variable: '--font-geist-mono',
- subsets: ['latin'],
-});
-
-export const metadata: Metadata = {
- title: 'MetaMCP Dashboard',
- description: 'The internal tool for MetaMCP.',
-};
-
-export default function RootLayout({
- children,
-}: Readonly<{
- children: React.ReactNode;
-}>) {
-  return (
-    <html lang="en">
-      <body className={`${geistSans.variable} ${geistMono.variable}`}>
-        {children}
-        <Toaster />
-      </body>
-    </html>
-  );
-}
diff --git a/app/oauth/callback/page.tsx b/app/oauth/callback/page.tsx
deleted file mode 100644
index 2ba3df06..00000000
--- a/app/oauth/callback/page.tsx
+++ /dev/null
@@ -1,13 +0,0 @@
-'use client';
-
-import { Suspense } from 'react';
-
-import OAuthCallback from '@/components/OAuthCallback';
-
-export default function OAuthCallbackPage() {
-  return (
-    <Suspense fallback={<div>Loading...</div>}>
-      <OAuthCallback />
-    </Suspense>
-  );
-}
diff --git a/app/page.tsx b/app/page.tsx
deleted file mode 100644
index 523ac89d..00000000
--- a/app/page.tsx
+++ /dev/null
@@ -1,5 +0,0 @@
-import { redirect } from 'next/navigation';
-
-export default function Home() {
- redirect('/mcp-servers');
-}
diff --git a/apps/backend/.gitignore b/apps/backend/.gitignore
new file mode 100644
index 00000000..8bb63f54
--- /dev/null
+++ b/apps/backend/.gitignore
@@ -0,0 +1,12 @@
+node_modules/
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+.env
+.env.local
+.env.development.local
+.env.test.local
+.env.production.local
+*.log
+dist/
+*.tsbuildinfo
\ No newline at end of file
diff --git a/apps/backend/drizzle.config.ts b/apps/backend/drizzle.config.ts
new file mode 100644
index 00000000..f8141a31
--- /dev/null
+++ b/apps/backend/drizzle.config.ts
@@ -0,0 +1,12 @@
+/* eslint-disable @typescript-eslint/no-non-null-assertion */
+
+import { defineConfig } from "drizzle-kit";
+export default defineConfig({
+ out: "./drizzle",
+ schema: "./src/db/schema.ts",
+ dialect: "postgresql",
+ dbCredentials: {
+ // @ts-expect-error outside dir
+ url: process.env.DATABASE_URL!,
+ },
+});
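The new backend config only tells drizzle-kit where the schema and generated SQL live; applying the migrations at runtime still needs a driver. A minimal sketch using the node-postgres driver (whether the backend actually uses pg, and where this runner would live, are assumptions).

```ts
import { drizzle } from 'drizzle-orm/node-postgres';
import { migrate } from 'drizzle-orm/node-postgres/migrator';
import { Pool } from 'pg';

async function runMigrations() {
  const pool = new Pool({ connectionString: process.env.DATABASE_URL });
  const db = drizzle(pool);
  // Applies everything under ./drizzle, e.g. 0000_greedy_tiger_shark.sql below.
  await migrate(db, { migrationsFolder: './drizzle' });
  await pool.end();
}

runMigrations().catch((error) => {
  console.error('Migration failed:', error);
  process.exit(1);
});
```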
diff --git a/apps/backend/drizzle/0000_greedy_tiger_shark.sql b/apps/backend/drizzle/0000_greedy_tiger_shark.sql
new file mode 100644
index 00000000..e809d7fe
--- /dev/null
+++ b/apps/backend/drizzle/0000_greedy_tiger_shark.sql
@@ -0,0 +1,171 @@
+CREATE TYPE "public"."mcp_server_status" AS ENUM('ACTIVE', 'INACTIVE');--> statement-breakpoint
+CREATE TYPE "public"."mcp_server_type" AS ENUM('STDIO', 'SSE', 'STREAMABLE_HTTP');--> statement-breakpoint
+CREATE TABLE "accounts" (
+ "id" text PRIMARY KEY NOT NULL,
+ "account_id" text NOT NULL,
+ "provider_id" text NOT NULL,
+ "user_id" text NOT NULL,
+ "access_token" text,
+ "refresh_token" text,
+ "id_token" text,
+ "access_token_expires_at" timestamp with time zone,
+ "refresh_token_expires_at" timestamp with time zone,
+ "scope" text,
+ "password" text,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL
+);
+--> statement-breakpoint
+CREATE TABLE "api_keys" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "name" text NOT NULL,
+ "key" text NOT NULL,
+ "user_id" text NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "is_active" boolean DEFAULT true NOT NULL,
+ CONSTRAINT "api_keys_key_unique" UNIQUE("key"),
+ CONSTRAINT "api_keys_name_per_user_idx" UNIQUE("user_id","name")
+);
+--> statement-breakpoint
+CREATE TABLE "config" (
+ "id" text PRIMARY KEY NOT NULL,
+ "value" text NOT NULL,
+ "description" text,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL
+);
+--> statement-breakpoint
+CREATE TABLE "endpoints" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "name" text NOT NULL,
+ "description" text,
+ "namespace_uuid" uuid NOT NULL,
+ "enable_api_key_auth" boolean DEFAULT true NOT NULL,
+ "use_query_param_auth" boolean DEFAULT false NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
+ CONSTRAINT "endpoints_name_unique" UNIQUE("name"),
+ CONSTRAINT "endpoints_name_unique_idx" UNIQUE("name")
+);
+--> statement-breakpoint
+CREATE TABLE "mcp_servers" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "name" text NOT NULL,
+ "description" text,
+ "type" "mcp_server_type" DEFAULT 'STDIO' NOT NULL,
+ "command" text,
+ "args" text[] DEFAULT '{}'::text[] NOT NULL,
+ "env" jsonb DEFAULT '{}'::jsonb NOT NULL,
+ "url" text,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "bearer_token" text,
+ CONSTRAINT "mcp_servers_name_unique_idx" UNIQUE("name")
+);
+--> statement-breakpoint
+CREATE TABLE "namespace_server_mappings" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "namespace_uuid" uuid NOT NULL,
+ "mcp_server_uuid" uuid NOT NULL,
+ "status" "mcp_server_status" DEFAULT 'ACTIVE' NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ CONSTRAINT "namespace_server_mappings_unique_idx" UNIQUE("namespace_uuid","mcp_server_uuid")
+);
+--> statement-breakpoint
+CREATE TABLE "namespace_tool_mappings" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "namespace_uuid" uuid NOT NULL,
+ "tool_uuid" uuid NOT NULL,
+ "mcp_server_uuid" uuid NOT NULL,
+ "status" "mcp_server_status" DEFAULT 'ACTIVE' NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ CONSTRAINT "namespace_tool_mappings_unique_idx" UNIQUE("namespace_uuid","tool_uuid")
+);
+--> statement-breakpoint
+CREATE TABLE "namespaces" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "name" text NOT NULL,
+ "description" text,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
+ CONSTRAINT "namespaces_name_unique" UNIQUE("name")
+);
+--> statement-breakpoint
+CREATE TABLE "oauth_sessions" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "mcp_server_uuid" uuid NOT NULL,
+ "client_information" jsonb DEFAULT '{}'::jsonb NOT NULL,
+ "tokens" jsonb,
+ "code_verifier" text,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
+ CONSTRAINT "oauth_sessions_unique_per_server_idx" UNIQUE("mcp_server_uuid")
+);
+--> statement-breakpoint
+CREATE TABLE "sessions" (
+ "id" text PRIMARY KEY NOT NULL,
+ "expires_at" timestamp with time zone NOT NULL,
+ "token" text NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "ip_address" text,
+ "user_agent" text,
+ "user_id" text NOT NULL,
+ CONSTRAINT "sessions_token_unique" UNIQUE("token")
+);
+--> statement-breakpoint
+CREATE TABLE "tools" (
+ "uuid" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
+ "name" text NOT NULL,
+ "description" text,
+ "tool_schema" jsonb NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "mcp_server_uuid" uuid NOT NULL,
+ CONSTRAINT "tools_unique_tool_name_per_server_idx" UNIQUE("mcp_server_uuid","name")
+);
+--> statement-breakpoint
+CREATE TABLE "users" (
+ "id" text PRIMARY KEY NOT NULL,
+ "name" text NOT NULL,
+ "email" text NOT NULL,
+ "email_verified" boolean DEFAULT false NOT NULL,
+ "image" text,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL,
+ CONSTRAINT "users_email_unique" UNIQUE("email")
+);
+--> statement-breakpoint
+CREATE TABLE "verifications" (
+ "id" text PRIMARY KEY NOT NULL,
+ "identifier" text NOT NULL,
+ "value" text NOT NULL,
+ "expires_at" timestamp with time zone NOT NULL,
+ "created_at" timestamp with time zone DEFAULT now() NOT NULL,
+ "updated_at" timestamp with time zone DEFAULT now() NOT NULL
+);
+--> statement-breakpoint
+ALTER TABLE "accounts" ADD CONSTRAINT "accounts_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "api_keys" ADD CONSTRAINT "api_keys_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "endpoints" ADD CONSTRAINT "endpoints_namespace_uuid_namespaces_uuid_fk" FOREIGN KEY ("namespace_uuid") REFERENCES "public"."namespaces"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "namespace_server_mappings" ADD CONSTRAINT "namespace_server_mappings_namespace_uuid_namespaces_uuid_fk" FOREIGN KEY ("namespace_uuid") REFERENCES "public"."namespaces"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "namespace_server_mappings" ADD CONSTRAINT "namespace_server_mappings_mcp_server_uuid_mcp_servers_uuid_fk" FOREIGN KEY ("mcp_server_uuid") REFERENCES "public"."mcp_servers"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "namespace_tool_mappings" ADD CONSTRAINT "namespace_tool_mappings_namespace_uuid_namespaces_uuid_fk" FOREIGN KEY ("namespace_uuid") REFERENCES "public"."namespaces"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "namespace_tool_mappings" ADD CONSTRAINT "namespace_tool_mappings_tool_uuid_tools_uuid_fk" FOREIGN KEY ("tool_uuid") REFERENCES "public"."tools"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "namespace_tool_mappings" ADD CONSTRAINT "namespace_tool_mappings_mcp_server_uuid_mcp_servers_uuid_fk" FOREIGN KEY ("mcp_server_uuid") REFERENCES "public"."mcp_servers"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "oauth_sessions" ADD CONSTRAINT "oauth_sessions_mcp_server_uuid_mcp_servers_uuid_fk" FOREIGN KEY ("mcp_server_uuid") REFERENCES "public"."mcp_servers"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "sessions" ADD CONSTRAINT "sessions_user_id_users_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."users"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+ALTER TABLE "tools" ADD CONSTRAINT "tools_mcp_server_uuid_mcp_servers_uuid_fk" FOREIGN KEY ("mcp_server_uuid") REFERENCES "public"."mcp_servers"("uuid") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
+CREATE INDEX "api_keys_user_id_idx" ON "api_keys" USING btree ("user_id");--> statement-breakpoint
+CREATE INDEX "api_keys_key_idx" ON "api_keys" USING btree ("key");--> statement-breakpoint
+CREATE INDEX "api_keys_is_active_idx" ON "api_keys" USING btree ("is_active");--> statement-breakpoint
+CREATE INDEX "endpoints_namespace_uuid_idx" ON "endpoints" USING btree ("namespace_uuid");--> statement-breakpoint
+CREATE INDEX "mcp_servers_type_idx" ON "mcp_servers" USING btree ("type");--> statement-breakpoint
+CREATE INDEX "namespace_server_mappings_namespace_uuid_idx" ON "namespace_server_mappings" USING btree ("namespace_uuid");--> statement-breakpoint
+CREATE INDEX "namespace_server_mappings_mcp_server_uuid_idx" ON "namespace_server_mappings" USING btree ("mcp_server_uuid");--> statement-breakpoint
+CREATE INDEX "namespace_server_mappings_status_idx" ON "namespace_server_mappings" USING btree ("status");--> statement-breakpoint
+CREATE INDEX "namespace_tool_mappings_namespace_uuid_idx" ON "namespace_tool_mappings" USING btree ("namespace_uuid");--> statement-breakpoint
+CREATE INDEX "namespace_tool_mappings_tool_uuid_idx" ON "namespace_tool_mappings" USING btree ("tool_uuid");--> statement-breakpoint
+CREATE INDEX "namespace_tool_mappings_mcp_server_uuid_idx" ON "namespace_tool_mappings" USING btree ("mcp_server_uuid");--> statement-breakpoint
+CREATE INDEX "namespace_tool_mappings_status_idx" ON "namespace_tool_mappings" USING btree ("status");--> statement-breakpoint
+CREATE INDEX "oauth_sessions_mcp_server_uuid_idx" ON "oauth_sessions" USING btree ("mcp_server_uuid");--> statement-breakpoint
+CREATE INDEX "tools_mcp_server_uuid_idx" ON "tools" USING btree ("mcp_server_uuid");
\ No newline at end of file
diff --git a/drizzle/meta/0016_snapshot.json b/apps/backend/drizzle/meta/0000_snapshot.json
similarity index 54%
rename from drizzle/meta/0016_snapshot.json
rename to apps/backend/drizzle/meta/0000_snapshot.json
index 1af275f4..191f1f6b 100644
--- a/drizzle/meta/0016_snapshot.json
+++ b/apps/backend/drizzle/meta/0000_snapshot.json
@@ -1,9 +1,116 @@
{
- "id": "c972e957-94b9-400f-b993-60c8934b3f94",
- "prevId": "032fac23-c733-4a95-bf70-33d2cace0d85",
+ "id": "f3c5452b-2b28-4b96-9d2c-c4da571f728a",
+ "prevId": "00000000-0000-0000-0000-000000000000",
"version": "7",
"dialect": "postgresql",
"tables": {
+ "public.accounts": {
+ "name": "accounts",
+ "schema": "",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "text",
+ "primaryKey": true,
+ "notNull": true
+ },
+ "account_id": {
+ "name": "account_id",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "provider_id": {
+ "name": "provider_id",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "user_id": {
+ "name": "user_id",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "access_token": {
+ "name": "access_token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "refresh_token": {
+ "name": "refresh_token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "id_token": {
+ "name": "id_token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "access_token_expires_at": {
+ "name": "access_token_expires_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "refresh_token_expires_at": {
+ "name": "refresh_token_expires_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "scope": {
+ "name": "scope",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "password": {
+ "name": "password",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "created_at": {
+ "name": "created_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ },
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {
+ "accounts_user_id_users_id_fk": {
+ "name": "accounts_user_id_users_id_fk",
+ "tableFrom": "accounts",
+ "tableTo": "users",
+ "columnsFrom": [
+ "user_id"
+ ],
+ "columnsTo": [
+ "id"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ }
+ },
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {},
+ "policies": {},
+ "checkConstraints": {},
+ "isRLSEnabled": false
+ },
"public.api_keys": {
"name": "api_keys",
"schema": "",
@@ -15,24 +122,23 @@
"notNull": true,
"default": "gen_random_uuid()"
},
- "project_uuid": {
- "name": "project_uuid",
- "type": "uuid",
+ "name": {
+ "name": "name",
+ "type": "text",
"primaryKey": false,
"notNull": true
},
- "api_key": {
- "name": "api_key",
+ "key": {
+ "name": "key",
"type": "text",
"primaryKey": false,
"notNull": true
},
- "name": {
- "name": "name",
+ "user_id": {
+ "name": "user_id",
"type": "text",
"primaryKey": false,
- "notNull": false,
- "default": "'API Key'"
+ "notNull": true
},
"created_at": {
"name": "created_at",
@@ -40,14 +146,51 @@
"primaryKey": false,
"notNull": true,
"default": "now()"
+ },
+ "is_active": {
+ "name": "is_active",
+ "type": "boolean",
+ "primaryKey": false,
+ "notNull": true,
+ "default": true
}
},
"indexes": {
- "api_keys_project_uuid_idx": {
- "name": "api_keys_project_uuid_idx",
+ "api_keys_user_id_idx": {
+ "name": "api_keys_user_id_idx",
+ "columns": [
+ {
+ "expression": "user_id",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
+ },
+ "api_keys_key_idx": {
+ "name": "api_keys_key_idx",
+ "columns": [
+ {
+ "expression": "key",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
+ },
+ "api_keys_is_active_idx": {
+ "name": "api_keys_is_active_idx",
"columns": [
{
- "expression": "project_uuid",
+ "expression": "is_active",
"isExpression": false,
"asc": true,
"nulls": "last"
@@ -60,48 +203,63 @@
}
},
"foreignKeys": {
- "api_keys_project_uuid_projects_uuid_fk": {
- "name": "api_keys_project_uuid_projects_uuid_fk",
+ "api_keys_user_id_users_id_fk": {
+ "name": "api_keys_user_id_users_id_fk",
"tableFrom": "api_keys",
- "tableTo": "projects",
+ "tableTo": "users",
"columnsFrom": [
- "project_uuid"
+ "user_id"
],
"columnsTo": [
- "uuid"
+ "id"
],
"onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
- "uniqueConstraints": {},
+ "uniqueConstraints": {
+ "api_keys_key_unique": {
+ "name": "api_keys_key_unique",
+ "nullsNotDistinct": false,
+ "columns": [
+ "key"
+ ]
+ },
+ "api_keys_name_per_user_idx": {
+ "name": "api_keys_name_per_user_idx",
+ "nullsNotDistinct": false,
+ "columns": [
+ "user_id",
+ "name"
+ ]
+ }
+ },
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
- "public.codes": {
- "name": "codes",
+ "public.config": {
+ "name": "config",
"schema": "",
"columns": {
- "uuid": {
- "name": "uuid",
- "type": "uuid",
+ "id": {
+ "name": "id",
+ "type": "text",
"primaryKey": true,
- "notNull": true,
- "default": "gen_random_uuid()"
+ "notNull": true
},
- "file_name": {
- "name": "file_name",
+ "value": {
+ "name": "value",
"type": "text",
"primaryKey": false,
"notNull": true
},
- "code": {
- "name": "code",
+ "description": {
+ "name": "description",
"type": "text",
"primaryKey": false,
- "notNull": true
+ "notNull": false
},
"created_at": {
"name": "created_at",
@@ -109,6 +267,13 @@
"primaryKey": false,
"notNull": true,
"default": "now()"
+ },
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
}
},
"indexes": {},
@@ -119,8 +284,8 @@
"checkConstraints": {},
"isRLSEnabled": false
},
- "public.custom_mcp_servers": {
- "name": "custom_mcp_servers",
+ "public.endpoints": {
+ "name": "endpoints",
"schema": "",
"columns": {
"uuid": {
@@ -142,25 +307,25 @@
"primaryKey": false,
"notNull": false
},
- "code_uuid": {
- "name": "code_uuid",
+ "namespace_uuid": {
+ "name": "namespace_uuid",
"type": "uuid",
"primaryKey": false,
"notNull": true
},
- "additional_args": {
- "name": "additional_args",
- "type": "text[]",
+ "enable_api_key_auth": {
+ "name": "enable_api_key_auth",
+ "type": "boolean",
"primaryKey": false,
"notNull": true,
- "default": "'{}'::text[]"
+ "default": true
},
- "env": {
- "name": "env",
- "type": "jsonb",
+ "use_query_param_auth": {
+ "name": "use_query_param_auth",
+ "type": "boolean",
"primaryKey": false,
"notNull": true,
- "default": "'{}'::jsonb"
+ "default": false
},
"created_at": {
"name": "created_at",
@@ -169,42 +334,20 @@
"notNull": true,
"default": "now()"
},
- "profile_uuid": {
- "name": "profile_uuid",
- "type": "uuid",
- "primaryKey": false,
- "notNull": true
- },
- "status": {
- "name": "status",
- "type": "mcp_server_status",
- "typeSchema": "public",
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
"primaryKey": false,
"notNull": true,
- "default": "'ACTIVE'"
+ "default": "now()"
}
},
"indexes": {
- "custom_mcp_servers_status_idx": {
- "name": "custom_mcp_servers_status_idx",
- "columns": [
- {
- "expression": "status",
- "isExpression": false,
- "asc": true,
- "nulls": "last"
- }
- ],
- "isUnique": false,
- "concurrently": false,
- "method": "btree",
- "with": {}
- },
- "custom_mcp_servers_profile_uuid_idx": {
- "name": "custom_mcp_servers_profile_uuid_idx",
+ "endpoints_namespace_uuid_idx": {
+ "name": "endpoints_namespace_uuid_idx",
"columns": [
{
- "expression": "profile_uuid",
+ "expression": "namespace_uuid",
"isExpression": false,
"asc": true,
"nulls": "last"
@@ -217,35 +360,37 @@
}
},
"foreignKeys": {
- "custom_mcp_servers_code_uuid_codes_uuid_fk": {
- "name": "custom_mcp_servers_code_uuid_codes_uuid_fk",
- "tableFrom": "custom_mcp_servers",
- "tableTo": "codes",
- "columnsFrom": [
- "code_uuid"
- ],
- "columnsTo": [
- "uuid"
- ],
- "onDelete": "no action",
- "onUpdate": "no action"
- },
- "custom_mcp_servers_profile_uuid_profiles_uuid_fk": {
- "name": "custom_mcp_servers_profile_uuid_profiles_uuid_fk",
- "tableFrom": "custom_mcp_servers",
- "tableTo": "profiles",
+ "endpoints_namespace_uuid_namespaces_uuid_fk": {
+ "name": "endpoints_namespace_uuid_namespaces_uuid_fk",
+ "tableFrom": "endpoints",
+ "tableTo": "namespaces",
"columnsFrom": [
- "profile_uuid"
+ "namespace_uuid"
],
"columnsTo": [
"uuid"
],
- "onDelete": "no action",
+ "onDelete": "cascade",
"onUpdate": "no action"
}
},
"compositePrimaryKeys": {},
- "uniqueConstraints": {},
+ "uniqueConstraints": {
+ "endpoints_name_unique": {
+ "name": "endpoints_name_unique",
+ "nullsNotDistinct": false,
+ "columns": [
+ "name"
+ ]
+ },
+ "endpoints_name_unique_idx": {
+ "name": "endpoints_name_unique_idx",
+ "nullsNotDistinct": false,
+ "columns": [
+ "name"
+ ]
+ }
+ },
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
@@ -314,52 +459,14 @@
"notNull": true,
"default": "now()"
},
- "profile_uuid": {
- "name": "profile_uuid",
- "type": "uuid",
- "primaryKey": false,
- "notNull": true
- },
- "status": {
- "name": "status",
- "type": "mcp_server_status",
- "typeSchema": "public",
+ "bearer_token": {
+ "name": "bearer_token",
+ "type": "text",
"primaryKey": false,
- "notNull": true,
- "default": "'ACTIVE'"
+ "notNull": false
}
},
"indexes": {
- "mcp_servers_status_idx": {
- "name": "mcp_servers_status_idx",
- "columns": [
- {
- "expression": "status",
- "isExpression": false,
- "asc": true,
- "nulls": "last"
- }
- ],
- "isUnique": false,
- "concurrently": false,
- "method": "btree",
- "with": {}
- },
- "mcp_servers_profile_uuid_idx": {
- "name": "mcp_servers_profile_uuid_idx",
- "columns": [
- {
- "expression": "profile_uuid",
- "isExpression": false,
- "asc": true,
- "nulls": "last"
- }
- ],
- "isUnique": false,
- "concurrently": false,
- "method": "btree",
- "with": {}
- },
"mcp_servers_type_idx": {
"name": "mcp_servers_type_idx",
"columns": [
@@ -376,29 +483,23 @@
"with": {}
}
},
- "foreignKeys": {
- "mcp_servers_profile_uuid_profiles_uuid_fk": {
- "name": "mcp_servers_profile_uuid_profiles_uuid_fk",
- "tableFrom": "mcp_servers",
- "tableTo": "profiles",
- "columnsFrom": [
- "profile_uuid"
- ],
- "columnsTo": [
- "uuid"
- ],
- "onDelete": "no action",
- "onUpdate": "no action"
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {
+ "mcp_servers_name_unique_idx": {
+ "name": "mcp_servers_name_unique_idx",
+ "nullsNotDistinct": false,
+ "columns": [
+ "name"
+ ]
}
},
- "compositePrimaryKeys": {},
- "uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
- "public.oauth_sessions": {
- "name": "oauth_sessions",
+ "public.namespace_server_mappings": {
+ "name": "namespace_server_mappings",
"schema": "",
"columns": {
"uuid": {
@@ -408,29 +509,25 @@
"notNull": true,
"default": "gen_random_uuid()"
},
- "mcp_server_uuid": {
- "name": "mcp_server_uuid",
+ "namespace_uuid": {
+ "name": "namespace_uuid",
"type": "uuid",
"primaryKey": false,
"notNull": true
},
- "client_information": {
- "name": "client_information",
- "type": "jsonb",
+ "mcp_server_uuid": {
+ "name": "mcp_server_uuid",
+ "type": "uuid",
"primaryKey": false,
"notNull": true
},
- "tokens": {
- "name": "tokens",
- "type": "jsonb",
- "primaryKey": false,
- "notNull": false
- },
- "code_verifier": {
- "name": "code_verifier",
- "type": "text",
+ "status": {
+ "name": "status",
+ "type": "mcp_server_status",
+ "typeSchema": "public",
"primaryKey": false,
- "notNull": false
+ "notNull": true,
+ "default": "'ACTIVE'"
},
"created_at": {
"name": "created_at",
@@ -438,18 +535,26 @@
"primaryKey": false,
"notNull": true,
"default": "now()"
- },
- "updated_at": {
- "name": "updated_at",
- "type": "timestamp with time zone",
- "primaryKey": false,
- "notNull": true,
- "default": "now()"
}
},
"indexes": {
- "oauth_sessions_mcp_server_uuid_idx": {
- "name": "oauth_sessions_mcp_server_uuid_idx",
+ "namespace_server_mappings_namespace_uuid_idx": {
+ "name": "namespace_server_mappings_namespace_uuid_idx",
+ "columns": [
+ {
+ "expression": "namespace_uuid",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
+ },
+ "namespace_server_mappings_mcp_server_uuid_idx": {
+ "name": "namespace_server_mappings_mcp_server_uuid_idx",
"columns": [
{
"expression": "mcp_server_uuid",
@@ -462,12 +567,40 @@
"concurrently": false,
"method": "btree",
"with": {}
+ },
+ "namespace_server_mappings_status_idx": {
+ "name": "namespace_server_mappings_status_idx",
+ "columns": [
+ {
+ "expression": "status",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
}
},
"foreignKeys": {
- "oauth_sessions_mcp_server_uuid_mcp_servers_uuid_fk": {
- "name": "oauth_sessions_mcp_server_uuid_mcp_servers_uuid_fk",
- "tableFrom": "oauth_sessions",
+ "namespace_server_mappings_namespace_uuid_namespaces_uuid_fk": {
+ "name": "namespace_server_mappings_namespace_uuid_namespaces_uuid_fk",
+ "tableFrom": "namespace_server_mappings",
+ "tableTo": "namespaces",
+ "columnsFrom": [
+ "namespace_uuid"
+ ],
+ "columnsTo": [
+ "uuid"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ },
+ "namespace_server_mappings_mcp_server_uuid_mcp_servers_uuid_fk": {
+ "name": "namespace_server_mappings_mcp_server_uuid_mcp_servers_uuid_fk",
+ "tableFrom": "namespace_server_mappings",
"tableTo": "mcp_servers",
"columnsFrom": [
"mcp_server_uuid"
@@ -481,10 +614,11 @@
},
"compositePrimaryKeys": {},
"uniqueConstraints": {
- "oauth_sessions_unique_per_server_idx": {
- "name": "oauth_sessions_unique_per_server_idx",
+ "namespace_server_mappings_unique_idx": {
+ "name": "namespace_server_mappings_unique_idx",
"nullsNotDistinct": false,
"columns": [
+ "namespace_uuid",
"mcp_server_uuid"
]
}
@@ -493,8 +627,8 @@
"checkConstraints": {},
"isRLSEnabled": false
},
- "public.profiles": {
- "name": "profiles",
+ "public.namespace_tool_mappings": {
+ "name": "namespace_tool_mappings",
"schema": "",
"columns": {
"uuid": {
@@ -504,32 +638,31 @@
"notNull": true,
"default": "gen_random_uuid()"
},
- "name": {
- "name": "name",
- "type": "text",
+ "namespace_uuid": {
+ "name": "namespace_uuid",
+ "type": "uuid",
"primaryKey": false,
"notNull": true
},
- "project_uuid": {
- "name": "project_uuid",
+ "tool_uuid": {
+ "name": "tool_uuid",
"type": "uuid",
"primaryKey": false,
"notNull": true
},
- "enabled_capabilities": {
- "name": "enabled_capabilities",
- "type": "profile_capability[]",
+ "mcp_server_uuid": {
+ "name": "mcp_server_uuid",
+ "type": "uuid",
"primaryKey": false,
- "notNull": true,
- "default": "'{}'::profile_capability[]"
+ "notNull": true
},
- "workspace_mode": {
- "name": "workspace_mode",
- "type": "workspace_mode",
+ "status": {
+ "name": "status",
+ "type": "mcp_server_status",
"typeSchema": "public",
"primaryKey": false,
"notNull": true,
- "default": "'LOCAL'"
+ "default": "'ACTIVE'"
},
"created_at": {
"name": "created_at",
@@ -540,11 +673,56 @@
}
},
"indexes": {
- "profiles_project_uuid_idx": {
- "name": "profiles_project_uuid_idx",
+ "namespace_tool_mappings_namespace_uuid_idx": {
+ "name": "namespace_tool_mappings_namespace_uuid_idx",
+ "columns": [
+ {
+ "expression": "namespace_uuid",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
+ },
+ "namespace_tool_mappings_tool_uuid_idx": {
+ "name": "namespace_tool_mappings_tool_uuid_idx",
+ "columns": [
+ {
+ "expression": "tool_uuid",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
+ },
+ "namespace_tool_mappings_mcp_server_uuid_idx": {
+ "name": "namespace_tool_mappings_mcp_server_uuid_idx",
+ "columns": [
+ {
+ "expression": "mcp_server_uuid",
+ "isExpression": false,
+ "asc": true,
+ "nulls": "last"
+ }
+ ],
+ "isUnique": false,
+ "concurrently": false,
+ "method": "btree",
+ "with": {}
+ },
+ "namespace_tool_mappings_status_idx": {
+ "name": "namespace_tool_mappings_status_idx",
"columns": [
{
- "expression": "project_uuid",
+ "expression": "status",
"isExpression": false,
"asc": true,
"nulls": "last"
@@ -557,12 +735,38 @@
}
},
"foreignKeys": {
- "profiles_project_uuid_projects_uuid_fk": {
- "name": "profiles_project_uuid_projects_uuid_fk",
- "tableFrom": "profiles",
- "tableTo": "projects",
+ "namespace_tool_mappings_namespace_uuid_namespaces_uuid_fk": {
+ "name": "namespace_tool_mappings_namespace_uuid_namespaces_uuid_fk",
+ "tableFrom": "namespace_tool_mappings",
+ "tableTo": "namespaces",
+ "columnsFrom": [
+ "namespace_uuid"
+ ],
+ "columnsTo": [
+ "uuid"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ },
+ "namespace_tool_mappings_tool_uuid_tools_uuid_fk": {
+ "name": "namespace_tool_mappings_tool_uuid_tools_uuid_fk",
+ "tableFrom": "namespace_tool_mappings",
+ "tableTo": "tools",
+ "columnsFrom": [
+ "tool_uuid"
+ ],
+ "columnsTo": [
+ "uuid"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ },
+ "namespace_tool_mappings_mcp_server_uuid_mcp_servers_uuid_fk": {
+ "name": "namespace_tool_mappings_mcp_server_uuid_mcp_servers_uuid_fk",
+ "tableFrom": "namespace_tool_mappings",
+ "tableTo": "mcp_servers",
"columnsFrom": [
- "project_uuid"
+ "mcp_server_uuid"
],
"columnsTo": [
"uuid"
@@ -572,13 +776,22 @@
}
},
"compositePrimaryKeys": {},
- "uniqueConstraints": {},
+ "uniqueConstraints": {
+ "namespace_tool_mappings_unique_idx": {
+ "name": "namespace_tool_mappings_unique_idx",
+ "nullsNotDistinct": false,
+ "columns": [
+ "namespace_uuid",
+ "tool_uuid"
+ ]
+ }
+ },
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
- "public.projects": {
- "name": "projects",
+ "public.namespaces": {
+ "name": "namespaces",
"schema": "",
"columns": {
"uuid": {
@@ -594,6 +807,12 @@
"primaryKey": false,
"notNull": true
},
+ "description": {
+ "name": "description",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
"created_at": {
"name": "created_at",
"type": "timestamp with time zone",
@@ -601,86 +820,62 @@
"notNull": true,
"default": "now()"
},
- "active_profile_uuid": {
- "name": "active_profile_uuid",
- "type": "uuid",
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
"primaryKey": false,
- "notNull": false
+ "notNull": true,
+ "default": "now()"
}
},
"indexes": {},
- "foreignKeys": {
- "projects_active_profile_uuid_profiles_uuid_fk": {
- "name": "projects_active_profile_uuid_profiles_uuid_fk",
- "tableFrom": "projects",
- "tableTo": "profiles",
- "columnsFrom": [
- "active_profile_uuid"
- ],
- "columnsTo": [
- "uuid"
- ],
- "onDelete": "no action",
- "onUpdate": "no action"
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {
+ "namespaces_name_unique": {
+ "name": "namespaces_name_unique",
+ "nullsNotDistinct": false,
+ "columns": [
+ "name"
+ ]
}
},
- "compositePrimaryKeys": {},
- "uniqueConstraints": {},
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
},
- "public.tool_execution_logs": {
- "name": "tool_execution_logs",
+ "public.oauth_sessions": {
+ "name": "oauth_sessions",
"schema": "",
"columns": {
- "id": {
- "name": "id",
- "type": "serial",
+ "uuid": {
+ "name": "uuid",
+ "type": "uuid",
"primaryKey": true,
- "notNull": true
+ "notNull": true,
+ "default": "gen_random_uuid()"
},
"mcp_server_uuid": {
"name": "mcp_server_uuid",
"type": "uuid",
"primaryKey": false,
- "notNull": false
- },
- "tool_name": {
- "name": "tool_name",
- "type": "text",
- "primaryKey": false,
"notNull": true
},
- "payload": {
- "name": "payload",
+ "client_information": {
+ "name": "client_information",
"type": "jsonb",
"primaryKey": false,
"notNull": true,
"default": "'{}'::jsonb"
},
- "result": {
- "name": "result",
+ "tokens": {
+ "name": "tokens",
"type": "jsonb",
"primaryKey": false,
"notNull": false
},
- "status": {
- "name": "status",
- "type": "tool_execution_status",
- "typeSchema": "public",
- "primaryKey": false,
- "notNull": true,
- "default": "'PENDING'"
- },
- "error_message": {
- "name": "error_message",
- "type": "text",
- "primaryKey": false,
- "notNull": false
- },
- "execution_time_ms": {
- "name": "execution_time_ms",
+ "code_verifier": {
+ "name": "code_verifier",
"type": "text",
"primaryKey": false,
"notNull": false
@@ -691,11 +886,18 @@
"primaryKey": false,
"notNull": true,
"default": "now()"
+ },
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
}
},
"indexes": {
- "tool_execution_logs_mcp_server_uuid_idx": {
- "name": "tool_execution_logs_mcp_server_uuid_idx",
+ "oauth_sessions_mcp_server_uuid_idx": {
+ "name": "oauth_sessions_mcp_server_uuid_idx",
"columns": [
{
"expression": "mcp_server_uuid",
@@ -708,42 +910,12 @@
"concurrently": false,
"method": "btree",
"with": {}
- },
- "tool_execution_logs_tool_name_idx": {
- "name": "tool_execution_logs_tool_name_idx",
- "columns": [
- {
- "expression": "tool_name",
- "isExpression": false,
- "asc": true,
- "nulls": "last"
- }
- ],
- "isUnique": false,
- "concurrently": false,
- "method": "btree",
- "with": {}
- },
- "tool_execution_logs_created_at_idx": {
- "name": "tool_execution_logs_created_at_idx",
- "columns": [
- {
- "expression": "created_at",
- "isExpression": false,
- "asc": true,
- "nulls": "last"
- }
- ],
- "isUnique": false,
- "concurrently": false,
- "method": "btree",
- "with": {}
}
},
"foreignKeys": {
- "tool_execution_logs_mcp_server_uuid_mcp_servers_uuid_fk": {
- "name": "tool_execution_logs_mcp_server_uuid_mcp_servers_uuid_fk",
- "tableFrom": "tool_execution_logs",
+ "oauth_sessions_mcp_server_uuid_mcp_servers_uuid_fk": {
+ "name": "oauth_sessions_mcp_server_uuid_mcp_servers_uuid_fk",
+ "tableFrom": "oauth_sessions",
"tableTo": "mcp_servers",
"columnsFrom": [
"mcp_server_uuid"
@@ -756,7 +928,100 @@
}
},
"compositePrimaryKeys": {},
- "uniqueConstraints": {},
+ "uniqueConstraints": {
+ "oauth_sessions_unique_per_server_idx": {
+ "name": "oauth_sessions_unique_per_server_idx",
+ "nullsNotDistinct": false,
+ "columns": [
+ "mcp_server_uuid"
+ ]
+ }
+ },
+ "policies": {},
+ "checkConstraints": {},
+ "isRLSEnabled": false
+ },
+ "public.sessions": {
+ "name": "sessions",
+ "schema": "",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "text",
+ "primaryKey": true,
+ "notNull": true
+ },
+ "expires_at": {
+ "name": "expires_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "token": {
+ "name": "token",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "created_at": {
+ "name": "created_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ },
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ },
+ "ip_address": {
+ "name": "ip_address",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "user_agent": {
+ "name": "user_agent",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "user_id": {
+ "name": "user_id",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {
+ "sessions_user_id_users_id_fk": {
+ "name": "sessions_user_id_users_id_fk",
+ "tableFrom": "sessions",
+ "tableTo": "users",
+ "columnsFrom": [
+ "user_id"
+ ],
+ "columnsTo": [
+ "id"
+ ],
+ "onDelete": "cascade",
+ "onUpdate": "no action"
+ }
+ },
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {
+ "sessions_token_unique": {
+ "name": "sessions_token_unique",
+ "nullsNotDistinct": false,
+ "columns": [
+ "token"
+ ]
+ }
+ },
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
@@ -797,19 +1062,18 @@
"notNull": true,
"default": "now()"
},
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ },
"mcp_server_uuid": {
"name": "mcp_server_uuid",
"type": "uuid",
"primaryKey": false,
"notNull": true
- },
- "status": {
- "name": "status",
- "type": "toggle_status",
- "typeSchema": "public",
- "primaryKey": false,
- "notNull": true,
- "default": "'ACTIVE'"
}
},
"indexes": {
@@ -858,6 +1122,123 @@
"policies": {},
"checkConstraints": {},
"isRLSEnabled": false
+ },
+ "public.users": {
+ "name": "users",
+ "schema": "",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "text",
+ "primaryKey": true,
+ "notNull": true
+ },
+ "name": {
+ "name": "name",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "email": {
+ "name": "email",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "email_verified": {
+ "name": "email_verified",
+ "type": "boolean",
+ "primaryKey": false,
+ "notNull": true,
+ "default": false
+ },
+ "image": {
+ "name": "image",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": false
+ },
+ "created_at": {
+ "name": "created_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ },
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {
+ "users_email_unique": {
+ "name": "users_email_unique",
+ "nullsNotDistinct": false,
+ "columns": [
+ "email"
+ ]
+ }
+ },
+ "policies": {},
+ "checkConstraints": {},
+ "isRLSEnabled": false
+ },
+ "public.verifications": {
+ "name": "verifications",
+ "schema": "",
+ "columns": {
+ "id": {
+ "name": "id",
+ "type": "text",
+ "primaryKey": true,
+ "notNull": true
+ },
+ "identifier": {
+ "name": "identifier",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "value": {
+ "name": "value",
+ "type": "text",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "expires_at": {
+ "name": "expires_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true
+ },
+ "created_at": {
+ "name": "created_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ },
+ "updated_at": {
+ "name": "updated_at",
+ "type": "timestamp with time zone",
+ "primaryKey": false,
+ "notNull": true,
+ "default": "now()"
+ }
+ },
+ "indexes": {},
+ "foreignKeys": {},
+ "compositePrimaryKeys": {},
+ "uniqueConstraints": {},
+ "policies": {},
+ "checkConstraints": {},
+ "isRLSEnabled": false
}
},
"enums": {
@@ -866,9 +1247,7 @@
"schema": "public",
"values": [
"ACTIVE",
- "INACTIVE",
- "SUGGESTED",
- "DECLINED"
+ "INACTIVE"
]
},
"public.mcp_server_type": {
@@ -879,39 +1258,6 @@
"SSE",
"STREAMABLE_HTTP"
]
- },
- "public.profile_capability": {
- "name": "profile_capability",
- "schema": "public",
- "values": [
- "TOOLS_MANAGEMENT",
- "TOOL_LOGS"
- ]
- },
- "public.toggle_status": {
- "name": "toggle_status",
- "schema": "public",
- "values": [
- "ACTIVE",
- "INACTIVE"
- ]
- },
- "public.tool_execution_status": {
- "name": "tool_execution_status",
- "schema": "public",
- "values": [
- "SUCCESS",
- "ERROR",
- "PENDING"
- ]
- },
- "public.workspace_mode": {
- "name": "workspace_mode",
- "schema": "public",
- "values": [
- "REMOTE",
- "LOCAL"
- ]
}
},
"schemas": {},
diff --git a/apps/backend/drizzle/meta/_journal.json b/apps/backend/drizzle/meta/_journal.json
new file mode 100644
index 00000000..5d40cdeb
--- /dev/null
+++ b/apps/backend/drizzle/meta/_journal.json
@@ -0,0 +1,13 @@
+{
+ "version": "7",
+ "dialect": "postgresql",
+ "entries": [
+ {
+ "idx": 0,
+ "version": "7",
+ "when": 1750769623042,
+ "tag": "0000_greedy_tiger_shark",
+ "breakpoints": true
+ }
+ ]
+}
\ No newline at end of file
diff --git a/apps/backend/eslint.config.js b/apps/backend/eslint.config.js
new file mode 100644
index 00000000..dbd5e5c0
--- /dev/null
+++ b/apps/backend/eslint.config.js
@@ -0,0 +1,3 @@
+import { expressConfig } from "@repo/eslint-config/express";
+
+export default expressConfig;
diff --git a/apps/backend/package.json b/apps/backend/package.json
new file mode 100644
index 00000000..071e5b7d
--- /dev/null
+++ b/apps/backend/package.json
@@ -0,0 +1,55 @@
+{
+ "name": "backend",
+ "version": "1.0.0",
+ "type": "module",
+ "description": "Bare minimum Express 5.1 backend with TypeScript",
+ "main": "dist/index.js",
+ "scripts": {
+ "build": "tsup",
+ "build:dev": "tsup --sourcemap",
+ "build:watch": "tsup --watch",
+ "start": "node dist/index.js",
+ "dev": "tsx watch src/index.ts",
+ "clean": "rm -rf dist",
+ "lint": "eslint . --max-warnings 0",
+ "lint:fix": "eslint . --fix",
+ "db:generate": "dotenv -e .env -- drizzle-kit generate",
+ "db:migrate": "dotenv -e .env -- drizzle-kit migrate"
+ },
+ "dependencies": {
+ "@modelcontextprotocol/sdk": "^1.12.3",
+ "@repo/trpc": "workspace:*",
+ "@repo/zod-types": "workspace:*",
+ "@trpc/server": "^11.4.1",
+ "basic-auth": "^2.0.1",
+ "better-auth": "^1.2.9",
+ "cors": "^2.8.5",
+ "dotenv": "^16.5.0",
+ "drizzle-orm": "^0.44.2",
+ "express": "^5.1.0",
+ "helmet": "^8.1.0",
+ "nanoid": "^5.1.5",
+ "pg": "^8.16.0",
+ "shell-quote": "^1.8.3",
+ "spawn-rx": "^5.1.2",
+ "zod": "^3.25.64"
+ },
+ "devDependencies": {
+ "@repo/eslint-config": "workspace:*",
+ "@types/basic-auth": "^1.1.8",
+ "@types/cors": "^2.8.19",
+ "@types/express": "^4.17.21",
+ "@types/node": "^20.0.0",
+ "@types/pg": "^8.15.4",
+ "@types/shell-quote": "^1.7.5",
+ "dotenv-cli": "^8.0.0",
+ "drizzle-kit": "^0.31.1",
+ "eslint": "^9.28.0",
+ "tsup": "^8.5.0",
+ "tsx": "^4.20.3",
+ "typescript": "^5.0.0"
+ },
+ "keywords": [],
+ "author": "",
+ "license": "ISC"
+}
diff --git a/apps/backend/src/auth.ts b/apps/backend/src/auth.ts
new file mode 100644
index 00000000..d8eee2b8
--- /dev/null
+++ b/apps/backend/src/auth.ts
@@ -0,0 +1,71 @@
+import { betterAuth } from "better-auth";
+import { drizzleAdapter } from "better-auth/adapters/drizzle";
+
+import { db } from "./db/index";
+import * as schema from "./db/schema";
+import { configService } from "./lib/config.service";
+
+// Require the auth secret explicitly; only the base URL falls back to a local development default
+if (!process.env.BETTER_AUTH_SECRET) {
+ throw new Error("BETTER_AUTH_SECRET environment variable is required");
+}
+const BETTER_AUTH_SECRET = process.env.BETTER_AUTH_SECRET;
+const BETTER_AUTH_URL = process.env.BETTER_AUTH_URL || "http://localhost:12008";
+
+export const auth = betterAuth({
+ secret: BETTER_AUTH_SECRET,
+ baseURL: BETTER_AUTH_URL,
+ database: drizzleAdapter(db, {
+ provider: "pg",
+ schema: {
+ user: schema.usersTable,
+ session: schema.sessionsTable,
+ account: schema.accountsTable,
+ verification: schema.verificationsTable,
+ },
+ }),
+ emailAndPassword: {
+ enabled: true,
+ requireEmailVerification: false, // Set to true if you want email verification
+ },
+ session: {
+ expiresIn: 60 * 60 * 24 * 7, // 7 days
+ updateAge: 60 * 60 * 24, // 1 day (how often to update the session)
+ },
+ user: {
+ additionalFields: {
+ emailVerified: {
+ type: "boolean",
+ defaultValue: false,
+ },
+ },
+ },
+ advanced: {
+ crossSubDomainCookies: {
+ enabled: true,
+ },
+ },
+ logger: {
+ level: "debug", // Enable debug logging
+ },
+ databaseHooks: {
+ user: {
+ create: {
+ before: async (user) => {
+ // Check if signup is disabled
+ const isSignupDisabled = await configService.isSignupDisabled();
+ if (isSignupDisabled) {
+ throw new Error("New user registration is currently disabled.");
+ }
+ return { data: user };
+ },
+ },
+ },
+ },
+});
+
+console.log("✅ Better Auth instance created successfully");
+
+export type Session = typeof auth.$Infer.Session;
+// Note: User type needs to be inferred from Session.user
+export type User = typeof auth.$Infer.Session.user;
diff --git a/apps/backend/src/db/index.ts b/apps/backend/src/db/index.ts
new file mode 100644
index 00000000..08651872
--- /dev/null
+++ b/apps/backend/src/db/index.ts
@@ -0,0 +1,11 @@
+import "dotenv/config";
+
+import { drizzle } from "drizzle-orm/node-postgres";
+
+import * as schema from "./schema";
+
+if (!process.env.DATABASE_URL) {
+ throw new Error("DATABASE_URL is not set");
+}
+
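+// Shared Drizzle client over node-postgres; attaching the schema enables fully typed queries.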
+export const db = drizzle(process.env.DATABASE_URL, { schema });
diff --git a/apps/backend/src/db/repositories/api-keys.repo.ts b/apps/backend/src/db/repositories/api-keys.repo.ts
new file mode 100644
index 00000000..706ec1e4
--- /dev/null
+++ b/apps/backend/src/db/repositories/api-keys.repo.ts
@@ -0,0 +1,158 @@
+import { ApiKeyCreateInput, ApiKeyUpdateInput } from "@repo/zod-types";
+import { and, desc, eq } from "drizzle-orm";
+import { customAlphabet } from "nanoid";
+
+import { db } from "../index";
+import { apiKeysTable } from "../schema";
+
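+// Random key material: 64 characters drawn from an alphanumeric alphabet (no symbols), keeping keys URL-safe.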
+const nanoid = customAlphabet(
+ "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz",
+ 64,
+);
+
+export class ApiKeysRepository {
+ /**
+ * Generate a new API key with the specified format: sk_mt_{64-char-nanoid}
+ */
+ private generateApiKey(): string {
+ const keyPart = nanoid();
+ const key = `sk_mt_${keyPart}`;
+
+ return key;
+ }
+
+ async create(input: ApiKeyCreateInput): Promise<{
+ uuid: string;
+ name: string;
+ key: string;
+ user_id: string;
+ created_at: Date;
+ }> {
+ const key = this.generateApiKey();
+
+ const [createdApiKey] = await db
+ .insert(apiKeysTable)
+ .values({
+ name: input.name,
+ key: key,
+ user_id: input.user_id,
+ is_active: input.is_active ?? true,
+ })
+ .returning({
+ uuid: apiKeysTable.uuid,
+ name: apiKeysTable.name,
+ user_id: apiKeysTable.user_id,
+ created_at: apiKeysTable.created_at,
+ });
+
+ if (!createdApiKey) {
+ throw new Error("Failed to create API key");
+ }
+
+ return {
+ ...createdApiKey,
+ key, // Return the actual key
+ };
+ }
+
+ async findByUserId(userId: string) {
+ return await db
+ .select({
+ uuid: apiKeysTable.uuid,
+ name: apiKeysTable.name,
+ key: apiKeysTable.key,
+ created_at: apiKeysTable.created_at,
+ is_active: apiKeysTable.is_active,
+ })
+ .from(apiKeysTable)
+ .where(eq(apiKeysTable.user_id, userId))
+ .orderBy(desc(apiKeysTable.created_at));
+ }
+
+ async findByUuid(uuid: string, userId: string) {
+ const [apiKey] = await db
+ .select({
+ uuid: apiKeysTable.uuid,
+ name: apiKeysTable.name,
+ key: apiKeysTable.key,
+ created_at: apiKeysTable.created_at,
+ is_active: apiKeysTable.is_active,
+ user_id: apiKeysTable.user_id,
+ })
+ .from(apiKeysTable)
+ .where(
+ and(eq(apiKeysTable.uuid, uuid), eq(apiKeysTable.user_id, userId)),
+ );
+
+ return apiKey;
+ }
+
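+ /**
+ * Look up a raw API key value and report whether it can be used.
+ * Unknown keys and keys flagged inactive both resolve to { valid: false }.
+ */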
+ async validateApiKey(key: string): Promise<{
+ valid: boolean;
+ user_id?: string;
+ key_uuid?: string;
+ }> {
+ const [apiKey] = await db
+ .select({
+ uuid: apiKeysTable.uuid,
+ user_id: apiKeysTable.user_id,
+ is_active: apiKeysTable.is_active,
+ })
+ .from(apiKeysTable)
+ .where(eq(apiKeysTable.key, key));
+
+ if (!apiKey) {
+ return { valid: false };
+ }
+
+ // Check if key is active
+ if (!apiKey.is_active) {
+ return { valid: false };
+ }
+
+ return {
+ valid: true,
+ user_id: apiKey.user_id,
+ key_uuid: apiKey.uuid,
+ };
+ }
+
+ async update(uuid: string, userId: string, input: ApiKeyUpdateInput) {
+ const [updatedApiKey] = await db
+ .update(apiKeysTable)
+ .set({
+ ...(input.name && { name: input.name }),
+ ...(input.is_active !== undefined && { is_active: input.is_active }),
+ })
+ .where(and(eq(apiKeysTable.uuid, uuid), eq(apiKeysTable.user_id, userId)))
+ .returning({
+ uuid: apiKeysTable.uuid,
+ name: apiKeysTable.name,
+ key: apiKeysTable.key,
+ created_at: apiKeysTable.created_at,
+ is_active: apiKeysTable.is_active,
+ });
+
+ if (!updatedApiKey) {
+ throw new Error("Failed to update API key or API key not found");
+ }
+
+ return updatedApiKey;
+ }
+
+ async delete(uuid: string, userId: string) {
+ const [deletedApiKey] = await db
+ .delete(apiKeysTable)
+ .where(and(eq(apiKeysTable.uuid, uuid), eq(apiKeysTable.user_id, userId)))
+ .returning({
+ uuid: apiKeysTable.uuid,
+ name: apiKeysTable.name,
+ });
+
+ if (!deletedApiKey) {
+ throw new Error("Failed to delete API key or API key not found");
+ }
+
+ return deletedApiKey;
+ }
+}
diff --git a/apps/backend/src/db/repositories/config.repo.ts b/apps/backend/src/db/repositories/config.repo.ts
new file mode 100644
index 00000000..f3efeab1
--- /dev/null
+++ b/apps/backend/src/db/repositories/config.repo.ts
@@ -0,0 +1,50 @@
+import { eq } from "drizzle-orm";
+
+import { db } from "../index";
+import { configTable } from "../schema";
+
+export const configRepo = {
+ async getConfig(
+ id: string,
+ ): Promise<
+ { id: string; value: string; description?: string | null } | undefined
+ > {
+ const result = await db
+ .select()
+ .from(configTable)
+ .where(eq(configTable.id, id));
+ return result[0];
+ },
+
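+ // Upsert keyed on the config id: insert the row, or update value, description, and updated_at if it already exists.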
+ async setConfig(
+ id: string,
+ value: string,
+ description?: string,
+ ): Promise<void> {
+ await db
+ .insert(configTable)
+ .values({
+ id,
+ value,
+ description,
+ })
+ .onConflictDoUpdate({
+ target: configTable.id,
+ set: {
+ value,
+ description,
+ updated_at: new Date(),
+ },
+ });
+ },
+
+ async getAllConfigs(): Promise<
+ Array<{ id: string; value: string; description?: string | null }>
+ > {
+ return await db.select().from(configTable);
+ },
+
+ async deleteConfig(id: string): Promise<void> {
+ await db.delete(configTable).where(eq(configTable.id, id));
+ },
+};
diff --git a/apps/backend/src/db/repositories/endpoints.repo.ts b/apps/backend/src/db/repositories/endpoints.repo.ts
new file mode 100644
index 00000000..e17c8a22
--- /dev/null
+++ b/apps/backend/src/db/repositories/endpoints.repo.ts
@@ -0,0 +1,180 @@
+import {
+ DatabaseEndpoint,
+ DatabaseEndpointWithNamespace,
+ EndpointCreateInput,
+ EndpointUpdateInput,
+} from "@repo/zod-types";
+import { desc, eq } from "drizzle-orm";
+
+import { db } from "../index";
+import { endpointsTable, namespacesTable } from "../schema";
+
+export class EndpointsRepository {
+ async create(input: EndpointCreateInput): Promise<DatabaseEndpoint> {
+ const [createdEndpoint] = await db
+ .insert(endpointsTable)
+ .values({
+ name: input.name,
+ description: input.description,
+ namespace_uuid: input.namespace_uuid,
+ enable_api_key_auth: input.enable_api_key_auth ?? true,
+ use_query_param_auth: input.use_query_param_auth ?? false,
+ })
+ .returning();
+
+ if (!createdEndpoint) {
+ throw new Error("Failed to create endpoint");
+ }
+
+ return createdEndpoint;
+ }
+
+ async findAll(): Promise<DatabaseEndpoint[]> {
+ return await db
+ .select({
+ uuid: endpointsTable.uuid,
+ name: endpointsTable.name,
+ description: endpointsTable.description,
+ namespace_uuid: endpointsTable.namespace_uuid,
+ enable_api_key_auth: endpointsTable.enable_api_key_auth,
+ use_query_param_auth: endpointsTable.use_query_param_auth,
+ created_at: endpointsTable.created_at,
+ updated_at: endpointsTable.updated_at,
+ })
+ .from(endpointsTable)
+ .orderBy(desc(endpointsTable.created_at));
+ }
+
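+ // Inner-join each endpoint with its owning namespace so callers get both records in one query.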
+ async findAllWithNamespaces(): Promise<DatabaseEndpointWithNamespace[]> {
+ const endpointsData = await db
+ .select({
+ // Endpoint fields
+ uuid: endpointsTable.uuid,
+ name: endpointsTable.name,
+ description: endpointsTable.description,
+ namespace_uuid: endpointsTable.namespace_uuid,
+ enable_api_key_auth: endpointsTable.enable_api_key_auth,
+ use_query_param_auth: endpointsTable.use_query_param_auth,
+ created_at: endpointsTable.created_at,
+ updated_at: endpointsTable.updated_at,
+ // Namespace fields
+ namespace: {
+ uuid: namespacesTable.uuid,
+ name: namespacesTable.name,
+ description: namespacesTable.description,
+ created_at: namespacesTable.created_at,
+ updated_at: namespacesTable.updated_at,
+ },
+ })
+ .from(endpointsTable)
+ .innerJoin(
+ namespacesTable,
+ eq(endpointsTable.namespace_uuid, namespacesTable.uuid),
+ )
+ .orderBy(desc(endpointsTable.created_at));
+
+ return endpointsData;
+ }
+
+ async findByUuid(uuid: string): Promise<DatabaseEndpoint | undefined> {
+ const [endpoint] = await db
+ .select({
+ uuid: endpointsTable.uuid,
+ name: endpointsTable.name,
+ description: endpointsTable.description,
+ namespace_uuid: endpointsTable.namespace_uuid,
+ enable_api_key_auth: endpointsTable.enable_api_key_auth,
+ use_query_param_auth: endpointsTable.use_query_param_auth,
+ created_at: endpointsTable.created_at,
+ updated_at: endpointsTable.updated_at,
+ })
+ .from(endpointsTable)
+ .where(eq(endpointsTable.uuid, uuid));
+
+ return endpoint;
+ }
+
+ async findByUuidWithNamespace(
+ uuid: string,
+ ): Promise<DatabaseEndpointWithNamespace | undefined> {
+ const [endpointData] = await db
+ .select({
+ // Endpoint fields
+ uuid: endpointsTable.uuid,
+ name: endpointsTable.name,
+ description: endpointsTable.description,
+ namespace_uuid: endpointsTable.namespace_uuid,
+ enable_api_key_auth: endpointsTable.enable_api_key_auth,
+ use_query_param_auth: endpointsTable.use_query_param_auth,
+ created_at: endpointsTable.created_at,
+ updated_at: endpointsTable.updated_at,
+ // Namespace fields
+ namespace: {
+ uuid: namespacesTable.uuid,
+ name: namespacesTable.name,
+ description: namespacesTable.description,
+ created_at: namespacesTable.created_at,
+ updated_at: namespacesTable.updated_at,
+ },
+ })
+ .from(endpointsTable)
+ .innerJoin(
+ namespacesTable,
+ eq(endpointsTable.namespace_uuid, namespacesTable.uuid),
+ )
+ .where(eq(endpointsTable.uuid, uuid));
+
+ return endpointData;
+ }
+
+ async findByName(name: string): Promise<DatabaseEndpoint | undefined> {
+ const [endpoint] = await db
+ .select({
+ uuid: endpointsTable.uuid,
+ name: endpointsTable.name,
+ description: endpointsTable.description,
+ namespace_uuid: endpointsTable.namespace_uuid,
+ enable_api_key_auth: endpointsTable.enable_api_key_auth,
+ use_query_param_auth: endpointsTable.use_query_param_auth,
+ created_at: endpointsTable.created_at,
+ updated_at: endpointsTable.updated_at,
+ })
+ .from(endpointsTable)
+ .where(eq(endpointsTable.name, name));
+
+ return endpoint;
+ }
+
+ async deleteByUuid(uuid: string): Promise<DatabaseEndpoint | undefined> {
+ const [deletedEndpoint] = await db
+ .delete(endpointsTable)
+ .where(eq(endpointsTable.uuid, uuid))
+ .returning();
+
+ return deletedEndpoint;
+ }
+
+ async update(input: EndpointUpdateInput): Promise<DatabaseEndpoint> {
+ const [updatedEndpoint] = await db
+ .update(endpointsTable)
+ .set({
+ name: input.name,
+ description: input.description,
+ namespace_uuid: input.namespace_uuid,
+ enable_api_key_auth: input.enable_api_key_auth,
+ use_query_param_auth: input.use_query_param_auth,
+ updated_at: new Date(),
+ })
+ .where(eq(endpointsTable.uuid, input.uuid))
+ .returning();
+
+ if (!updatedEndpoint) {
+ throw new Error("Failed to update endpoint");
+ }
+
+ return updatedEndpoint;
+ }
+}
+
+// Export the repository instance
+export const endpointsRepository = new EndpointsRepository();
diff --git a/apps/backend/src/db/repositories/index.ts b/apps/backend/src/db/repositories/index.ts
new file mode 100644
index 00000000..b046b628
--- /dev/null
+++ b/apps/backend/src/db/repositories/index.ts
@@ -0,0 +1,8 @@
+export * from "./namespaces.repo";
+export * from "./namespace-mappings.repo";
+export * from "./endpoints.repo";
+export * from "./mcp-servers.repo";
+export * from "./tools.repo";
+export * from "./oauth-sessions.repo";
+export * from "./api-keys.repo";
+export { configRepo } from "./config.repo";
diff --git a/apps/backend/src/db/repositories/mcp-servers.repo.ts b/apps/backend/src/db/repositories/mcp-servers.repo.ts
new file mode 100644
index 00000000..36b085d2
--- /dev/null
+++ b/apps/backend/src/db/repositories/mcp-servers.repo.ts
@@ -0,0 +1,186 @@
+import {
+ DatabaseMcpServer,
+ McpServerCreateInput,
+ McpServerUpdateInput,
+} from "@repo/zod-types";
+import { desc, eq } from "drizzle-orm";
+
+import { db } from "../index";
+import { mcpServersTable } from "../schema";
+
+export class McpServersRepository {
+ async create(input: McpServerCreateInput): Promise<DatabaseMcpServer | undefined> {
+ try {
+ const [createdServer] = await db
+ .insert(mcpServersTable)
+ .values(input)
+ .returning();
+
+ return createdServer;
+ } catch (error: any) {
+ console.error("Database error in create:", error);
+
+ // Handle DrizzleQueryError structure - the actual PostgreSQL error might be in error.cause
+ const pgError = error.cause || error;
+
+ // Handle unique constraint violation for server name
+ if (
+ (pgError?.code === "23505" || error?.code === "23505") &&
+ (pgError?.constraint === "mcp_servers_name_unique_idx" ||
+ error?.constraint_name === "mcp_servers_name_unique_idx" ||
+ pgError?.constraint_name === "mcp_servers_name_unique_idx")
+ ) {
+ throw new Error(
+ `Server name "${input.name}" already exists. Server names must be unique.`,
+ );
+ }
+
+ // Handle regex constraint violation for server name
+ if (
+ (pgError?.code === "23514" || error?.code === "23514") &&
+ (pgError?.constraint === "mcp_servers_name_regex_check" ||
+ error?.constraint_name === "mcp_servers_name_regex_check" ||
+ pgError?.constraint_name === "mcp_servers_name_regex_check")
+ ) {
+ throw new Error(
+ `Server name "${input.name}" is invalid. Server names must only contain letters, numbers, underscores, and hyphens.`,
+ );
+ }
+
+ // For any other database errors, throw a generic user-friendly message
+ throw new Error(
+ "Failed to create MCP server. Please check your input and try again.",
+ );
+ }
+ }
+
+ async findAll(): Promise<DatabaseMcpServer[]> {
+ return await db
+ .select()
+ .from(mcpServersTable)
+ .orderBy(desc(mcpServersTable.created_at));
+ }
+
+ async findByUuid(uuid: string): Promise<DatabaseMcpServer | undefined> {
+ const [server] = await db
+ .select()
+ .from(mcpServersTable)
+ .where(eq(mcpServersTable.uuid, uuid))
+ .limit(1);
+
+ return server;
+ }
+
+ async findByName(name: string): Promise<DatabaseMcpServer | undefined> {
+ const [server] = await db
+ .select()
+ .from(mcpServersTable)
+ .where(eq(mcpServersTable.name, name))
+ .limit(1);
+
+ return server;
+ }
+
+ async deleteByUuid(uuid: string): Promise<DatabaseMcpServer | undefined> {
+ const [deletedServer] = await db
+ .delete(mcpServersTable)
+ .where(eq(mcpServersTable.uuid, uuid))
+ .returning();
+
+ return deletedServer;
+ }
+
+ async update(
+ input: McpServerUpdateInput,
+ ): Promise<DatabaseMcpServer | undefined> {
+ const { uuid, ...updateData } = input;
+
+ try {
+ const [updatedServer] = await db
+ .update(mcpServersTable)
+ .set(updateData)
+ .where(eq(mcpServersTable.uuid, uuid))
+ .returning();
+
+ return updatedServer;
+ } catch (error: any) {
+ console.error("Database error in update:", error);
+
+ // Handle DrizzleQueryError structure - the actual PostgreSQL error might be in error.cause
+ const pgError = error.cause || error;
+
+ // Handle unique constraint violation for server name
+ if (
+ (pgError?.code === "23505" || error?.code === "23505") &&
+ (pgError?.constraint === "mcp_servers_name_unique_idx" ||
+ error?.constraint_name === "mcp_servers_name_unique_idx" ||
+ pgError?.constraint_name === "mcp_servers_name_unique_idx")
+ ) {
+ throw new Error(
+ `Server name "${input.name}" already exists. Server names must be unique.`,
+ );
+ }
+
+ // Handle regex constraint violation for server name
+ if (
+ (pgError?.code === "23514" || error?.code === "23514") &&
+ (pgError?.constraint === "mcp_servers_name_regex_check" ||
+ error?.constraint_name === "mcp_servers_name_regex_check" ||
+ pgError?.constraint_name === "mcp_servers_name_regex_check")
+ ) {
+ throw new Error(
+ `Server name "${input.name}" is invalid. Server names must only contain letters, numbers, underscores, and hyphens.`,
+ );
+ }
+
+ // For any other database errors, throw a generic user-friendly message
+ throw new Error(
+ "Failed to update MCP server. Please check your input and try again.",
+ );
+ }
+ }
+
+ async bulkCreate(
+ servers: McpServerCreateInput[],
+ ): Promise<DatabaseMcpServer[]> {
+ try {
+ return await db.insert(mcpServersTable).values(servers).returning();
+ } catch (error: any) {
+ console.error("Database error in bulkCreate:", error);
+
+ // Handle DrizzleQueryError structure - the actual PostgreSQL error might be in error.cause
+ const pgError = error.cause || error;
+
+ // Handle unique constraint violation for server name
+ if (
+ (pgError?.code === "23505" || error?.code === "23505") &&
+ (pgError?.constraint === "mcp_servers_name_unique_idx" ||
+ error?.constraint_name === "mcp_servers_name_unique_idx" ||
+ pgError?.constraint_name === "mcp_servers_name_unique_idx")
+ ) {
+ throw new Error(
+ "One or more server names already exist. Server names must be unique.",
+ );
+ }
+
+ // Handle regex constraint violation for server name
+ if (
+ (pgError?.code === "23514" || error?.code === "23514") &&
+ (pgError?.constraint === "mcp_servers_name_regex_check" ||
+ error?.constraint_name === "mcp_servers_name_regex_check" ||
+ pgError?.constraint_name === "mcp_servers_name_regex_check")
+ ) {
+ throw new Error(
+ "One or more server names are invalid. Server names must only contain letters, numbers, underscores, and hyphens.",
+ );
+ }
+
+ // For any other database errors, throw a generic user-friendly message
+ throw new Error(
+ "Failed to bulk create MCP servers. Please check your input and try again.",
+ );
+ }
+ }
+}
+
+export const mcpServersRepository = new McpServersRepository();
diff --git a/apps/backend/src/db/repositories/namespace-mappings.repo.ts b/apps/backend/src/db/repositories/namespace-mappings.repo.ts
new file mode 100644
index 00000000..0ef34849
--- /dev/null
+++ b/apps/backend/src/db/repositories/namespace-mappings.repo.ts
@@ -0,0 +1,123 @@
+import {
+ NamespaceServerStatusUpdate,
+ NamespaceToolStatusUpdate,
+} from "@repo/zod-types";
+import { and, eq, sql } from "drizzle-orm";
+
+import { db } from "../index";
+import {
+ namespaceServerMappingsTable,
+ namespaceToolMappingsTable,
+} from "../schema";
+
+export class NamespaceMappingsRepository {
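+ // Set the ACTIVE/INACTIVE status for a server within a specific namespace mapping.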
+ async updateServerStatus(input: NamespaceServerStatusUpdate) {
+ const [updatedMapping] = await db
+ .update(namespaceServerMappingsTable)
+ .set({
+ status: input.status,
+ })
+ .where(
+ and(
+ eq(namespaceServerMappingsTable.namespace_uuid, input.namespaceUuid),
+ eq(namespaceServerMappingsTable.mcp_server_uuid, input.serverUuid),
+ ),
+ )
+ .returning();
+
+ return updatedMapping;
+ }
+
+ async updateToolStatus(input: NamespaceToolStatusUpdate) {
+ const [updatedMapping] = await db
+ .update(namespaceToolMappingsTable)
+ .set({
+ status: input.status,
+ })
+ .where(
+ and(
+ eq(namespaceToolMappingsTable.namespace_uuid, input.namespaceUuid),
+ eq(namespaceToolMappingsTable.tool_uuid, input.toolUuid),
+ eq(namespaceToolMappingsTable.mcp_server_uuid, input.serverUuid),
+ ),
+ )
+ .returning();
+
+ return updatedMapping;
+ }
+
+ async findServerMapping(namespaceUuid: string, serverUuid: string) {
+ const [mapping] = await db
+ .select()
+ .from(namespaceServerMappingsTable)
+ .where(
+ and(
+ eq(namespaceServerMappingsTable.namespace_uuid, namespaceUuid),
+ eq(namespaceServerMappingsTable.mcp_server_uuid, serverUuid),
+ ),
+ );
+
+ return mapping;
+ }
+
+ async findToolMapping(
+ namespaceUuid: string,
+ toolUuid: string,
+ serverUuid: string,
+ ) {
+ const [mapping] = await db
+ .select()
+ .from(namespaceToolMappingsTable)
+ .where(
+ and(
+ eq(namespaceToolMappingsTable.namespace_uuid, namespaceUuid),
+ eq(namespaceToolMappingsTable.tool_uuid, toolUuid),
+ eq(namespaceToolMappingsTable.mcp_server_uuid, serverUuid),
+ ),
+ );
+
+ return mapping;
+ }
+
+ /**
+ * Bulk upsert namespace tool mappings for a namespace
+ * Used when refreshing tools from MetaMCP connection
+ */
+ async bulkUpsertNamespaceToolMappings(input: {
+ namespaceUuid: string;
+ toolMappings: Array<{
+ toolUuid: string;
+ serverUuid: string;
+ status?: "ACTIVE" | "INACTIVE";
+ }>;
+ }) {
+ if (!input.toolMappings || input.toolMappings.length === 0) {
+ return [];
+ }
+
+ const mappingsToInsert = input.toolMappings.map((mapping) => ({
+ namespace_uuid: input.namespaceUuid,
+ tool_uuid: mapping.toolUuid,
+ mcp_server_uuid: mapping.serverUuid,
+ status: (mapping.status || "ACTIVE") as "ACTIVE" | "INACTIVE",
+ }));
+
+ // Upsert the mappings - if they exist, update the status; if not, insert them
+ return await db
+ .insert(namespaceToolMappingsTable)
+ .values(mappingsToInsert)
+ .onConflictDoUpdate({
+ target: [
+ namespaceToolMappingsTable.namespace_uuid,
+ namespaceToolMappingsTable.tool_uuid,
+ ],
+ set: {
+ status: sql`excluded.status`,
+ mcp_server_uuid: sql`excluded.mcp_server_uuid`,
+ },
+ })
+ .returning();
+ }
+}
+
+export const namespaceMappingsRepository = new NamespaceMappingsRepository();
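+
+// Usage sketch (illustrative): toggling a single tool off inside a namespace via the
+// status mapping. The UUID values below are placeholders.
+//
+//   await namespaceMappingsRepository.updateToolStatus({
+//     namespaceUuid: "00000000-0000-0000-0000-000000000001",
+//     toolUuid: "00000000-0000-0000-0000-000000000002",
+//     serverUuid: "00000000-0000-0000-0000-000000000003",
+//     status: "INACTIVE",
+//   });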
diff --git a/apps/backend/src/db/repositories/namespaces.repo.ts b/apps/backend/src/db/repositories/namespaces.repo.ts
new file mode 100644
index 00000000..2e83c82b
--- /dev/null
+++ b/apps/backend/src/db/repositories/namespaces.repo.ts
@@ -0,0 +1,259 @@
+import {
+ DatabaseNamespace,
+ DatabaseNamespaceTool,
+ DatabaseNamespaceWithServers,
+ NamespaceCreateInput,
+ NamespaceUpdateInput,
+} from "@repo/zod-types";
+import { desc, eq, inArray } from "drizzle-orm";
+
+import { db } from "../index";
+import {
+ mcpServersTable,
+ namespaceServerMappingsTable,
+ namespacesTable,
+ namespaceToolMappingsTable,
+ toolsTable,
+} from "../schema";
+
+export class NamespacesRepository {
+ async create(input: NamespaceCreateInput): Promise<DatabaseNamespace> {
+ return await db.transaction(async (tx) => {
+ // Create the namespace
+ const [createdNamespace] = await tx
+ .insert(namespacesTable)
+ .values({
+ name: input.name,
+ description: input.description,
+ })
+ .returning();
+
+ if (!createdNamespace) {
+ throw new Error("Failed to create namespace");
+ }
+
+ // If mcp server UUIDs are provided, create the mappings with default ACTIVE status
+ if (input.mcpServerUuids && input.mcpServerUuids.length > 0) {
+ const mappings = input.mcpServerUuids.map((serverUuid) => ({
+ namespace_uuid: createdNamespace.uuid,
+ mcp_server_uuid: serverUuid,
+ status: "ACTIVE" as const,
+ }));
+
+ await tx.insert(namespaceServerMappingsTable).values(mappings);
+
+ // Also create namespace-tool mappings for all tools of the selected servers
+ const serverTools = await tx
+ .select({
+ uuid: toolsTable.uuid,
+ mcp_server_uuid: toolsTable.mcp_server_uuid,
+ })
+ .from(toolsTable)
+ .where(inArray(toolsTable.mcp_server_uuid, input.mcpServerUuids));
+
+ if (serverTools.length > 0) {
+ const toolMappings = serverTools.map((tool) => ({
+ namespace_uuid: createdNamespace.uuid,
+ tool_uuid: tool.uuid,
+ mcp_server_uuid: tool.mcp_server_uuid,
+ status: "ACTIVE" as const,
+ }));
+
+ await tx.insert(namespaceToolMappingsTable).values(toolMappings);
+ }
+ }
+
+ return createdNamespace;
+ });
+ }
+
+ async findAll(): Promise<DatabaseNamespace[]> {
+ return await db
+ .select({
+ uuid: namespacesTable.uuid,
+ name: namespacesTable.name,
+ description: namespacesTable.description,
+ created_at: namespacesTable.created_at,
+ updated_at: namespacesTable.updated_at,
+ })
+ .from(namespacesTable)
+ .orderBy(desc(namespacesTable.created_at));
+ }
+
+ async findByUuid(uuid: string): Promise<DatabaseNamespace | undefined> {
+ const [namespace] = await db
+ .select({
+ uuid: namespacesTable.uuid,
+ name: namespacesTable.name,
+ description: namespacesTable.description,
+ created_at: namespacesTable.created_at,
+ updated_at: namespacesTable.updated_at,
+ })
+ .from(namespacesTable)
+ .where(eq(namespacesTable.uuid, uuid));
+
+ return namespace;
+ }
+
+ async findByUuidWithServers(
+ uuid: string,
+ ): Promise<DatabaseNamespaceWithServers | null> {
+ // First, get the namespace
+ const namespace = await this.findByUuid(uuid);
+
+ if (!namespace) {
+ return null;
+ }
+
+ // Then, get servers associated with this namespace
+ const serversData = await db
+ .select({
+ uuid: mcpServersTable.uuid,
+ name: mcpServersTable.name,
+ description: mcpServersTable.description,
+ type: mcpServersTable.type,
+ command: mcpServersTable.command,
+ args: mcpServersTable.args,
+ url: mcpServersTable.url,
+ env: mcpServersTable.env,
+ bearerToken: mcpServersTable.bearerToken,
+ created_at: mcpServersTable.created_at,
+ status: namespaceServerMappingsTable.status,
+ })
+ .from(mcpServersTable)
+ .innerJoin(
+ namespaceServerMappingsTable,
+ eq(mcpServersTable.uuid, namespaceServerMappingsTable.mcp_server_uuid),
+ )
+ .where(eq(namespaceServerMappingsTable.namespace_uuid, uuid));
+
+ // Format the servers without date conversion
+ const servers = serversData.map((server) => ({
+ uuid: server.uuid,
+ name: server.name,
+ description: server.description,
+ type: server.type,
+ command: server.command,
+ args: server.args || [],
+ url: server.url,
+ env: server.env || {},
+ bearerToken: server.bearerToken,
+ created_at: server.created_at,
+ status: server.status,
+ }));
+
+ return {
+ ...namespace,
+ servers,
+ };
+ }
+
+ async findToolsByNamespaceUuid(
+ namespaceUuid: string,
+ ): Promise<DatabaseNamespaceTool[]> {
+ const toolsData = await db
+ .select({
+ // Tool fields
+ uuid: toolsTable.uuid,
+ name: toolsTable.name,
+ description: toolsTable.description,
+ toolSchema: toolsTable.toolSchema,
+ created_at: toolsTable.created_at,
+ updated_at: toolsTable.updated_at,
+ mcp_server_uuid: toolsTable.mcp_server_uuid,
+ // Server fields
+ serverName: mcpServersTable.name,
+ serverUuid: mcpServersTable.uuid,
+ status: namespaceToolMappingsTable.status,
+ })
+ .from(toolsTable)
+ .innerJoin(
+ namespaceToolMappingsTable,
+ eq(toolsTable.uuid, namespaceToolMappingsTable.tool_uuid),
+ )
+ .innerJoin(
+ mcpServersTable,
+ eq(toolsTable.mcp_server_uuid, mcpServersTable.uuid),
+ )
+ .where(eq(namespaceToolMappingsTable.namespace_uuid, namespaceUuid))
+ .orderBy(desc(toolsTable.created_at));
+
+ return toolsData;
+ }
+
+ async deleteByUuid(uuid: string): Promise<DatabaseNamespace | undefined> {
+ const [deletedNamespace] = await db
+ .delete(namespacesTable)
+ .where(eq(namespacesTable.uuid, uuid))
+ .returning();
+
+ return deletedNamespace;
+ }
+
+ async update(input: NamespaceUpdateInput): Promise<DatabaseNamespace> {
+ return await db.transaction(async (tx) => {
+ // Update the namespace
+ const [updatedNamespace] = await tx
+ .update(namespacesTable)
+ .set({
+ name: input.name,
+ description: input.description,
+ updated_at: new Date(),
+ })
+ .where(eq(namespacesTable.uuid, input.uuid))
+ .returning();
+
+ if (!updatedNamespace) {
+ throw new Error("Namespace not found");
+ }
+
+ // If mcpServerUuids are provided, update the mappings
+ if (input.mcpServerUuids) {
+ // Delete existing server mappings
+ await tx
+ .delete(namespaceServerMappingsTable)
+ .where(eq(namespaceServerMappingsTable.namespace_uuid, input.uuid));
+
+ // Delete existing tool mappings
+ await tx
+ .delete(namespaceToolMappingsTable)
+ .where(eq(namespaceToolMappingsTable.namespace_uuid, input.uuid));
+
+ // Create new server mappings if any servers are specified
+ if (input.mcpServerUuids.length > 0) {
+ const serverMappings = input.mcpServerUuids.map((serverUuid) => ({
+ namespace_uuid: input.uuid,
+ mcp_server_uuid: serverUuid,
+ status: "ACTIVE" as const,
+ }));
+
+ await tx.insert(namespaceServerMappingsTable).values(serverMappings);
+
+ // Also create namespace-tool mappings for all tools of the selected servers
+ const serverTools = await tx
+ .select({
+ uuid: toolsTable.uuid,
+ mcp_server_uuid: toolsTable.mcp_server_uuid,
+ })
+ .from(toolsTable)
+ .where(inArray(toolsTable.mcp_server_uuid, input.mcpServerUuids));
+
+ if (serverTools.length > 0) {
+ const toolMappings = serverTools.map((tool) => ({
+ namespace_uuid: input.uuid,
+ tool_uuid: tool.uuid,
+ mcp_server_uuid: tool.mcp_server_uuid,
+ status: "ACTIVE" as const,
+ }));
+
+ await tx.insert(namespaceToolMappingsTable).values(toolMappings);
+ }
+ }
+ }
+
+ return updatedNamespace;
+ });
+ }
+}
+
+export const namespacesRepository = new NamespacesRepository();
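+
+// Usage sketch (illustrative): creating a namespace from a set of existing server
+// UUIDs. Server and tool mappings are created in the same transaction, so the new
+// namespace immediately exposes every tool of the selected servers as ACTIVE.
+// `serverA` and `serverB` are placeholder UUID strings.
+//
+//   const namespace = await namespacesRepository.create({
+//     name: "dev-tools",
+//     description: "Everything needed for local development",
+//     mcpServerUuids: [serverA, serverB],
+//   });
+//   const tools = await namespacesRepository.findToolsByNamespaceUuid(namespace.uuid);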
diff --git a/apps/backend/src/db/repositories/oauth-sessions.repo.ts b/apps/backend/src/db/repositories/oauth-sessions.repo.ts
new file mode 100644
index 00000000..083a3634
--- /dev/null
+++ b/apps/backend/src/db/repositories/oauth-sessions.repo.ts
@@ -0,0 +1,90 @@
+import {
+ DatabaseOAuthSession,
+ OAuthSessionCreateInput,
+ OAuthSessionUpdateInput,
+} from "@repo/zod-types";
+import { eq, sql } from "drizzle-orm";
+
+import { db } from "../index";
+import { oauthSessionsTable } from "../schema";
+
+export class OAuthSessionsRepository {
+ async findByMcpServerUuid(
+ mcpServerUuid: string,
+ ): Promise<DatabaseOAuthSession | undefined> {
+ const [session] = await db
+ .select()
+ .from(oauthSessionsTable)
+ .where(eq(oauthSessionsTable.mcp_server_uuid, mcpServerUuid))
+ .limit(1);
+
+ return session;
+ }
+
+ async create(input: OAuthSessionCreateInput): Promise<DatabaseOAuthSession> {
+ const [createdSession] = await db
+ .insert(oauthSessionsTable)
+ .values({
+ mcp_server_uuid: input.mcp_server_uuid,
+ ...(input.client_information && {
+ client_information: input.client_information,
+ }),
+ ...(input.tokens && { tokens: input.tokens }),
+ ...(input.code_verifier && { code_verifier: input.code_verifier }),
+ })
+ .returning();
+
+ return createdSession;
+ }
+
+ async update(
+ input: OAuthSessionUpdateInput,
+ ): Promise<DatabaseOAuthSession | undefined> {
+ const [updatedSession] = await db
+ .update(oauthSessionsTable)
+ .set({
+ ...(input.client_information && {
+ client_information: input.client_information,
+ }),
+ ...(input.tokens && { tokens: input.tokens }),
+ ...(input.code_verifier && { code_verifier: input.code_verifier }),
+ updated_at: sql`NOW()`,
+ })
+ .where(eq(oauthSessionsTable.mcp_server_uuid, input.mcp_server_uuid))
+ .returning();
+
+ return updatedSession;
+ }
+
+ async upsert(input: OAuthSessionUpdateInput): Promise<DatabaseOAuthSession> {
+ // Check if session exists
+ const existingSession = await this.findByMcpServerUuid(
+ input.mcp_server_uuid,
+ );
+
+ if (existingSession) {
+ // Update existing session
+ const updatedSession = await this.update(input);
+ if (!updatedSession) {
+ throw new Error("Failed to update OAuth session");
+ }
+ return updatedSession;
+ } else {
+ // Create new session
+ return await this.create(input);
+ }
+ }
+
+ async deleteByMcpServerUuid(
+ mcpServerUuid: string,
+ ): Promise<DatabaseOAuthSession | undefined> {
+ const [deletedSession] = await db
+ .delete(oauthSessionsTable)
+ .where(eq(oauthSessionsTable.mcp_server_uuid, mcpServerUuid))
+ .returning();
+
+ return deletedSession;
+ }
+}
+
+export const oauthSessionsRepository = new OAuthSessionsRepository();
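+
+// Usage sketch (illustrative): persisting tokens after an OAuth flow completes.
+// upsert() creates the per-server session row on first use and updates it afterwards.
+// `serverUuid` and `tokens` are placeholders for values produced by the OAuth client.
+//
+//   await oauthSessionsRepository.upsert({
+//     mcp_server_uuid: serverUuid,
+//     tokens,
+//   });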
diff --git a/apps/backend/src/db/repositories/tools.repo.ts b/apps/backend/src/db/repositories/tools.repo.ts
new file mode 100644
index 00000000..ef88d7c0
--- /dev/null
+++ b/apps/backend/src/db/repositories/tools.repo.ts
@@ -0,0 +1,77 @@
+import {
+ DatabaseTool,
+ ToolCreateInput,
+ ToolUpsertInput,
+} from "@repo/zod-types";
+import { eq, sql } from "drizzle-orm";
+
+import { db } from "../index";
+import { toolsTable } from "../schema";
+
+export class ToolsRepository {
+ async findByMcpServerUuid(mcpServerUuid: string): Promise<DatabaseTool[]> {
+ return await db
+ .select()
+ .from(toolsTable)
+ .where(eq(toolsTable.mcp_server_uuid, mcpServerUuid))
+ .orderBy(toolsTable.name);
+ }
+
+ async create(input: ToolCreateInput): Promise<DatabaseTool> {
+ const [createdTool] = await db.insert(toolsTable).values(input).returning();
+
+ return createdTool;
+ }
+
+ async bulkUpsert(input: ToolUpsertInput): Promise<DatabaseTool[]> {
+ if (!input.tools || input.tools.length === 0) {
+ return [];
+ }
+
+ // Format tools for database insertion
+ const toolsToInsert = input.tools.map((tool) => ({
+ name: tool.name,
+ description: tool.description || "",
+ toolSchema: {
+ type: "object" as const,
+ ...tool.inputSchema,
+ },
+ mcp_server_uuid: input.mcpServerUuid,
+ }));
+
+ // Batch insert all tools with upsert
+ return await db
+ .insert(toolsTable)
+ .values(toolsToInsert)
+ .onConflictDoUpdate({
+ target: [toolsTable.mcp_server_uuid, toolsTable.name],
+ set: {
+ description: sql`excluded.description`,
+ toolSchema: sql`excluded.tool_schema`,
+ updated_at: new Date(),
+ },
+ })
+ .returning();
+ }
+
+ async findByUuid(uuid: string): Promise<DatabaseTool | undefined> {
+ const [tool] = await db
+ .select()
+ .from(toolsTable)
+ .where(eq(toolsTable.uuid, uuid))
+ .limit(1);
+
+ return tool;
+ }
+
+ async deleteByUuid(uuid: string): Promise<DatabaseTool | undefined> {
+ const [deletedTool] = await db
+ .delete(toolsTable)
+ .where(eq(toolsTable.uuid, uuid))
+ .returning();
+
+ return deletedTool;
+ }
+}
+
+export const toolsRepository = new ToolsRepository();
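+
+// Usage sketch (illustrative): syncing the tool list reported by a connected MCP
+// server into the database. `serverUuid` and `listToolsResult` (a tools/list
+// response) are placeholders.
+//
+//   await toolsRepository.bulkUpsert({
+//     mcpServerUuid: serverUuid,
+//     tools: listToolsResult.tools, // [{ name, description?, inputSchema }, ...]
+//   });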
diff --git a/apps/backend/src/db/schema.ts b/apps/backend/src/db/schema.ts
new file mode 100644
index 00000000..d6824723
--- /dev/null
+++ b/apps/backend/src/db/schema.ts
@@ -0,0 +1,341 @@
+import { OAuthClientInformation } from "@modelcontextprotocol/sdk/shared/auth.js";
+import { OAuthTokens } from "@modelcontextprotocol/sdk/shared/auth.js";
+import { McpServerStatusEnum, McpServerTypeEnum } from "@repo/zod-types";
+import { sql } from "drizzle-orm";
+import {
+ boolean,
+ index,
+ jsonb,
+ pgEnum,
+ pgTable,
+ text,
+ timestamp,
+ unique,
+ uuid,
+} from "drizzle-orm/pg-core";
+
+export const mcpServerTypeEnum = pgEnum(
+ "mcp_server_type",
+ McpServerTypeEnum.options,
+);
+export const mcpServerStatusEnum = pgEnum(
+ "mcp_server_status",
+ McpServerStatusEnum.options,
+);
+
+export const mcpServersTable = pgTable(
+ "mcp_servers",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ name: text("name").notNull(),
+ description: text("description"),
+ type: mcpServerTypeEnum("type")
+ .notNull()
+ .default(McpServerTypeEnum.Enum.STDIO),
+ command: text("command"),
+ args: text("args")
+ .array()
+ .notNull()
+ .default(sql`'{}'::text[]`),
+ env: jsonb("env")
+ .$type<{ [key: string]: string }>()
+ .notNull()
+ .default(sql`'{}'::jsonb`),
+ url: text("url"),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ bearerToken: text("bearer_token"),
+ },
+ (table) => [
+ index("mcp_servers_type_idx").on(table.type),
+ unique("mcp_servers_name_unique_idx").on(table.name),
+ sql`CONSTRAINT mcp_servers_name_regex_check CHECK (
+ name ~ '^[a-zA-Z0-9_-]+$'
+ )`,
+ sql`CONSTRAINT mcp_servers_url_check CHECK (
+ (type = 'SSE' AND url IS NOT NULL AND command IS NULL AND url ~ '^https?://[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)*(:[0-9]+)?(/[a-zA-Z0-9-._~:/?#\[\]@!$&''()*+,;=]*)?$') OR
+ (type = 'STDIO' AND url IS NULL AND command IS NOT NULL) OR
+ (type = 'STREAMABLE_HTTP' AND url IS NOT NULL AND command IS NULL AND url ~ '^https?://[a-zA-Z0-9-]+(\.[a-zA-Z0-9-]+)*(:[0-9]+)?(/[a-zA-Z0-9-._~:/?#\[\]@!$&''()*+,;=]*)?$')
+ )`,
+ ],
+);
+
+export const oauthSessionsTable = pgTable(
+ "oauth_sessions",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ mcp_server_uuid: uuid("mcp_server_uuid")
+ .notNull()
+ .references(() => mcpServersTable.uuid, { onDelete: "cascade" }),
+ client_information: jsonb("client_information")
+ .$type<OAuthClientInformation>()
+ .notNull()
+ .default(sql`'{}'::jsonb`),
+ tokens: jsonb("tokens").$type(),
+ code_verifier: text("code_verifier"),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updated_at: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ },
+ (table) => [
+ index("oauth_sessions_mcp_server_uuid_idx").on(table.mcp_server_uuid),
+ unique("oauth_sessions_unique_per_server_idx").on(table.mcp_server_uuid),
+ ],
+);
+
+export const toolsTable = pgTable(
+ "tools",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ name: text("name").notNull(),
+ description: text("description"),
+ toolSchema: jsonb("tool_schema")
+ .$type<{
+ type: "object";
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ properties?: Record<string, any>;
+ required?: string[];
+ }>()
+ .notNull(),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updated_at: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ mcp_server_uuid: uuid("mcp_server_uuid")
+ .notNull()
+ .references(() => mcpServersTable.uuid, { onDelete: "cascade" }),
+ },
+ (table) => [
+ index("tools_mcp_server_uuid_idx").on(table.mcp_server_uuid),
+ unique("tools_unique_tool_name_per_server_idx").on(
+ table.mcp_server_uuid,
+ table.name,
+ ),
+ ],
+);
+
+// Better-auth tables
+export const usersTable = pgTable("users", {
+ id: text("id").primaryKey(),
+ name: text("name").notNull(),
+ email: text("email").notNull().unique(),
+ emailVerified: boolean("email_verified").notNull().default(false),
+ image: text("image"),
+ createdAt: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updatedAt: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+});
+
+export const sessionsTable = pgTable("sessions", {
+ id: text("id").primaryKey(),
+ expiresAt: timestamp("expires_at", { withTimezone: true }).notNull(),
+ token: text("token").notNull().unique(),
+ createdAt: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updatedAt: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ ipAddress: text("ip_address"),
+ userAgent: text("user_agent"),
+ userId: text("user_id")
+ .notNull()
+ .references(() => usersTable.id, { onDelete: "cascade" }),
+});
+
+export const accountsTable = pgTable("accounts", {
+ id: text("id").primaryKey(),
+ accountId: text("account_id").notNull(),
+ providerId: text("provider_id").notNull(),
+ userId: text("user_id")
+ .notNull()
+ .references(() => usersTable.id, { onDelete: "cascade" }),
+ accessToken: text("access_token"),
+ refreshToken: text("refresh_token"),
+ idToken: text("id_token"),
+ accessTokenExpiresAt: timestamp("access_token_expires_at", {
+ withTimezone: true,
+ }),
+ refreshTokenExpiresAt: timestamp("refresh_token_expires_at", {
+ withTimezone: true,
+ }),
+ scope: text("scope"),
+ password: text("password"),
+ createdAt: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updatedAt: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+});
+
+export const verificationsTable = pgTable("verifications", {
+ id: text("id").primaryKey(),
+ identifier: text("identifier").notNull(),
+ value: text("value").notNull(),
+ expiresAt: timestamp("expires_at", { withTimezone: true }).notNull(),
+ createdAt: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updatedAt: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+});
+
+// Namespaces table
+export const namespacesTable = pgTable("namespaces", {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ name: text("name").notNull().unique(),
+ description: text("description"),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updated_at: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+});
+
+// Endpoints table - public routing endpoints that map to namespaces
+export const endpointsTable = pgTable(
+ "endpoints",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ name: text("name").notNull().unique(),
+ description: text("description"),
+ namespace_uuid: uuid("namespace_uuid")
+ .notNull()
+ .references(() => namespacesTable.uuid, { onDelete: "cascade" }),
+ enable_api_key_auth: boolean("enable_api_key_auth").notNull().default(true),
+ use_query_param_auth: boolean("use_query_param_auth")
+ .notNull()
+ .default(false),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updated_at: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ },
+ (table) => [
+ index("endpoints_namespace_uuid_idx").on(table.namespace_uuid),
+ unique("endpoints_name_unique_idx").on(table.name),
+ sql`CONSTRAINT endpoints_name_url_compatible_check CHECK (
+ name ~ '^[a-zA-Z0-9_-]+$'
+ )`,
+ ],
+);
+
+// Many-to-many relationship table between namespaces and mcp servers
+export const namespaceServerMappingsTable = pgTable(
+ "namespace_server_mappings",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ namespace_uuid: uuid("namespace_uuid")
+ .notNull()
+ .references(() => namespacesTable.uuid, { onDelete: "cascade" }),
+ mcp_server_uuid: uuid("mcp_server_uuid")
+ .notNull()
+ .references(() => mcpServersTable.uuid, { onDelete: "cascade" }),
+ status: mcpServerStatusEnum("status")
+ .notNull()
+ .default(McpServerStatusEnum.Enum.ACTIVE),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ },
+ (table) => [
+ index("namespace_server_mappings_namespace_uuid_idx").on(
+ table.namespace_uuid,
+ ),
+ index("namespace_server_mappings_mcp_server_uuid_idx").on(
+ table.mcp_server_uuid,
+ ),
+ index("namespace_server_mappings_status_idx").on(table.status),
+ unique("namespace_server_mappings_unique_idx").on(
+ table.namespace_uuid,
+ table.mcp_server_uuid,
+ ),
+ ],
+);
+
+// Many-to-many relationship table between namespaces and tools for status control
+export const namespaceToolMappingsTable = pgTable(
+ "namespace_tool_mappings",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ namespace_uuid: uuid("namespace_uuid")
+ .notNull()
+ .references(() => namespacesTable.uuid, { onDelete: "cascade" }),
+ tool_uuid: uuid("tool_uuid")
+ .notNull()
+ .references(() => toolsTable.uuid, { onDelete: "cascade" }),
+ mcp_server_uuid: uuid("mcp_server_uuid")
+ .notNull()
+ .references(() => mcpServersTable.uuid, { onDelete: "cascade" }),
+ status: mcpServerStatusEnum("status")
+ .notNull()
+ .default(McpServerStatusEnum.Enum.ACTIVE),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ },
+ (table) => [
+ index("namespace_tool_mappings_namespace_uuid_idx").on(
+ table.namespace_uuid,
+ ),
+ index("namespace_tool_mappings_tool_uuid_idx").on(table.tool_uuid),
+ index("namespace_tool_mappings_mcp_server_uuid_idx").on(
+ table.mcp_server_uuid,
+ ),
+ index("namespace_tool_mappings_status_idx").on(table.status),
+ unique("namespace_tool_mappings_unique_idx").on(
+ table.namespace_uuid,
+ table.tool_uuid,
+ ),
+ ],
+);
+
+// API Keys table
+export const apiKeysTable = pgTable(
+ "api_keys",
+ {
+ uuid: uuid("uuid").primaryKey().defaultRandom(),
+ name: text("name").notNull(),
+ key: text("key").notNull().unique(),
+ user_id: text("user_id")
+ .notNull()
+ .references(() => usersTable.id, { onDelete: "cascade" }),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ is_active: boolean("is_active").notNull().default(true),
+ },
+ (table) => [
+ index("api_keys_user_id_idx").on(table.user_id),
+ index("api_keys_key_idx").on(table.key),
+ index("api_keys_is_active_idx").on(table.is_active),
+ unique("api_keys_name_per_user_idx").on(table.user_id, table.name),
+ ],
+);
+
+// Configuration table for app-wide settings
+export const configTable = pgTable("config", {
+ id: text("id").primaryKey(),
+ value: text("value").notNull(),
+ description: text("description"),
+ created_at: timestamp("created_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+ updated_at: timestamp("updated_at", { withTimezone: true })
+ .notNull()
+ .defaultNow(),
+});
diff --git a/apps/backend/src/db/serializers/api-keys.serializer.ts b/apps/backend/src/db/serializers/api-keys.serializer.ts
new file mode 100644
index 00000000..6ad8e200
--- /dev/null
+++ b/apps/backend/src/db/serializers/api-keys.serializer.ts
@@ -0,0 +1,44 @@
+export class ApiKeysSerializer {
+ static serializeApiKey(dbApiKey: {
+ uuid: string;
+ name: string;
+ key: string;
+ created_at: Date;
+ is_active: boolean;
+ }) {
+ return {
+ uuid: dbApiKey.uuid,
+ name: dbApiKey.name,
+ key: dbApiKey.key,
+ created_at: dbApiKey.created_at,
+ is_active: dbApiKey.is_active,
+ };
+ }
+
+ static serializeApiKeyList(
+ dbApiKeys: Array<{
+ uuid: string;
+ name: string;
+ key: string;
+ created_at: Date;
+ is_active: boolean;
+ }>,
+ ) {
+ return dbApiKeys.map(this.serializeApiKey);
+ }
+
+ static serializeCreateApiKeyResponse(dbApiKey: {
+ uuid: string;
+ name: string;
+ key: string;
+ user_id: string;
+ created_at: Date;
+ }) {
+ return {
+ uuid: dbApiKey.uuid,
+ name: dbApiKey.name,
+ key: dbApiKey.key,
+ created_at: dbApiKey.created_at,
+ };
+ }
+}
diff --git a/apps/backend/src/db/serializers/endpoints.serializer.ts b/apps/backend/src/db/serializers/endpoints.serializer.ts
new file mode 100644
index 00000000..45784beb
--- /dev/null
+++ b/apps/backend/src/db/serializers/endpoints.serializer.ts
@@ -0,0 +1,53 @@
+import {
+ DatabaseEndpoint,
+ DatabaseEndpointWithNamespace,
+ Endpoint,
+ EndpointWithNamespace,
+} from "@repo/zod-types";
+
+export class EndpointsSerializer {
+ static serializeEndpoint(dbEndpoint: DatabaseEndpoint): Endpoint {
+ return {
+ uuid: dbEndpoint.uuid,
+ name: dbEndpoint.name,
+ description: dbEndpoint.description,
+ namespace_uuid: dbEndpoint.namespace_uuid,
+ enable_api_key_auth: dbEndpoint.enable_api_key_auth,
+ use_query_param_auth: dbEndpoint.use_query_param_auth,
+ created_at: dbEndpoint.created_at.toISOString(),
+ updated_at: dbEndpoint.updated_at.toISOString(),
+ };
+ }
+
+ static serializeEndpointList(dbEndpoints: DatabaseEndpoint[]): Endpoint[] {
+ return dbEndpoints.map(this.serializeEndpoint);
+ }
+
+ static serializeEndpointWithNamespace(
+ dbEndpoint: DatabaseEndpointWithNamespace,
+ ): EndpointWithNamespace {
+ return {
+ uuid: dbEndpoint.uuid,
+ name: dbEndpoint.name,
+ description: dbEndpoint.description,
+ namespace_uuid: dbEndpoint.namespace_uuid,
+ enable_api_key_auth: dbEndpoint.enable_api_key_auth,
+ use_query_param_auth: dbEndpoint.use_query_param_auth,
+ created_at: dbEndpoint.created_at.toISOString(),
+ updated_at: dbEndpoint.updated_at.toISOString(),
+ namespace: {
+ uuid: dbEndpoint.namespace.uuid,
+ name: dbEndpoint.namespace.name,
+ description: dbEndpoint.namespace.description,
+ created_at: dbEndpoint.namespace.created_at.toISOString(),
+ updated_at: dbEndpoint.namespace.updated_at.toISOString(),
+ },
+ };
+ }
+
+ static serializeEndpointWithNamespaceList(
+ dbEndpoints: DatabaseEndpointWithNamespace[],
+ ): EndpointWithNamespace[] {
+ return dbEndpoints.map(this.serializeEndpointWithNamespace);
+ }
+}
diff --git a/apps/backend/src/db/serializers/index.ts b/apps/backend/src/db/serializers/index.ts
new file mode 100644
index 00000000..83d90941
--- /dev/null
+++ b/apps/backend/src/db/serializers/index.ts
@@ -0,0 +1,6 @@
+export * from "./endpoints.serializer";
+export * from "./mcp-servers.serializer";
+export * from "./namespaces.serializer";
+export * from "./oauth-sessions.serializer";
+export * from "./tools.serializer";
+export * from "./api-keys.serializer";
diff --git a/apps/backend/src/db/serializers/mcp-servers.serializer.ts b/apps/backend/src/db/serializers/mcp-servers.serializer.ts
new file mode 100644
index 00000000..a0ca94de
--- /dev/null
+++ b/apps/backend/src/db/serializers/mcp-servers.serializer.ts
@@ -0,0 +1,22 @@
+import { DatabaseMcpServer, McpServer } from "@repo/zod-types";
+
+export class McpServersSerializer {
+ static serializeMcpServer(dbServer: DatabaseMcpServer): McpServer {
+ return {
+ uuid: dbServer.uuid,
+ name: dbServer.name,
+ description: dbServer.description,
+ type: dbServer.type,
+ command: dbServer.command,
+ args: dbServer.args,
+ env: dbServer.env,
+ url: dbServer.url,
+ created_at: dbServer.created_at.toISOString(),
+ bearerToken: dbServer.bearerToken,
+ };
+ }
+
+ static serializeMcpServerList(dbServers: DatabaseMcpServer[]): McpServer[] {
+ return dbServers.map(this.serializeMcpServer);
+ }
+}
diff --git a/apps/backend/src/db/serializers/namespaces.serializer.ts b/apps/backend/src/db/serializers/namespaces.serializer.ts
new file mode 100644
index 00000000..36dd8ed0
--- /dev/null
+++ b/apps/backend/src/db/serializers/namespaces.serializer.ts
@@ -0,0 +1,72 @@
+import {
+ DatabaseNamespace,
+ DatabaseNamespaceTool,
+ DatabaseNamespaceWithServers,
+ Namespace,
+ NamespaceTool,
+ NamespaceWithServers,
+} from "@repo/zod-types";
+
+export class NamespacesSerializer {
+ static serializeNamespace(dbNamespace: DatabaseNamespace): Namespace {
+ return {
+ uuid: dbNamespace.uuid,
+ name: dbNamespace.name,
+ description: dbNamespace.description,
+ created_at: dbNamespace.created_at.toISOString(),
+ updated_at: dbNamespace.updated_at.toISOString(),
+ };
+ }
+
+ static serializeNamespaceList(
+ dbNamespaces: DatabaseNamespace[],
+ ): Namespace[] {
+ return dbNamespaces.map(this.serializeNamespace);
+ }
+
+ static serializeNamespaceWithServers(
+ dbNamespace: DatabaseNamespaceWithServers,
+ ): NamespaceWithServers {
+ return {
+ uuid: dbNamespace.uuid,
+ name: dbNamespace.name,
+ description: dbNamespace.description,
+ created_at: dbNamespace.created_at.toISOString(),
+ updated_at: dbNamespace.updated_at.toISOString(),
+ servers: dbNamespace.servers.map((server) => ({
+ uuid: server.uuid,
+ name: server.name,
+ description: server.description,
+ type: server.type,
+ command: server.command,
+ args: server.args || [],
+ url: server.url,
+ env: server.env || {},
+ bearerToken: server.bearerToken,
+ created_at: server.created_at.toISOString(),
+ status: server.status,
+ })),
+ };
+ }
+
+ static serializeNamespaceTool(dbTool: DatabaseNamespaceTool): NamespaceTool {
+ return {
+ uuid: dbTool.uuid,
+ name: dbTool.name,
+ description: dbTool.description,
+ toolSchema: dbTool.toolSchema,
+ created_at: dbTool.created_at.toISOString(),
+ updated_at: dbTool.updated_at.toISOString(),
+ mcp_server_uuid: dbTool.mcp_server_uuid,
+ status: dbTool.status,
+ serverName: dbTool.serverName,
+ serverUuid: dbTool.serverUuid,
+ };
+ }
+
+ static serializeNamespaceTools(
+ dbTools: DatabaseNamespaceTool[],
+ ): NamespaceTool[] {
+ return dbTools.map(this.serializeNamespaceTool);
+ }
+}
diff --git a/apps/backend/src/db/serializers/oauth-sessions.serializer.ts b/apps/backend/src/db/serializers/oauth-sessions.serializer.ts
new file mode 100644
index 00000000..222c5e35
--- /dev/null
+++ b/apps/backend/src/db/serializers/oauth-sessions.serializer.ts
@@ -0,0 +1,40 @@
+import {
+ OAuthClientInformation,
+ OAuthTokens,
+} from "@modelcontextprotocol/sdk/shared/auth.js";
+
+type DatabaseOAuthSession = {
+ uuid: string;
+ mcp_server_uuid: string;
+ client_information: OAuthClientInformation | null;
+ tokens: OAuthTokens | null;
+ code_verifier: string | null;
+ created_at: Date;
+ updated_at: Date;
+};
+
+type SerializedOAuthSession = {
+ uuid: string;
+ mcp_server_uuid: string;
+ client_information: OAuthClientInformation | null;
+ tokens: OAuthTokens | null;
+ code_verifier: string | null;
+ created_at: string;
+ updated_at: string;
+};
+
+export class OAuthSessionsSerializer {
+ static serializeOAuthSession(
+ dbSession: DatabaseOAuthSession,
+ ): SerializedOAuthSession {
+ return {
+ uuid: dbSession.uuid,
+ mcp_server_uuid: dbSession.mcp_server_uuid,
+ client_information: dbSession.client_information,
+ tokens: dbSession.tokens,
+ code_verifier: dbSession.code_verifier,
+ created_at: dbSession.created_at.toISOString(),
+ updated_at: dbSession.updated_at.toISOString(),
+ };
+ }
+}
diff --git a/apps/backend/src/db/serializers/tools.serializer.ts b/apps/backend/src/db/serializers/tools.serializer.ts
new file mode 100644
index 00000000..00ba401a
--- /dev/null
+++ b/apps/backend/src/db/serializers/tools.serializer.ts
@@ -0,0 +1,33 @@
+import { Tool } from "@repo/zod-types";
+
+type DatabaseTool = {
+ uuid: string;
+ name: string;
+ description: string | null;
+ toolSchema: {
+ type: "object";
+ // eslint-disable-next-line @typescript-eslint/no-explicit-any
+ properties?: Record<string, any>;
+ };
+ created_at: Date;
+ updated_at: Date;
+ mcp_server_uuid: string;
+};
+
+export class ToolsSerializer {
+ static serializeTool(dbTool: DatabaseTool): Tool {
+ return {
+ uuid: dbTool.uuid,
+ name: dbTool.name,
+ description: dbTool.description,
+ toolSchema: dbTool.toolSchema,
+ created_at: dbTool.created_at.toISOString(),
+ updated_at: dbTool.updated_at.toISOString(),
+ mcp_server_uuid: dbTool.mcp_server_uuid,
+ };
+ }
+
+ static serializeToolList(dbTools: DatabaseTool[]): Tool[] {
+ return dbTools.map(this.serializeTool);
+ }
+}
diff --git a/apps/backend/src/index.ts b/apps/backend/src/index.ts
new file mode 100644
index 00000000..9f10d96d
--- /dev/null
+++ b/apps/backend/src/index.ts
@@ -0,0 +1,105 @@
+import "dotenv/config";
+
+import express from "express";
+
+import { auth } from "./auth";
+import mcpProxyRouter from "./routers/mcp-proxy";
+import publicEndpointsRouter from "./routers/public-metamcp";
+import trpcRouter from "./routers/trpc";
+
+const app = express();
+const PORT = process.env.PORT || 12009;
+
+// Request logging middleware
+app.use((req, res, next) => {
+ console.log(`${req.method} ${req.path} - ${new Date().toISOString()}`);
+ next();
+});
+
+// Global JSON middleware for non-proxy routes
+app.use((req, res, next) => {
+ if (req.path.startsWith("/mcp-proxy/") || req.path.startsWith("/metamcp/")) {
+ // Skip JSON parsing for all MCP proxy routes and public endpoints to allow raw stream access
+ next();
+ } else {
+ express.json()(req, res, next);
+ }
+});
+
+// Mount better-auth routes by calling auth API directly
+app.use(async (req, res, next) => {
+ if (req.path.startsWith("/api/auth")) {
+ try {
+ // Create a web Request object from Express request
+ const url = new URL(req.url, `http://${req.headers.host}`);
+ const headers = new Headers();
+
+ // Copy headers from Express request
+ Object.entries(req.headers).forEach(([key, value]) => {
+ if (value) {
+ headers.set(key, Array.isArray(value) ? value[0] : value);
+ }
+ });
+
+ // Create Request object
+ const request = new Request(url.toString(), {
+ method: req.method,
+ headers,
+ body:
+ req.method !== "GET" && req.method !== "HEAD"
+ ? JSON.stringify(req.body)
+ : undefined,
+ });
+
+ // Call better-auth directly
+ const response = await auth.handler(request);
+
+ // Convert Response back to Express response
+ res.status(response.status);
+
+ // Copy headers
+ response.headers.forEach((value, key) => {
+ res.setHeader(key, value);
+ });
+
+ // Send body
+ const body = await response.text();
+ res.send(body);
+ } catch (error) {
+ console.error("Auth route error:", error);
+ res.status(500).json({
+ error: "Internal server error",
+ details: error instanceof Error ? error.message : String(error),
+ });
+ }
+ return;
+ }
+ next();
+});
+
+// Mount public endpoints routes (must be before JSON middleware to handle raw streams)
+app.use("/metamcp", publicEndpointsRouter);
+
+// Mount MCP proxy routes
+app.use("/mcp-proxy", mcpProxyRouter);
+
+// Mount tRPC routes
+app.use("/trpc", trpcRouter);
+
+app.listen(PORT, () => {
+ console.log(`Server is running on port ${PORT}`);
+ console.log(`Auth routes available at: http://localhost:${PORT}/api/auth`);
+ console.log(
+ `Public MetaMCP endpoints available at: http://localhost:${PORT}/metamcp`,
+ );
+ console.log(
+ `MCP Proxy routes available at: http://localhost:${PORT}/mcp-proxy`,
+ );
+ console.log(`tRPC routes available at: http://localhost:${PORT}/trpc`);
+});
+
+app.get("/health", (req, res) => {
+ res.json({
+ status: "ok",
+ });
+});
diff --git a/apps/backend/src/lib/config.service.ts b/apps/backend/src/lib/config.service.ts
new file mode 100644
index 00000000..02dbeb04
--- /dev/null
+++ b/apps/backend/src/lib/config.service.ts
@@ -0,0 +1,42 @@
+import { configRepo } from "../db/repositories/config.repo";
+
+// Configuration keys
+export const CONFIG_KEYS = {
+ DISABLE_SIGNUP: "disable_signup",
+} as const;
+
+export type ConfigKey = (typeof CONFIG_KEYS)[keyof typeof CONFIG_KEYS];
+
+export const configService = {
+ async isSignupDisabled(): Promise<boolean> {
+ const config = await configRepo.getConfig(CONFIG_KEYS.DISABLE_SIGNUP);
+ return config?.value === "true";
+ },
+
+ async setSignupDisabled(disabled: boolean): Promise<void> {
+ await configRepo.setConfig(
+ CONFIG_KEYS.DISABLE_SIGNUP,
+ disabled.toString(),
+ "Whether new user signup is disabled",
+ );
+ },
+
+ async getConfig(key: ConfigKey): Promise<string | undefined> {
+ const config = await configRepo.getConfig(key);
+ return config?.value;
+ },
+
+ async setConfig(
+ key: ConfigKey,
+ value: string,
+ description?: string,
+ ): Promise<void> {
+ await configRepo.setConfig(key, value, description);
+ },
+
+ async getAllConfigs(): Promise<
+ Array<{ id: string; value: string; description?: string | null }>
+ > {
+ return await configRepo.getAllConfigs();
+ },
+};
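+
+// Usage sketch (illustrative): an admin-only endpoint could flip the signup flag,
+// and the auth layer can read it back before creating accounts.
+//
+//   await configService.setSignupDisabled(true);
+//   if (await configService.isSignupDisabled()) {
+//     throw new Error("Signup is currently disabled");
+//   }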
diff --git a/apps/backend/src/lib/mcp-proxy.ts b/apps/backend/src/lib/mcp-proxy.ts
new file mode 100644
index 00000000..9dee9de4
--- /dev/null
+++ b/apps/backend/src/lib/mcp-proxy.ts
@@ -0,0 +1,82 @@
+import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
+import { isJSONRPCRequest } from "@modelcontextprotocol/sdk/types.js";
+
+function onClientError(error: Error) {
+ console.error("Error from inspector client:", error);
+}
+
+function onServerError(error: Error) {
+ if (
+ (error?.message &&
+ error.message.includes("Error POSTing to endpoint (HTTP 404)")) ||
+ (error?.cause && JSON.stringify(error.cause).includes("ECONNREFUSED"))
+ ) {
+ console.error("Connection refused. Is the MCP server running?");
+ } else {
+ console.error("Error from MCP server:", error);
+ }
+}
+
+export default function mcpProxy({
+ transportToClient,
+ transportToServer,
+}: {
+ transportToClient: Transport;
+ transportToServer: Transport;
+}) {
+ let transportToClientClosed = false;
+ let transportToServerClosed = false;
+
+ let reportedServerSession = false;
+
+ transportToClient.onmessage = (message) => {
+ transportToServer.send(message).catch((error) => {
+ // Send error response back to client if it was a request (has id) and connection is still open
+ if (isJSONRPCRequest(message) && !transportToClientClosed) {
+ const errorResponse = {
+ jsonrpc: "2.0" as const,
+ id: message.id,
+ error: {
+ code: -32001,
+ message: error.message,
+ data: error,
+ },
+ };
+ transportToClient.send(errorResponse).catch(onClientError);
+ }
+ });
+ };
+
+ transportToServer.onmessage = (message) => {
+ if (!reportedServerSession) {
+ if (transportToServer.sessionId) {
+ // Can only report for StreamableHttp
+ console.error(
+ "Proxy <-> Server sessionId: " + transportToServer.sessionId,
+ );
+ }
+ reportedServerSession = true;
+ }
+ transportToClient.send(message).catch(onClientError);
+ };
+
+ transportToClient.onclose = () => {
+ if (transportToServerClosed) {
+ return;
+ }
+
+ transportToClientClosed = true;
+ transportToServer.close().catch(onServerError);
+ };
+
+ transportToServer.onclose = () => {
+ if (transportToClientClosed) {
+ return;
+ }
+ transportToServerClosed = true;
+ transportToClient.close().catch(onClientError);
+ };
+
+ transportToClient.onerror = onClientError;
+ transportToServer.onerror = onServerError;
+}
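+
+// Usage sketch (illustrative): bridging an incoming client transport to an outgoing
+// server transport. Both values are placeholders for transports created elsewhere
+// (for example, a StreamableHTTP server transport and an SSE client transport).
+//
+//   mcpProxy({
+//     transportToClient: incomingTransport,
+//     transportToServer: outgoingTransport,
+//   });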
diff --git a/apps/backend/src/lib/metamcp/client.ts b/apps/backend/src/lib/metamcp/client.ts
new file mode 100644
index 00000000..8c7cfbb5
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/client.ts
@@ -0,0 +1,158 @@
+import { Client } from "@modelcontextprotocol/sdk/client/index.js";
+import { SSEClientTransport } from "@modelcontextprotocol/sdk/client/sse.js";
+import {
+ StdioClientTransport,
+ StdioServerParameters,
+} from "@modelcontextprotocol/sdk/client/stdio.js";
+import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
+
+import { IOType, ServerParameters } from "./fetch-metamcp";
+
+const sleep = (time: number) =>
+ new Promise((resolve) => setTimeout(() => resolve(), time));
+export interface ConnectedClient {
+ client: Client;
+ cleanup: () => Promise;
+}
+
+/**
+ * Transforms localhost URLs to use host.docker.internal when running inside Docker
+ */
+export const transformDockerUrl = (url: string): string => {
+ if (process.env.TRANSFORM_LOCALHOST_TO_DOCKER_INTERNAL === "true") {
+ const transformed = url.replace(
+ /localhost|127\.0\.0\.1/g,
+ "host.docker.internal",
+ );
+ console.log(`Docker URL transformation: ${url} -> ${transformed}`);
+ return transformed;
+ }
+ console.log(`Docker URL transformation disabled: ${url}`);
+ return url;
+};
+
+export const createMetaMcpClient = (
+ serverParams: ServerParameters,
+): { client: Client | undefined; transport: Transport | undefined } => {
+ let transport: Transport | undefined;
+
+ // Create the appropriate transport based on server type
+ // Default to "STDIO" if type is undefined
+ if (!serverParams.type || serverParams.type === "STDIO") {
+ // Get stderr handling from serverParams, defaulting to "ignore"
+ const stderrValue: IOType = serverParams.stderr ?? "ignore";
+
+ const stdioParams: StdioServerParameters = {
+ command: serverParams.command || "",
+ args: serverParams.args || undefined,
+ env: serverParams.env || undefined,
+ stderr: stderrValue,
+ };
+ transport = new StdioClientTransport(stdioParams);
+
+ // Handle stderr stream when set to "pipe"
+ // if ((transport as StdioClientTransport).stderr) {
+ // const stderrStream = (transport as StdioClientTransport).stderr;
+
+ // stderrStream?.on("data", (chunk: Buffer) => {
+ // console.error(`[${serverParams.name}] ${chunk.toString().trim()}`);
+ // });
+
+ // stderrStream?.on("error", (error: Error) => {
+ // console.error(`[${serverParams.name}] stderr error:`, error);
+ // });
+ // }
+ } else if (serverParams.type === "SSE" && serverParams.url) {
+ // Transform the URL if TRANSFORM_LOCALHOST_TO_DOCKER_INTERNAL is set to "true"
+ const transformedUrl = transformDockerUrl(serverParams.url);
+ console.log(`Creating SSE transport for: ${transformedUrl}`);
+
+ if (!serverParams.oauth_tokens) {
+ transport = new SSEClientTransport(new URL(transformedUrl));
+ } else {
+ const headers: Record<string, string> = {};
+ headers["Authorization"] =
+ `Bearer ${serverParams.oauth_tokens.access_token}`;
+ transport = new SSEClientTransport(new URL(transformedUrl), {
+ requestInit: {
+ headers,
+ },
+ eventSourceInit: {
+ fetch: (url, init) => fetch(url, { ...init, headers }),
+ },
+ });
+ }
+ } else if (serverParams.type === "STREAMABLE_HTTP" && serverParams.url) {
+ // Transform the URL if TRANSFORM_LOCALHOST_TO_DOCKER_INTERNAL is set to "true"
+ const transformedUrl = transformDockerUrl(serverParams.url);
+ console.log(`Creating StreamableHTTP transport for: ${transformedUrl}`);
+
+ if (!serverParams.oauth_tokens) {
+ transport = new StreamableHTTPClientTransport(new URL(transformedUrl));
+ } else {
+ const headers: Record<string, string> = {};
+ headers["Authorization"] =
+ `Bearer ${serverParams.oauth_tokens.access_token}`;
+ transport = new StreamableHTTPClientTransport(new URL(transformedUrl), {
+ requestInit: {
+ headers,
+ },
+ });
+ }
+ } else {
+ console.error(`Unsupported server type: ${serverParams.type}`);
+ return { client: undefined, transport: undefined };
+ }
+
+ const client = new Client(
+ {
+ name: "metamcp-client",
+ version: "2.0.0",
+ },
+ {
+ capabilities: {
+ prompts: {},
+ resources: { subscribe: true },
+ tools: {},
+ },
+ },
+ );
+ return { client, transport };
+};
+
+export const connectMetaMcpClient = async (
+ client: Client,
+ transport: Transport,
+): Promise => {
+ const waitFor = 2500;
+ const retries = 3;
+ let count = 0;
+ let retry = true;
+
+ while (retry) {
+ try {
+ await client.connect(transport);
+
+ return {
+ client,
+ cleanup: async () => {
+ await transport.close();
+ await client.close();
+ },
+ };
+ } catch (error) {
+ console.error(`Error connecting to MetaMCP client: ${error}`);
+ count++;
+ retry = count < retries;
+ if (retry) {
+ try {
+ await client.close();
+ } catch {
+ /* empty */
+ }
+ await sleep(waitFor);
+ }
+ }
+ }
+};
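+
+// Usage sketch (illustrative): spinning up a STDIO-backed client from ServerParameters.
+// `params` is a placeholder for an entry returned by getMcpServers(), and listTools()
+// is the MCP SDK client call for enumerating tools.
+//
+//   const { client, transport } = createMetaMcpClient(params);
+//   if (client && transport) {
+//     const connected = await connectMetaMcpClient(client, transport);
+//     const tools = await connected?.client.listTools();
+//     await connected?.cleanup();
+//   }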
diff --git a/apps/backend/src/lib/metamcp/fetch-metamcp.ts b/apps/backend/src/lib/metamcp/fetch-metamcp.ts
new file mode 100644
index 00000000..7cdabd27
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/fetch-metamcp.ts
@@ -0,0 +1,137 @@
+import { McpServerStatusEnum } from "@repo/zod-types";
+import { and, eq } from "drizzle-orm";
+
+import { db } from "../../db/index";
+import { oauthSessionsRepository } from "../../db/repositories/index";
+import { mcpServersTable, namespaceServerMappingsTable } from "../../db/schema";
+import { getDefaultEnvironment } from "./utils";
+
+// Define IOType for stderr handling
+export type IOType = "overlapped" | "pipe" | "ignore" | "inherit";
+
+// Define a new interface for server parameters that can be STDIO, SSE or STREAMABLE_HTTP
+export interface ServerParameters {
+ uuid: string;
+ name: string;
+ description: string;
+ type?: "STDIO" | "SSE" | "STREAMABLE_HTTP"; // Optional field, defaults to "STDIO" when undefined
+ command?: string | null;
+ args?: string[] | null;
+ env?: Record<string, string> | null;
+ stderr?: IOType; // Optional field for stderr handling, defaults to "ignore"
+ url?: string | null;
+ created_at: string;
+ status: string;
+ oauth_tokens?: {
+ access_token: string;
+ token_type: string;
+ expires_in?: number | undefined;
+ scope?: string | undefined;
+ refresh_token?: string | undefined;
+ } | null;
+}
+
+export async function getMcpServers(
+ namespaceUuid: string,
+ includeInactiveServers: boolean = false,
+): Promise> {
+ try {
+ // Build the where conditions based on includeInactiveServers flag
+ const whereConditions = [
+ eq(namespaceServerMappingsTable.namespace_uuid, namespaceUuid),
+ ];
+
+ // Only filter by ACTIVE status if includeInactiveServers is false
+ if (!includeInactiveServers) {
+ whereConditions.push(
+ eq(
+ namespaceServerMappingsTable.status,
+ McpServerStatusEnum.Enum.ACTIVE,
+ ),
+ );
+ }
+
+ // Fetch MCP servers for the specific namespace using a join query
+ const servers = await db
+ .select({
+ uuid: mcpServersTable.uuid,
+ name: mcpServersTable.name,
+ description: mcpServersTable.description,
+ type: mcpServersTable.type,
+ command: mcpServersTable.command,
+ args: mcpServersTable.args,
+ env: mcpServersTable.env,
+ url: mcpServersTable.url,
+ created_at: mcpServersTable.created_at,
+ bearerToken: mcpServersTable.bearerToken,
+ status: namespaceServerMappingsTable.status,
+ })
+ .from(mcpServersTable)
+ .innerJoin(
+ namespaceServerMappingsTable,
+ eq(mcpServersTable.uuid, namespaceServerMappingsTable.mcp_server_uuid),
+ )
+ .where(and(...whereConditions));
+
+ const serverDict: Record<string, ServerParameters> = {};
+ for (const server of servers) {
+ // Fetch OAuth tokens from OAuth sessions table
+ const oauthSession = await oauthSessionsRepository.findByMcpServerUuid(
+ server.uuid,
+ );
+ let oauthTokens = null;
+
+ if (oauthSession && oauthSession.tokens) {
+ oauthTokens = {
+ access_token: oauthSession.tokens.access_token,
+ token_type: oauthSession.tokens.token_type,
+ expires_in: oauthSession.tokens.expires_in,
+ scope: oauthSession.tokens.scope,
+ refresh_token: oauthSession.tokens.refresh_token,
+ };
+ }
+
+ const params: ServerParameters = {
+ uuid: server.uuid,
+ name: server.name,
+ description: server.description || "",
+ type: server.type || "STDIO",
+ command: server.command,
+ args: server.args || [],
+ env: server.env || {},
+ url: server.url,
+ created_at:
+ server.created_at?.toISOString() || new Date().toISOString(),
+ status: server.status.toLowerCase(),
+ oauth_tokens: oauthTokens,
+ };
+
+ // Process based on server type
+ if (params.type === "STDIO") {
+ if ("args" in params && !params.args) {
+ params.args = undefined;
+ }
+
+ params.env = {
+ ...getDefaultEnvironment(),
+ ...(params.env || {}),
+ };
+ } else if (params.type === "SSE" || params.type === "STREAMABLE_HTTP") {
+ // For SSE or STREAMABLE_HTTP servers, ensure url is present
+ if (!params.url) {
+ console.warn(
+ `${params.type} server ${params.uuid} is missing url field, skipping`,
+ );
+ continue;
+ }
+ }
+
+ serverDict[server.uuid] = params;
+ }
+
+ return serverDict;
+ } catch (error) {
+ console.error("Error fetching active MCP servers from database:", error);
+ return {};
+ }
+}
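+
+// Usage sketch (illustrative): the returned record is keyed by server UUID and is
+// ready to feed into createMetaMcpClient(). `namespaceUuid` is a placeholder.
+//
+//   const servers = await getMcpServers(namespaceUuid);
+//   for (const params of Object.values(servers)) {
+//     const { client, transport } = createMetaMcpClient(params);
+//     // connect, list tools, etc.
+//   }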
diff --git a/apps/backend/src/lib/metamcp/index.ts b/apps/backend/src/lib/metamcp/index.ts
new file mode 100644
index 00000000..edd5918d
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/index.ts
@@ -0,0 +1,3 @@
+// Export the core MetaMCP server functionality for backend integration
+export { createServer } from "./metamcp-proxy";
+export type { ServerParameters } from "./fetch-metamcp";
diff --git a/apps/backend/src/lib/metamcp/metamcp-middleware/filter-tools.functional.ts b/apps/backend/src/lib/metamcp/metamcp-middleware/filter-tools.functional.ts
new file mode 100644
index 00000000..e2241b09
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/metamcp-middleware/filter-tools.functional.ts
@@ -0,0 +1,365 @@
+import { Tool } from "@modelcontextprotocol/sdk/types.js";
+import { and, eq } from "drizzle-orm";
+
+import { db } from "../../../db/index";
+import {
+ mcpServersTable,
+ namespaceToolMappingsTable,
+ toolsTable,
+} from "../../../db/schema";
+import {
+ CallToolMiddleware,
+ ListToolsMiddleware,
+} from "./functional-middleware";
+
+/**
+ * Configuration for the filter middleware
+ */
+export interface FilterToolsConfig {
+ cacheEnabled?: boolean;
+ cacheTTL?: number; // milliseconds
+ customErrorMessage?: (toolName: string, reason: string) => string;
+}
+
+/**
+ * Tool status cache for performance
+ */
+class ToolStatusCache {
+ private cache = new Map<string, "ACTIVE" | "INACTIVE">();
+ private expiry = new Map<string, number>();
+ private ttl: number;
+
+ constructor(ttl: number = 1000) {
+ this.ttl = ttl;
+ }
+
+ private getCacheKey(
+ namespaceUuid: string,
+ toolName: string,
+ serverUuid: string,
+ ): string {
+ return `${namespaceUuid}:${serverUuid}:${toolName}`;
+ }
+
+ get(
+ namespaceUuid: string,
+ toolName: string,
+ serverUuid: string,
+ ): "ACTIVE" | "INACTIVE" | null {
+ const key = this.getCacheKey(namespaceUuid, toolName, serverUuid);
+ const expiry = this.expiry.get(key);
+
+ if (!expiry || Date.now() > expiry) {
+ this.cache.delete(key);
+ this.expiry.delete(key);
+ return null;
+ }
+
+ return this.cache.get(key) || null;
+ }
+
+ set(
+ namespaceUuid: string,
+ toolName: string,
+ serverUuid: string,
+ status: "ACTIVE" | "INACTIVE",
+ ): void {
+ const key = this.getCacheKey(namespaceUuid, toolName, serverUuid);
+ this.cache.set(key, status);
+ this.expiry.set(key, Date.now() + this.ttl);
+ }
+
+ clear(namespaceUuid?: string): void {
+ if (namespaceUuid) {
+ for (const key of this.cache.keys()) {
+ if (key.startsWith(`${namespaceUuid}:`)) {
+ this.cache.delete(key);
+ this.expiry.delete(key);
+ }
+ }
+ } else {
+ this.cache.clear();
+ this.expiry.clear();
+ }
+ }
+}
+
+// Global cache instance
+const toolStatusCache = new ToolStatusCache();
+
+/**
+ * Get tool status from database with caching
+ */
+async function getToolStatus(
+ namespaceUuid: string,
+ toolName: string,
+ serverUuid: string,
+ useCache: boolean = true,
+): Promise<"ACTIVE" | "INACTIVE" | null> {
+ // Check cache first
+ if (useCache) {
+ const cached = toolStatusCache.get(namespaceUuid, toolName, serverUuid);
+ if (cached !== null) {
+ return cached;
+ }
+ }
+
+ try {
+ // Query database for tool status
+ const [toolMapping] = await db
+ .select({
+ status: namespaceToolMappingsTable.status,
+ })
+ .from(namespaceToolMappingsTable)
+ .innerJoin(
+ toolsTable,
+ eq(toolsTable.uuid, namespaceToolMappingsTable.tool_uuid),
+ )
+ .where(
+ and(
+ eq(namespaceToolMappingsTable.namespace_uuid, namespaceUuid),
+ eq(toolsTable.name, toolName),
+ eq(namespaceToolMappingsTable.mcp_server_uuid, serverUuid),
+ ),
+ );
+
+ const status = toolMapping?.status || null;
+
+ // Cache the result if found and caching is enabled
+ if (status && useCache) {
+ toolStatusCache.set(namespaceUuid, toolName, serverUuid, status);
+ }
+
+ return status;
+ } catch (error) {
+ console.error(
+ `Error fetching tool status for ${toolName} in namespace ${namespaceUuid}:`,
+ error,
+ );
+ return null;
+ }
+}
+
+/**
+ * Extract server info from tool name
+ */
+function parseToolName(
+ toolName: string,
+): { serverName: string; originalToolName: string } | null {
+ const firstDoubleUnderscoreIndex = toolName.indexOf("__");
+ if (firstDoubleUnderscoreIndex === -1) {
+ return null;
+ }
+
+ return {
+ serverName: toolName.substring(0, firstDoubleUnderscoreIndex),
+ originalToolName: toolName.substring(firstDoubleUnderscoreIndex + 2),
+ };
+}
+
+/**
+ * Get server UUID by name
+ */
+async function getServerUuidByName(serverName: string): Promise<string | null> {
+ try {
+ const [server] = await db
+ .select({ uuid: mcpServersTable.uuid })
+ .from(mcpServersTable)
+ .where(eq(mcpServersTable.name, serverName));
+
+ return server?.uuid || null;
+ } catch (error) {
+ console.error(`Error fetching server UUID for ${serverName}:`, error);
+ return null;
+ }
+}
+
+/**
+ * Filter tools based on their status in the namespace
+ */
+async function filterActiveTools(
+ tools: Tool[],
+ namespaceUuid: string,
+ useCache: boolean = true,
+): Promise<Tool[]> {
+ if (!tools || tools.length === 0) {
+ return tools;
+ }
+
+ const activeTools: Tool[] = [];
+
+ await Promise.allSettled(
+ tools.map(async (tool) => {
+ try {
+ const parsed = parseToolName(tool.name);
+ if (!parsed) {
+ // If tool name doesn't follow expected format, include it
+ activeTools.push(tool);
+ return;
+ }
+
+ const serverUuid = await getServerUuidByName(parsed.serverName);
+ if (!serverUuid) {
+ // If server not found, include the tool (fallback behavior)
+ activeTools.push(tool);
+ return;
+ }
+
+ const status = await getToolStatus(
+ namespaceUuid,
+ parsed.originalToolName,
+ serverUuid,
+ useCache,
+ );
+
+ // If no mapping exists or tool is active, include it
+ if (status === null || status === "ACTIVE") {
+ activeTools.push(tool);
+ }
+ // If status is "INACTIVE", tool is filtered out
+ } catch (error) {
+ console.error(`Error checking tool status for ${tool.name}:`, error);
+ // On error, include the tool (fail-safe behavior)
+ activeTools.push(tool);
+ }
+ }),
+ );
+
+ return activeTools;
+}
+
+/**
+ * Check if a tool is allowed to be called
+ */
+async function isToolAllowed(
+ toolName: string,
+ namespaceUuid: string,
+ serverUuid: string,
+ useCache: boolean = true,
+): Promise<{ allowed: boolean; reason?: string }> {
+ try {
+ const parsed = parseToolName(toolName);
+ if (!parsed) {
+ // If tool name doesn't follow expected format, allow it
+ return { allowed: true };
+ }
+
+ const status = await getToolStatus(
+ namespaceUuid,
+ parsed.originalToolName,
+ serverUuid,
+ useCache,
+ );
+
+ // If no mapping exists or tool is active, allow it
+ if (status === null || status === "ACTIVE") {
+ return { allowed: true };
+ }
+
+ // Tool is inactive
+ return {
+ allowed: false,
+ reason: "Tool has been marked as inactive in this namespace",
+ };
+ } catch (error) {
+ console.error(
+ `Error checking if tool ${toolName} is allowed in namespace ${namespaceUuid}:`,
+ error,
+ );
+ // On error, allow the tool (fail-safe behavior)
+ return { allowed: true };
+ }
+}
+
+/**
+ * Creates a List Tools middleware that filters out inactive tools
+ */
+export function createFilterListToolsMiddleware(
+ config: FilterToolsConfig = {},
+): ListToolsMiddleware {
+ const useCache = config.cacheEnabled ?? true;
+
+ return (handler) => {
+ return async (request, context) => {
+ // Call the original handler to get the tools
+ const response = await handler(request, context);
+
+ // Filter the tools based on namespace tool mappings
+ if (response.tools) {
+ const filteredTools = await filterActiveTools(
+ response.tools,
+ context.namespaceUuid,
+ useCache,
+ );
+
+ return {
+ ...response,
+ tools: filteredTools,
+ };
+ }
+
+ return response;
+ };
+ };
+}
+
+/**
+ * Creates a Call Tool middleware that blocks calls to inactive tools
+ */
+export function createFilterCallToolMiddleware(
+ config: FilterToolsConfig = {},
+): CallToolMiddleware {
+ const useCache = config.cacheEnabled ?? true;
+ const customErrorMessage =
+ config.customErrorMessage ??
+ ((toolName: string, reason: string) =>
+ `Tool "${toolName}" is currently inactive and disallowed in this namespace: ${reason}`);
+
+ return (handler) => {
+ return async (request, context) => {
+ // Extract tool name and server info from the request
+ const toolName = request.params.name;
+
+      // The server UUID isn't available on the middleware context yet, so
+      // derive it from the "serverName__toolName" prefix for now
+ const parsed = parseToolName(toolName);
+ if (parsed) {
+ const serverUuid = await getServerUuidByName(parsed.serverName);
+ if (serverUuid) {
+ const { allowed, reason } = await isToolAllowed(
+ toolName,
+ context.namespaceUuid,
+ serverUuid,
+ useCache,
+ );
+
+ if (!allowed) {
+ // Return error response instead of calling the handler
+ return {
+ content: [
+ {
+ type: "text",
+ text: customErrorMessage(
+ toolName,
+ reason || "Unknown reason",
+ ),
+ },
+ ],
+ isError: true,
+ };
+ }
+ }
+ }
+
+ // Tool is allowed, call the original handler
+ return handler(request, context);
+ };
+ };
+}
+
+/**
+ * Utility function to clear cache
+ */
+export function clearFilterCache(namespaceUuid?: string): void {
+ toolStatusCache.clear(namespaceUuid);
+}
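Reviewer note: a minimal usage sketch of the factory above, assuming a hypothetical base handler and placeholder IDs; the real wiring happens in metamcp-proxy.ts later in this diff.

import { createFilterListToolsMiddleware } from "./filter-tools.functional";
import { ListToolsHandler } from "./functional-middleware";

// Hypothetical base handler standing in for the real fan-out to connected servers.
const baseListTools: ListToolsHandler = async () => ({
  tools: [
    { name: "Everything__echo", inputSchema: { type: "object" as const } },
  ],
});

// Inactive tools are dropped from the response before it reaches the client.
const listTools = createFilterListToolsMiddleware({ cacheEnabled: true })(
  baseListTools,
);

// await listTools(
//   { method: "tools/list", params: {} },
//   { namespaceUuid: "<namespace-uuid>", sessionId: "<session-id>" },
// );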
diff --git a/apps/backend/src/lib/metamcp/metamcp-middleware/functional-middleware.ts b/apps/backend/src/lib/metamcp/metamcp-middleware/functional-middleware.ts
new file mode 100644
index 00000000..afdcda61
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/metamcp-middleware/functional-middleware.ts
@@ -0,0 +1,96 @@
+import {
+ CallToolRequest,
+ CallToolResult,
+ ListToolsRequest,
+ ListToolsResult,
+} from "@modelcontextprotocol/sdk/types.js";
+
+// Base context for all handlers
+export interface MetaMCPHandlerContext {
+ namespaceUuid: string;
+ sessionId: string;
+}
+
+// Handler function types
+export type ListToolsHandler = (
+ request: ListToolsRequest,
+ context: MetaMCPHandlerContext,
+) => Promise<ListToolsResult>;
+
+export type CallToolHandler = (
+ request: CallToolRequest,
+ context: MetaMCPHandlerContext,
+) => Promise<CallToolResult>;
+
+// Middleware function types that can transform request/response
+export type ListToolsMiddleware = (
+ handler: ListToolsHandler,
+) => ListToolsHandler;
+
+export type CallToolMiddleware = (handler: CallToolHandler) => CallToolHandler;
+
+// Request transformer type (for future use)
+export type RequestTransformer<T> = (
+ request: T,
+ context: MetaMCPHandlerContext,
+) => Promise<T> | T;
+
+// Response transformer type
+export type ResponseTransformer<T> = (
+ response: T,
+ context: MetaMCPHandlerContext,
+) => Promise<T> | T;
+
+/**
+ * Creates a functional middleware that can transform requests and responses
+ */
+export function createFunctionalMiddleware<TRequest, TResponse>(options: {
+  transformRequest?: RequestTransformer<TRequest>;
+  transformResponse?: ResponseTransformer<TResponse>;
+}) {
+ return (
+ handler: (
+ request: TRequest,
+ context: MetaMCPHandlerContext,
+    ) => Promise<TResponse>,
+ ) => {
+ return async (
+ request: TRequest,
+ context: MetaMCPHandlerContext,
+    ): Promise<TResponse> => {
+ // Transform request if transformer provided
+ let transformedRequest = request;
+ if (options.transformRequest) {
+ transformedRequest = await Promise.resolve(
+ options.transformRequest(request, context),
+ );
+ }
+
+ // Call the original handler
+ let response = await handler(transformedRequest, context);
+
+ // Transform response if transformer provided
+ if (options.transformResponse) {
+ response = await Promise.resolve(
+ options.transformResponse(response, context),
+ );
+ }
+
+ return response;
+ };
+ };
+}
+
+/**
+ * Compose multiple middleware functions together
+ */
+export function compose<T extends (...args: any[]) => any>(
+ ...middlewares: Array<(handler: T) => T>
+): (handler: T) => T {
+ return (handler: T) => {
+ return middlewares.reduceRight(
+ (wrapped, middleware) => middleware(wrapped),
+ handler,
+ );
+ };
+}
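Reviewer note: because compose uses reduceRight, the first middleware passed in becomes the outermost wrapper. A toy sketch to make the ordering concrete (the logging middlewares are illustrative only, not part of this PR):

import { compose, ListToolsHandler } from "./functional-middleware";

const logA =
  (handler: ListToolsHandler): ListToolsHandler =>
  async (request, context) => {
    console.log("A: before");
    const result = await handler(request, context);
    console.log("A: after");
    return result;
  };

const logB =
  (handler: ListToolsHandler): ListToolsHandler =>
  async (request, context) => {
    console.log("B: before");
    const result = await handler(request, context);
    console.log("B: after");
    return result;
  };

const base: ListToolsHandler = async () => ({ tools: [] });

// Logs "A: before", "B: before", "B: after", "A: after" when invoked.
const wrapped = compose(logA, logB)(base);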
diff --git a/apps/backend/src/lib/metamcp/metamcp-proxy.ts b/apps/backend/src/lib/metamcp/metamcp-proxy.ts
new file mode 100644
index 00000000..1ecf791f
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/metamcp-proxy.ts
@@ -0,0 +1,477 @@
+import { Server } from "@modelcontextprotocol/sdk/server/index.js";
+import {
+ CallToolRequestSchema,
+ CallToolResult,
+ CompatibilityCallToolResultSchema,
+ GetPromptRequestSchema,
+ GetPromptResultSchema,
+ ListPromptsRequestSchema,
+ ListPromptsResultSchema,
+ ListResourcesRequestSchema,
+ ListResourcesResultSchema,
+ ListResourceTemplatesRequestSchema,
+ ListResourceTemplatesResultSchema,
+ ListToolsRequestSchema,
+ ListToolsResultSchema,
+ ReadResourceRequestSchema,
+ ReadResourceResultSchema,
+ ResourceTemplate,
+ Tool,
+} from "@modelcontextprotocol/sdk/types.js";
+import { z } from "zod";
+
+import { toolsImplementations } from "../../trpc/tools.impl";
+import { ConnectedClient } from "./client";
+import { getMcpServers } from "./fetch-metamcp";
+import {
+ createFilterCallToolMiddleware,
+ createFilterListToolsMiddleware,
+} from "./metamcp-middleware/filter-tools.functional";
+import {
+ CallToolHandler,
+ compose,
+ ListToolsHandler,
+ MetaMCPHandlerContext,
+} from "./metamcp-middleware/functional-middleware";
+import {
+ cleanupSessionConnections,
+ getSession,
+ initSessionConnections,
+} from "./sessions";
+import { sanitizeName } from "./utils";
+
+export const createServer = async (
+ namespaceUuid: string,
+ sessionId: string,
+ includeInactiveServers: boolean = false,
+) => {
+  const toolToClient: Record<string, ConnectedClient> = {};
+  const toolToServerUuid: Record<string, string> = {};
+  const promptToClient: Record<string, ConnectedClient> = {};
+  const resourceToClient: Record<string, ConnectedClient> = {};
+
+ const server = new Server(
+ {
+ name: "metamcp-unified",
+ version: "1.0.0",
+ },
+ {
+ capabilities: {
+ prompts: {},
+ resources: {},
+ tools: {},
+ },
+ },
+ );
+
+ // Initialize session connections in the background when server starts
+  initSessionConnections(sessionId, namespaceUuid).catch((error) =>
+    console.error("Error initializing session connections:", error),
+  );
+
+ // Create the handler context
+ const handlerContext: MetaMCPHandlerContext = {
+ namespaceUuid,
+ sessionId,
+ };
+
+ // Original List Tools Handler
+ const originalListToolsHandler: ListToolsHandler = async (
+ request,
+ context,
+ ) => {
+ const serverParams = await getMcpServers(
+ context.namespaceUuid,
+ includeInactiveServers,
+ );
+ const allTools: Tool[] = [];
+
+ await Promise.allSettled(
+ Object.entries(serverParams).map(async ([mcpServerUuid, params]) => {
+ const session = await getSession(
+ context.sessionId,
+ mcpServerUuid,
+ params,
+ );
+ if (!session) return;
+
+ const capabilities = session.client.getServerCapabilities();
+ if (!capabilities?.tools) return;
+
+ // Use name assigned by user, fallback to name from server
+ const serverName =
+ params.name || session.client.getServerVersion()?.name || "";
+ try {
+ const result = await session.client.request(
+ {
+ method: "tools/list",
+ params: { _meta: request.params?._meta },
+ },
+ ListToolsResultSchema,
+ );
+
+ // Save original tools to database
+ if (result.tools && result.tools.length > 0) {
+ try {
+ await toolsImplementations.create({
+ tools: result.tools,
+ mcpServerUuid: mcpServerUuid,
+ });
+ console.log(
+ `Saved ${result.tools.length} tools for server: ${serverName}`,
+ );
+ } catch (dbError) {
+ console.error(
+ `Error saving tools to database for server ${serverName}:`,
+ dbError,
+ );
+ }
+ }
+
+ const toolsWithSource =
+ result.tools?.map((tool) => {
+ const toolName = `${sanitizeName(serverName)}__${tool.name}`;
+ toolToClient[toolName] = session;
+ toolToServerUuid[toolName] = mcpServerUuid;
+ return {
+ ...tool,
+ name: toolName,
+ description: tool.description,
+ };
+ }) || [];
+
+ allTools.push(...toolsWithSource);
+ } catch (error) {
+ console.error(`Error fetching tools from: ${serverName}`, error);
+ }
+ }),
+ );
+
+ return { tools: allTools };
+ };
+
+ // Original Call Tool Handler
+ const originalCallToolHandler: CallToolHandler = async (request, context) => {
+ const { name, arguments: args } = request.params;
+
+ // Extract the original tool name by removing the server prefix
+ const firstDoubleUnderscoreIndex = name.indexOf("__");
+ if (firstDoubleUnderscoreIndex === -1) {
+ throw new Error(`Invalid tool name format: ${name}`);
+ }
+
+ const originalToolName = name.substring(firstDoubleUnderscoreIndex + 2);
+ const clientForTool = toolToClient[name];
+ const serverUuid = toolToServerUuid[name];
+
+ if (!clientForTool) {
+ throw new Error(`Unknown tool: ${name}`);
+ }
+
+ if (!serverUuid) {
+ throw new Error(`Server UUID not found for tool: ${name}`);
+ }
+
+ try {
+ // Use the correct schema for tool calls
+ const result = await clientForTool.client.request(
+ {
+ method: "tools/call",
+ params: {
+ name: originalToolName,
+ arguments: args || {},
+ _meta: {
+ progressToken: request.params._meta?.progressToken,
+ },
+ },
+ },
+ CompatibilityCallToolResultSchema,
+ );
+
+ // Cast the result to CallToolResult type
+ return result as CallToolResult;
+ } catch (error) {
+ console.error(
+ `Error calling tool "${name}" through ${
+ clientForTool.client.getServerVersion()?.name || "unknown"
+ }:`,
+ error,
+ );
+ throw error;
+ }
+ };
+
+ // Compose middleware with handlers - this is the Express-like functional approach
+ const listToolsWithMiddleware = compose(
+ createFilterListToolsMiddleware({ cacheEnabled: true }),
+ // Add more middleware here as needed
+ // createLoggingMiddleware(),
+ // createRateLimitingMiddleware(),
+ )(originalListToolsHandler);
+
+ const callToolWithMiddleware = compose(
+ createFilterCallToolMiddleware({
+ cacheEnabled: true,
+ customErrorMessage: (toolName, reason) =>
+ `Access denied to tool "${toolName}": ${reason}`,
+ }),
+ // Add more middleware here as needed
+ // createAuditingMiddleware(),
+ // createAuthorizationMiddleware(),
+ )(originalCallToolHandler);
+
+ // Set up the handlers with middleware
+ server.setRequestHandler(ListToolsRequestSchema, async (request) => {
+ return await listToolsWithMiddleware(request, handlerContext);
+ });
+
+ server.setRequestHandler(CallToolRequestSchema, async (request) => {
+ return await callToolWithMiddleware(request, handlerContext);
+ });
+
+ // Get Prompt Handler
+ server.setRequestHandler(GetPromptRequestSchema, async (request) => {
+ const { name } = request.params;
+ const clientForPrompt = promptToClient[name];
+
+ if (!clientForPrompt) {
+ throw new Error(`Unknown prompt: ${name}`);
+ }
+
+ try {
+ // Extract the original prompt name by removing the server prefix
+ // For nested MetaMCP, names may be like "MetaMCPTest__Everything__promptName"
+ // We need to extract "Everything__promptName" (everything after the first "__")
+ const firstDoubleUnderscoreIndex = name.indexOf("__");
+ if (firstDoubleUnderscoreIndex === -1) {
+ throw new Error(`Invalid prompt name format: ${name}`);
+ }
+
+ const promptName = name.substring(firstDoubleUnderscoreIndex + 2);
+ const response = await clientForPrompt.client.request(
+ {
+ method: "prompts/get",
+ params: {
+ name: promptName,
+ arguments: request.params.arguments || {},
+ _meta: request.params._meta,
+ },
+ },
+ GetPromptResultSchema,
+ );
+
+ return response;
+ } catch (error) {
+ console.error(
+ `Error getting prompt through ${
+ clientForPrompt.client.getServerVersion()?.name
+ }:`,
+ error,
+ );
+ throw error;
+ }
+ });
+
+ // List Prompts Handler
+ server.setRequestHandler(ListPromptsRequestSchema, async (request) => {
+ const serverParams = await getMcpServers(
+ namespaceUuid,
+ includeInactiveServers,
+ );
+ const allPrompts: z.infer["prompts"] = [];
+
+ await Promise.allSettled(
+ Object.entries(serverParams).map(async ([uuid, params]) => {
+ const session = await getSession(sessionId, uuid, params);
+ if (!session) return;
+
+ const capabilities = session.client.getServerCapabilities();
+ if (!capabilities?.prompts) return;
+
+ // Use name assigned by user, fallback to name from server
+ const serverName =
+ params.name || session.client.getServerVersion()?.name || "";
+ try {
+ const result = await session.client.request(
+ {
+ method: "prompts/list",
+ params: {
+ cursor: request.params?.cursor,
+ _meta: request.params?._meta,
+ },
+ },
+ ListPromptsResultSchema,
+ );
+
+ if (result.prompts) {
+ const promptsWithSource = result.prompts.map((prompt) => {
+ const promptName = `${sanitizeName(serverName)}__${prompt.name}`;
+ promptToClient[promptName] = session;
+ return {
+ ...prompt,
+ name: promptName,
+ description: prompt.description || "",
+ };
+ });
+ allPrompts.push(...promptsWithSource);
+ }
+ } catch (error) {
+ console.error(`Error fetching prompts from: ${serverName}`, error);
+ }
+ }),
+ );
+
+ return {
+ prompts: allPrompts,
+ nextCursor: request.params?.cursor,
+ };
+ });
+
+ // List Resources Handler
+ server.setRequestHandler(ListResourcesRequestSchema, async (request) => {
+ const serverParams = await getMcpServers(
+ namespaceUuid,
+ includeInactiveServers,
+ );
+ const allResources: z.infer["resources"] =
+ [];
+
+ await Promise.allSettled(
+ Object.entries(serverParams).map(async ([uuid, params]) => {
+ const session = await getSession(sessionId, uuid, params);
+ if (!session) return;
+
+ const capabilities = session.client.getServerCapabilities();
+ if (!capabilities?.resources) return;
+
+ // Use name assigned by user, fallback to name from server
+ const serverName =
+ params.name || session.client.getServerVersion()?.name || "";
+ try {
+ const result = await session.client.request(
+ {
+ method: "resources/list",
+ params: {
+ cursor: request.params?.cursor,
+ _meta: request.params?._meta,
+ },
+ },
+ ListResourcesResultSchema,
+ );
+
+ if (result.resources) {
+ const resourcesWithSource = result.resources.map((resource) => {
+ resourceToClient[resource.uri] = session;
+ return {
+ ...resource,
+ name: resource.name || "",
+ };
+ });
+ allResources.push(...resourcesWithSource);
+ }
+ } catch (error) {
+ console.error(`Error fetching resources from: ${serverName}`, error);
+ }
+ }),
+ );
+
+ return {
+ resources: allResources,
+ nextCursor: request.params?.cursor,
+ };
+ });
+
+ // Read Resource Handler
+ server.setRequestHandler(ReadResourceRequestSchema, async (request) => {
+ const { uri } = request.params;
+ const clientForResource = resourceToClient[uri];
+
+ if (!clientForResource) {
+ throw new Error(`Unknown resource: ${uri}`);
+ }
+
+ try {
+ return await clientForResource.client.request(
+ {
+ method: "resources/read",
+ params: {
+ uri,
+ _meta: request.params._meta,
+ },
+ },
+ ReadResourceResultSchema,
+ );
+ } catch (error) {
+ console.error(
+ `Error reading resource through ${
+ clientForResource.client.getServerVersion()?.name
+ }:`,
+ error,
+ );
+ throw error;
+ }
+ });
+
+ // List Resource Templates Handler
+ server.setRequestHandler(
+ ListResourceTemplatesRequestSchema,
+ async (request) => {
+ const serverParams = await getMcpServers(
+ namespaceUuid,
+ includeInactiveServers,
+ );
+ const allTemplates: ResourceTemplate[] = [];
+
+ await Promise.allSettled(
+ Object.entries(serverParams).map(async ([uuid, params]) => {
+ const session = await getSession(sessionId, uuid, params);
+ if (!session) return;
+
+ const capabilities = session.client.getServerCapabilities();
+ if (!capabilities?.resources) return;
+
+ const serverName =
+ params.name || session.client.getServerVersion()?.name || "";
+
+ try {
+ const result = await session.client.request(
+ {
+ method: "resources/templates/list",
+ params: {
+ cursor: request.params?.cursor,
+ _meta: request.params?._meta,
+ },
+ },
+ ListResourceTemplatesResultSchema,
+ );
+
+ if (result.resourceTemplates) {
+ const templatesWithSource = result.resourceTemplates.map(
+ (template) => ({
+ ...template,
+ name: template.name || "",
+ }),
+ );
+ allTemplates.push(...templatesWithSource);
+ }
+ } catch (error) {
+ console.error(
+ `Error fetching resource templates from: ${serverName}`,
+ error,
+ );
+ return;
+ }
+ }),
+ );
+
+ return {
+ resourceTemplates: allTemplates,
+ nextCursor: request.params?.cursor,
+ };
+ },
+ );
+
+ const cleanup = async () => {
+ await cleanupSessionConnections(sessionId);
+ };
+
+ return { server, cleanup };
+};
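Reviewer note: every aggregated tool is exposed as `${sanitizeName(serverName)}__${originalName}` and the original name is recovered by splitting at the first double underscore, which keeps nested MetaMCP prefixes intact. A small sketch with a made-up server name:

import { sanitizeName } from "./utils";

const serverName = "My Server!"; // hypothetical user-assigned name
const prefixed = `${sanitizeName(serverName)}__read_file`; // "MyServer__read_file"

// Splitting at the first "__" leaves any further "__" inside the original name.
const sep = prefixed.indexOf("__");
console.log(prefixed.substring(0, sep)); // "MyServer"
console.log(prefixed.substring(sep + 2)); // "read_file"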
diff --git a/apps/backend/src/lib/metamcp/sessions.ts b/apps/backend/src/lib/metamcp/sessions.ts
new file mode 100644
index 00000000..0caa8dc2
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/sessions.ts
@@ -0,0 +1,112 @@
+import {
+ ConnectedClient,
+ connectMetaMcpClient,
+ createMetaMcpClient,
+} from "./client";
+import { getMcpServers, ServerParameters } from "./fetch-metamcp";
+import { getSessionKey } from "./utils";
+
+// Two-level cache: sessionId -> (server config hash -> ConnectedClient)
+const _sessionConnections: Record<string, Record<string, ConnectedClient>> = {};
+
+export const getSession = async (
+ sessionId: string,
+ serverUuid: string,
+ params: ServerParameters,
+): Promise<ConnectedClient | undefined> => {
+ // Initialize session connections if not exists
+ if (!_sessionConnections[sessionId]) {
+ _sessionConnections[sessionId] = {};
+ }
+
+ const sessionConnections = _sessionConnections[sessionId];
+ const sessionKey = getSessionKey(serverUuid, params);
+
+ // Return existing connection for this server config in this session
+ if (sessionConnections[sessionKey]) {
+ return sessionConnections[sessionKey];
+ }
+
+ // Close existing session for this UUID if it exists with a different hash
+ const oldSessionKeys = Object.keys(sessionConnections).filter((k) =>
+ k.startsWith(`${serverUuid}_`),
+ );
+
+ await Promise.allSettled(
+ oldSessionKeys.map(async (oldSessionKey) => {
+ await sessionConnections[oldSessionKey].cleanup();
+ delete sessionConnections[oldSessionKey];
+ }),
+ );
+
+ // Create new connection for this server config in this session
+ const { client, transport } = createMetaMcpClient(params);
+ if (!client || !transport) {
+ return;
+ }
+
+ const newClient = await connectMetaMcpClient(client, transport);
+ if (!newClient) {
+ return;
+ }
+
+ sessionConnections[sessionKey] = newClient;
+ return newClient;
+};
+
+export const initSessionConnections = async (
+ sessionId: string,
+ namespaceUuid: string,
+): Promise<void> => {
+ const serverParams = await getMcpServers(namespaceUuid);
+
+ // Initialize connections for all servers in this namespace for this session
+ await Promise.allSettled(
+ Object.entries(serverParams).map(async ([uuid, params]) => {
+ try {
+ await getSession(sessionId, uuid, params);
+ } catch (_error) {
+ // Ignore errors during initialization
+ }
+ }),
+ );
+};
+
+export const cleanupSessionConnections = async (
+ sessionId: string,
+): Promise<void> => {
+ const sessionConnections = _sessionConnections[sessionId];
+ if (!sessionConnections) {
+ return;
+ }
+
+ // Cleanup all connections for this session
+ await Promise.allSettled(
+ Object.entries(sessionConnections).map(async ([_sessionKey, client]) => {
+ await client.cleanup();
+ }),
+ );
+
+ // Remove the session from cache
+ delete _sessionConnections[sessionId];
+};
+
+export const cleanupAllSessions = async (): Promise<void> => {
+ await Promise.allSettled(
+ Object.keys(_sessionConnections).map(async (sessionId) => {
+ await cleanupSessionConnections(sessionId);
+ }),
+ );
+};
+
+// Get all active session IDs (for debugging/monitoring)
+export const getActiveSessionIds = (): string[] => {
+ return Object.keys(_sessionConnections);
+};
+
+// Get server connections for a specific session (for debugging/monitoring)
+export const getSessionConnections = (
+ sessionId: string,
+): Record<string, ConnectedClient> | undefined => {
+ return _sessionConnections[sessionId];
+};
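Reviewer note: cached connections are keyed by session and by a hash of the server parameters (see getSessionKey in utils.ts below), so getSession transparently evicts a stale connection for the same server UUID when its configuration changes.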
diff --git a/apps/backend/src/lib/metamcp/utils.ts b/apps/backend/src/lib/metamcp/utils.ts
new file mode 100644
index 00000000..827bded3
--- /dev/null
+++ b/apps/backend/src/lib/metamcp/utils.ts
@@ -0,0 +1,88 @@
+import crypto from "crypto";
+
+import { ServerParameters } from "./fetch-metamcp";
+
+/**
+ * Environment variables to inherit by default, if an environment is not explicitly given.
+ */
+export const DEFAULT_INHERITED_ENV_VARS =
+ process.platform === "win32"
+ ? [
+ "APPDATA",
+ "HOMEDRIVE",
+ "HOMEPATH",
+ "LOCALAPPDATA",
+ "PATH",
+ "PROCESSOR_ARCHITECTURE",
+ "SYSTEMDRIVE",
+ "SYSTEMROOT",
+ "TEMP",
+ "USERNAME",
+ "USERPROFILE",
+ ]
+ : /* list inspired by the default env inheritance of sudo */
+ ["HOME", "LOGNAME", "PATH", "SHELL", "TERM", "USER"];
+
+/**
+ * Returns a default environment object including only environment variables deemed safe to inherit.
+ */
+export function getDefaultEnvironment(): Record<string, string> {
+  const env: Record<string, string> = {};
+
+ for (const key of DEFAULT_INHERITED_ENV_VARS) {
+ const value = process.env[key];
+ if (value === undefined) {
+ continue;
+ }
+
+ if (value.startsWith("()")) {
+ // Skip functions, which are a security risk.
+ continue;
+ }
+
+ env[key] = value;
+ }
+
+ return env;
+}
+
+export function sanitizeName(name: string): string {
+ return name.replace(/[^a-zA-Z0-9_-]/g, "");
+}
+
+export function computeParamsHash(
+ params: ServerParameters,
+ uuid: string,
+): string {
+ let paramsDict: object;
+
+ // Default to "STDIO" if type is undefined
+ if (!params.type || params.type === "STDIO") {
+ paramsDict = {
+ uuid,
+ type: "STDIO", // Explicitly set type to "STDIO" for consistent hashing
+ command: params.command,
+ args: params.args,
+ env: params.env
+ ? Object.fromEntries(
+ Object.entries(params.env).sort((a, b) => a[0].localeCompare(b[0])),
+ )
+ : null,
+ };
+ } else if (params.type === "SSE" || params.type === "STREAMABLE_HTTP") {
+ paramsDict = {
+ uuid,
+ type: params.type,
+ url: params.url,
+ };
+ } else {
+ throw new Error(`Unsupported server type: ${params.type}`);
+ }
+
+ const paramsJson = JSON.stringify(paramsDict);
+ return crypto.createHash("sha256").update(paramsJson).digest("hex");
+}
+
+export function getSessionKey(uuid: string, params: ServerParameters): string {
+ return `${uuid}_${computeParamsHash(params, uuid)}`;
+}
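Reviewer note: the session key embeds a hash of the server parameters, so editing a server's configuration produces a different key and getSession drops the stale connection for that UUID. A sketch, assuming ServerParameters carries at least the fields hashed above (all concrete values below are made up):

import { ServerParameters } from "./fetch-metamcp";
import { getSessionKey } from "./utils";

const uuid = "11111111-1111-1111-1111-111111111111"; // hypothetical server uuid
const before = {
  type: "STDIO",
  command: "npx",
  args: ["some-mcp-server"], // hypothetical package name
  env: { API_KEY: "old" },
} as unknown as ServerParameters;

const after = {
  ...before,
  env: { API_KEY: "new" },
} as unknown as ServerParameters;

// Different env => different hash => different session key for the same uuid.
console.log(getSessionKey(uuid, before) !== getSessionKey(uuid, after)); // true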
diff --git a/apps/backend/src/middleware/better-auth-mcp.middleware.ts b/apps/backend/src/middleware/better-auth-mcp.middleware.ts
new file mode 100644
index 00000000..dc29e54d
--- /dev/null
+++ b/apps/backend/src/middleware/better-auth-mcp.middleware.ts
@@ -0,0 +1,76 @@
+import express from "express";
+
+import { auth } from "../auth";
+
+/**
+ * Better Auth middleware for MCP proxy routes
+ * Uses original request cookies for session validation
+ */
+export const betterAuthMcpMiddleware = async (
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction,
+) => {
+ try {
+ console.log("Auth middleware - method:", req.method, "path:", req.path);
+
+ // Check if we have cookies in the request
+ if (!req.headers.cookie) {
+ console.log("Auth middleware - no cookies found in request");
+ return res.status(401).json({
+ error: "Authentication required",
+ message: "No session cookies found",
+ });
+ }
+
+ // Verify the session using better-auth with original cookies
+ const sessionUrl = new URL(
+ "/api/auth/get-session",
+ `http://${req.headers.host}`,
+ );
+
+ const headers = new Headers();
+ headers.set("cookie", req.headers.cookie);
+
+ const sessionRequest = new Request(sessionUrl.toString(), {
+ method: "GET",
+ headers,
+ });
+
+ const sessionResponse = await auth.handler(sessionRequest);
+
+ if (!sessionResponse.ok) {
+ console.log("Auth middleware - session verification failed");
+ return res.status(401).json({
+ error: "Invalid session",
+ message: "Session verification failed",
+ });
+ }
+
+ const sessionData = (await sessionResponse.json()) as any;
+
+ if (!sessionData || !sessionData.user) {
+ console.log("Auth middleware - no valid user session found");
+ return res.status(401).json({
+ error: "Invalid session",
+ message: "No valid user session found",
+ });
+ }
+
+ // Add user info to request for downstream use
+ (req as any).user = sessionData.user;
+ (req as any).session = sessionData.session;
+
+ console.log(
+ "Auth middleware - authentication successful for user:",
+ sessionData.user.id,
+ );
+ next();
+ } catch (error) {
+ console.error("Better auth middleware error:", error);
+ return res.status(500).json({
+ error: "Authentication error",
+ message: "Failed to verify authentication",
+ });
+ }
+};
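Reviewer note: handlers behind this middleware can read the attached user from the request. A sketch with an illustrative route only (the real routes live under routers/mcp-proxy):

import express from "express";

import { betterAuthMcpMiddleware } from "./better-auth-mcp.middleware";

const router = express.Router();
router.use(betterAuthMcpMiddleware);

// Hypothetical route used only to show where the middleware puts the user.
router.get("/whoami", (req, res) => {
  const user = (req as any).user; // attached by betterAuthMcpMiddleware
  res.json({ userId: user?.id });
});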
diff --git a/apps/backend/src/routers/mcp-proxy.ts b/apps/backend/src/routers/mcp-proxy.ts
new file mode 100644
index 00000000..d019c8d3
--- /dev/null
+++ b/apps/backend/src/routers/mcp-proxy.ts
@@ -0,0 +1,42 @@
+import cors from "cors";
+import express from "express";
+import helmet from "helmet";
+
+import metamcpRoutes from "./mcp-proxy/metamcp";
+import serverRoutes from "./mcp-proxy/server";
+
+const mcpProxyRouter = express.Router();
+
+// Apply security middleware for MCP proxy communication
+mcpProxyRouter.use(helmet());
+mcpProxyRouter.use(
+ cors({
+ origin: process.env.FRONTEND_URL || "http://localhost:12008",
+ credentials: true,
+ allowedHeaders: [
+ "Content-Type",
+ "Authorization",
+ "mcp-session-id",
+ "x-custom-auth-header",
+ "last-event-id",
+ ],
+ }),
+);
+
+// Basic authentication disabled for easier OAuth integration
+
+// Apply additional headers
+mcpProxyRouter.use((req, res, next) => {
+ res.header("Access-Control-Expose-Headers", "mcp-session-id");
+ res.header("Access-Control-Expose-Headers", "authorization");
+ res.header("Access-Control-Expose-Headers", "last-event-id");
+ next();
+});
+
+// Mount MCP server proxy routes under /server
+mcpProxyRouter.use("/server", serverRoutes);
+
+// Mount MetaMCP routes under /metamcp
+mcpProxyRouter.use("/metamcp", metamcpRoutes);
+
+export default mcpProxyRouter;
diff --git a/apps/backend/src/routers/mcp-proxy/metamcp.ts b/apps/backend/src/routers/mcp-proxy/metamcp.ts
new file mode 100644
index 00000000..895e48eb
--- /dev/null
+++ b/apps/backend/src/routers/mcp-proxy/metamcp.ts
@@ -0,0 +1,266 @@
+import { randomUUID } from "node:crypto";
+
+import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
+import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
+import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
+import express from "express";
+
+import { createServer } from "../../lib/metamcp/index";
+import { cleanupSessionConnections } from "../../lib/metamcp/sessions";
+import { betterAuthMcpMiddleware } from "../../middleware/better-auth-mcp.middleware";
+
+const metamcpRouter = express.Router();
+
+// Apply better auth middleware to all metamcp routes
+metamcpRouter.use(betterAuthMcpMiddleware);
+
+const webAppTransports: Map<string, Transport> = new Map(); // Web app transports by sessionId
+const metamcpServers: Map<
+ string,
+  { server: any; cleanup: () => Promise<void> }
+> = new Map(); // MetaMCP servers by sessionId
+
+// Create a MetaMCP server instance
+const createMetaMcpServer = async (
+ namespaceUuid: string,
+ sessionId: string,
+ includeInactiveServers: boolean = false,
+) => {
+ const { server, cleanup } = await createServer(
+ namespaceUuid,
+ sessionId,
+ includeInactiveServers,
+ );
+ return { server, cleanup };
+};
+
+// Cleanup function for a specific session
+const cleanupSession = async (sessionId: string) => {
+ console.log(`Cleaning up session ${sessionId}`);
+
+ // Clean up transport
+ const transport = webAppTransports.get(sessionId);
+ if (transport) {
+ webAppTransports.delete(sessionId);
+ await transport.close();
+ }
+
+ // Clean up server instance
+ const serverInstance = metamcpServers.get(sessionId);
+ if (serverInstance) {
+ metamcpServers.delete(sessionId);
+ await serverInstance.cleanup();
+ }
+
+ // Clean up session connections
+ await cleanupSessionConnections(sessionId);
+};
+
+metamcpRouter.get("/:uuid/mcp", async (req, res) => {
+ const namespaceUuid = req.params.uuid;
+ const sessionId = req.headers["mcp-session-id"] as string;
+ console.log(
+ `Received GET message for MetaMCP namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+ try {
+ const transport = webAppTransports.get(
+ sessionId,
+ ) as StreamableHTTPServerTransport;
+ if (!transport) {
+ res.status(404).end("Session not found");
+ return;
+ } else {
+ await transport.handleRequest(req, res);
+ }
+ } catch (error) {
+ console.error("Error in MetaMCP /mcp route:", error);
+ res.status(500).json(error);
+ }
+});
+
+metamcpRouter.post("/:uuid/mcp", async (req, res) => {
+ const namespaceUuid = req.params.uuid;
+ const sessionId = req.headers["mcp-session-id"] as string | undefined;
+ let mcpServerInstance:
+    | { server: any; cleanup: () => Promise<void> }
+ | undefined;
+
+ if (!sessionId) {
+ try {
+ console.log(
+ `New MetaMCP StreamableHttp connection request for namespace ${namespaceUuid}`,
+ );
+
+ const webAppTransport = new StreamableHTTPServerTransport({
+ sessionIdGenerator: randomUUID,
+ onsessioninitialized: async (newSessionId) => {
+ try {
+ // Extract includeInactiveServers from query parameters
+ const includeInactiveServers =
+ req.query.includeInactiveServers === "true";
+
+ // Create MetaMCP server instance with sessionId
+ mcpServerInstance = await createMetaMcpServer(
+ namespaceUuid,
+ newSessionId,
+ includeInactiveServers,
+ );
+ console.log(
+ `Created MetaMCP server instance for session ${newSessionId}`,
+ );
+
+ webAppTransports.set(newSessionId, webAppTransport);
+ metamcpServers.set(newSessionId, mcpServerInstance);
+
+ console.log(
+ `MetaMCP Client <-> Proxy sessionId: ${newSessionId} for namespace ${namespaceUuid}`,
+ );
+
+ await mcpServerInstance.server.connect(webAppTransport);
+
+ // Handle cleanup when connection closes
+ res.on("close", async () => {
+ console.log(
+ `MetaMCP connection closed for session ${newSessionId}`,
+ );
+ await cleanupSession(newSessionId);
+ });
+ } catch (error) {
+ console.error(`Error initializing session ${newSessionId}:`, error);
+ }
+ },
+ });
+ console.log("Created MetaMCP StreamableHttp transport");
+
+ await (webAppTransport as StreamableHTTPServerTransport).handleRequest(
+ req,
+ res,
+ req.body,
+ );
+ } catch (error) {
+ console.error("Error in MetaMCP /mcp POST route:", error);
+ res.status(500).json(error);
+ }
+ } else {
+ console.log(
+ `Received POST message for MetaMCP namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+ try {
+ const transport = webAppTransports.get(
+ sessionId,
+ ) as StreamableHTTPServerTransport;
+ if (!transport) {
+ res.status(404).end("Transport not found for sessionId " + sessionId);
+ } else {
+ await (transport as StreamableHTTPServerTransport).handleRequest(
+ req,
+ res,
+ );
+ }
+ } catch (error) {
+ console.error("Error in MetaMCP /mcp route:", error);
+ res.status(500).json(error);
+ }
+ }
+});
+
+metamcpRouter.delete("/:uuid/mcp", async (req, res) => {
+ const namespaceUuid = req.params.uuid;
+ const sessionId = req.headers["mcp-session-id"] as string | undefined;
+ console.log(
+ `Received DELETE message for MetaMCP namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+
+ if (sessionId) {
+ try {
+ await cleanupSession(sessionId);
+ console.log(`MetaMCP session ${sessionId} cleaned up successfully`);
+ res.status(200).end();
+ } catch (error) {
+ console.error("Error in MetaMCP /mcp DELETE route:", error);
+ res.status(500).json(error);
+ }
+ } else {
+ res.status(400).end("Missing sessionId");
+ }
+});
+
+metamcpRouter.get("/:uuid/sse", async (req, res) => {
+ const namespaceUuid = req.params.uuid;
+ const includeInactiveServers = req.query.includeInactiveServers === "true";
+
+ try {
+ console.log(
+ `New MetaMCP SSE connection request for namespace ${namespaceUuid}, includeInactiveServers: ${includeInactiveServers}`,
+ );
+
+ const webAppTransport = new SSEServerTransport(
+ `/mcp-proxy/metamcp/${namespaceUuid}/message`,
+ res,
+ );
+ console.log("Created MetaMCP SSE transport");
+
+ const sessionId = webAppTransport.sessionId;
+
+ // Create MetaMCP server instance with sessionId and includeInactiveServers flag
+ const mcpServerInstance = await createMetaMcpServer(
+ namespaceUuid,
+ sessionId,
+ includeInactiveServers,
+ );
+ console.log(`Created MetaMCP server instance for session ${sessionId}`);
+
+ webAppTransports.set(sessionId, webAppTransport);
+ metamcpServers.set(sessionId, mcpServerInstance);
+
+ // Handle cleanup when connection closes
+ res.on("close", async () => {
+ console.log(`MetaMCP SSE connection closed for session ${sessionId}`);
+ await cleanupSession(sessionId);
+ });
+
+ await mcpServerInstance.server.connect(webAppTransport);
+ } catch (error) {
+ console.error("Error in MetaMCP /sse route:", error);
+ res.status(500).json(error);
+ }
+});
+
+metamcpRouter.post("/:uuid/message", async (req, res) => {
+ const namespaceUuid = req.params.uuid;
+ try {
+ const sessionId = req.query.sessionId;
+ console.log(
+ `Received POST message for MetaMCP namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+
+ const transport = webAppTransports.get(
+ sessionId as string,
+ ) as SSEServerTransport;
+ if (!transport) {
+ res.status(404).end("Session not found");
+ return;
+ }
+ await transport.handlePostMessage(req, res);
+ } catch (error) {
+ console.error("Error in MetaMCP /message route:", error);
+ res.status(500).json(error);
+ }
+});
+
+metamcpRouter.get("/health", (req, res) => {
+ res.json({
+ status: "ok",
+ service: "metamcp",
+ });
+});
+
+metamcpRouter.get("/info", (req, res) => {
+ res.json({
+ service: "metamcp",
+ version: "1.0.0",
+ description: "MetaMCP unified MCP proxy service",
+ });
+});
+
+export default metamcpRouter;
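Reviewer note: a sketch of connecting to this router with the MCP SDK client over Streamable HTTP. The host, port, and namespace UUID are placeholders, the /mcp-proxy prefix is inferred from the SSE message path above, and a valid better-auth session cookie is assumed because the auth middleware guards these routes.

import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";

const namespaceUuid = "<namespace-uuid>"; // placeholder
const transport = new StreamableHTTPClientTransport(
  new URL(`http://localhost:12009/mcp-proxy/metamcp/${namespaceUuid}/mcp`),
  {
    requestInit: {
      headers: { cookie: "<better-auth session cookie>" }, // placeholder
    },
  },
);

const client = new Client(
  { name: "metamcp-smoke-test", version: "0.0.1" },
  { capabilities: {} },
);

await client.connect(transport);
console.log(await client.listTools());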
diff --git a/apps/backend/src/routers/mcp-proxy/server.ts b/apps/backend/src/routers/mcp-proxy/server.ts
new file mode 100644
index 00000000..9c9cf156
--- /dev/null
+++ b/apps/backend/src/routers/mcp-proxy/server.ts
@@ -0,0 +1,512 @@
+import { randomUUID } from "node:crypto";
+
+import {
+ SSEClientTransport,
+ SseError,
+} from "@modelcontextprotocol/sdk/client/sse.js";
+import {
+ getDefaultEnvironment,
+ StdioClientTransport,
+} from "@modelcontextprotocol/sdk/client/stdio.js";
+import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";
+import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
+import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
+import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
+import { McpServerTypeEnum } from "@repo/zod-types";
+import express from "express";
+import { parse as shellParseArgs } from "shell-quote";
+import { findActualExecutable } from "spawn-rx";
+
+import mcpProxy from "../../lib/mcp-proxy";
+import { transformDockerUrl } from "../../lib/metamcp/client";
+import { betterAuthMcpMiddleware } from "../../middleware/better-auth-mcp.middleware";
+
+const SSE_HEADERS_PASSTHROUGH = ["authorization"];
+const STREAMABLE_HTTP_HEADERS_PASSTHROUGH = [
+ "authorization",
+ "mcp-session-id",
+ "last-event-id",
+];
+
+const defaultEnvironment = {
+ ...getDefaultEnvironment(),
+};
+
+// Function to get HTTP headers.
+// Supports only "SSE" and "STREAMABLE_HTTP" transport types.
+const getHttpHeaders = (
+ req: express.Request,
+ transportType: string,
+): Record<string, string> => {
+  const headers: Record<string, string> = {
+ Accept:
+ transportType === McpServerTypeEnum.Enum.SSE
+ ? "text/event-stream"
+ : "text/event-stream, application/json",
+ };
+ const defaultHeaders =
+ transportType === McpServerTypeEnum.Enum.SSE
+ ? SSE_HEADERS_PASSTHROUGH
+ : STREAMABLE_HTTP_HEADERS_PASSTHROUGH;
+
+ for (const key of defaultHeaders) {
+ if (req.headers[key] === undefined) {
+ continue;
+ }
+
+ const value = req.headers[key];
+ headers[key] = Array.isArray(value) ? value[value.length - 1] : value;
+ }
+
+ // If the header "x-custom-auth-header" is present, use its value as the custom header name.
+ if (req.headers["x-custom-auth-header"] !== undefined) {
+ const customHeaderName = req.headers["x-custom-auth-header"] as string;
+ const lowerCaseHeaderName = customHeaderName.toLowerCase();
+ if (req.headers[lowerCaseHeaderName] !== undefined) {
+ const value = req.headers[lowerCaseHeaderName];
+ headers[customHeaderName] = value as string;
+ }
+ }
+ return headers;
+};
+
+const serverRouter = express.Router();
+
+// Apply better auth middleware to all MCP proxy routes
+serverRouter.use(betterAuthMcpMiddleware);
+
+const webAppTransports: Map<string, Transport> = new Map(); // Web app transports by web app sessionId
+const serverTransports: Map<string, Transport> = new Map(); // Server transports by web app sessionId
+
+// Session cleanup function
+const cleanupSession = async (sessionId: string, mcpServerName?: string) => {
+ console.log(
+ `Cleaning up proxy session ${sessionId} for MCP server: ${mcpServerName || "Unknown"}`,
+ );
+
+ // Clean up web app transport
+ const webAppTransport = webAppTransports.get(sessionId);
+ if (webAppTransport) {
+ try {
+ await webAppTransport.close();
+ } catch (error) {
+ console.error(
+ `Error closing web app transport for session ${sessionId}:`,
+ error,
+ );
+ }
+ webAppTransports.delete(sessionId);
+ }
+
+ // Clean up server transport
+ const serverTransport = serverTransports.get(sessionId);
+ if (serverTransport) {
+ try {
+ await serverTransport.close();
+ } catch (error) {
+ console.error(
+ `Error closing server transport for session ${sessionId}:`,
+ error,
+ );
+ }
+ serverTransports.delete(sessionId);
+ }
+
+ console.log(`Session ${sessionId} cleanup completed`);
+};
+
+const createTransport = async (req: express.Request): Promise<Transport> => {
+ const query = req.query;
+ console.log("Query parameters:", JSON.stringify(query));
+
+ const transportType = query.transportType as string;
+
+ if (transportType === McpServerTypeEnum.Enum.STDIO) {
+ const command = query.command as string;
+ const origArgs = shellParseArgs(query.args as string) as string[];
+ const queryEnv = query.env ? JSON.parse(query.env as string) : {};
+ const env = { ...process.env, ...defaultEnvironment, ...queryEnv };
+
+ const { cmd, args } = findActualExecutable(command, origArgs);
+
+ console.log(`STDIO transport: command=${cmd}, args=${args}`);
+
+ const transport = new StdioClientTransport({
+ command: cmd,
+ args,
+ env,
+ stderr: "pipe",
+ });
+
+ await transport.start();
+ return transport;
+ } else if (transportType === McpServerTypeEnum.Enum.SSE) {
+ const url = transformDockerUrl(query.url as string);
+
+ const headers = getHttpHeaders(req, transportType);
+
+ console.log(
+ `SSE transport: url=${url}, headers=${JSON.stringify(headers)}`,
+ );
+
+ const transport = new SSEClientTransport(new URL(url), {
+ eventSourceInit: {
+ fetch: (url, init) => fetch(url, { ...init, headers }),
+ },
+ requestInit: {
+ headers,
+ },
+ });
+ await transport.start();
+ return transport;
+ } else if (transportType === McpServerTypeEnum.Enum.STREAMABLE_HTTP) {
+ const headers = getHttpHeaders(req, transportType);
+
+ const transport = new StreamableHTTPClientTransport(
+ new URL(transformDockerUrl(query.url as string)),
+ {
+ requestInit: {
+ headers,
+ },
+ },
+ );
+ await transport.start();
+ return transport;
+ } else {
+ console.error(`Invalid transport type: ${transportType}`);
+ throw new Error("Invalid transport type specified");
+ }
+};
+
+serverRouter.get("/mcp", async (req, res) => {
+ const sessionId = req.headers["mcp-session-id"] as string;
+ console.log(`Received GET message for sessionId ${sessionId}`);
+ try {
+ const transport = webAppTransports.get(
+ sessionId,
+ ) as StreamableHTTPServerTransport;
+ if (!transport) {
+ res.status(404).end("Session not found");
+ return;
+ } else {
+ await transport.handleRequest(req, res);
+ }
+ } catch (error) {
+ console.error("Error in /mcp route:", error);
+ res.status(500).json(error);
+ }
+});
+
+serverRouter.post("/mcp", async (req, res) => {
+ const sessionId = req.headers["mcp-session-id"] as string | undefined;
+ let serverTransport: Transport | undefined;
+ if (!sessionId) {
+ try {
+ console.log("New StreamableHttp connection request");
+ try {
+ serverTransport = await createTransport(req);
+ } catch (error) {
+ if (error instanceof SseError && error.code === 401) {
+ console.error(
+ "Received 401 Unauthorized from MCP server:",
+ error.message,
+ );
+ res.status(401).json(error);
+ return;
+ }
+
+ throw error;
+ }
+
+ console.log("Created StreamableHttp server transport");
+
+ // Generate session ID upfront for better tracking
+ const newSessionId = randomUUID();
+
+ const webAppTransport = new StreamableHTTPServerTransport({
+ sessionIdGenerator: () => newSessionId,
+ onsessioninitialized: (sessionId) => {
+ webAppTransports.set(sessionId, webAppTransport);
+ if (serverTransport) {
+ serverTransports.set(sessionId, serverTransport);
+ }
+ console.log("Client <-> Proxy sessionId: " + sessionId);
+ },
+ });
+ console.log("Created StreamableHttp client transport");
+
+ await webAppTransport.start();
+
+ // Set up proxy connection with error handling
+ try {
+ mcpProxy({
+ transportToClient: webAppTransport,
+ transportToServer: serverTransport,
+ });
+ } catch (error) {
+ console.error(
+ `Error setting up proxy for session ${newSessionId}:`,
+ error,
+ );
+ await cleanupSession(newSessionId, req.query.mcpServerName as string);
+ throw error;
+ }
+
+ // Handle the actual request
+ await (webAppTransport as StreamableHTTPServerTransport).handleRequest(
+ req,
+ res,
+ req.body,
+ );
+ } catch (error) {
+ console.error("Error in /mcp POST route:", error);
+ res.status(500).json(error);
+ }
+ } else {
+ console.log(`Received POST message for sessionId ${sessionId}`);
+ try {
+ const transport = webAppTransports.get(
+ sessionId,
+ ) as StreamableHTTPServerTransport;
+ if (!transport) {
+ res.status(404).end("Transport not found for sessionId " + sessionId);
+ } else {
+ await (transport as StreamableHTTPServerTransport).handleRequest(
+ req,
+ res,
+ );
+ }
+ } catch (error) {
+ console.error("Error in /mcp route:", error);
+ res.status(500).json(error);
+ }
+ }
+});
+
+serverRouter.delete("/mcp", async (req, res) => {
+ const sessionId = req.headers["mcp-session-id"] as string | undefined;
+ const mcpServerName = (req.query.mcpServerName as string) || "Unknown Server";
+ console.log(
+ `Received DELETE message for sessionId ${sessionId}, MCP server: ${mcpServerName}`,
+ );
+
+ if (sessionId) {
+ try {
+ const serverTransport = serverTransports.get(
+ sessionId,
+ ) as StreamableHTTPClientTransport;
+ if (!serverTransport) {
+ res.status(404).end("Transport not found for sessionId " + sessionId);
+ return;
+ }
+
+ // Terminate the session and clean up
+ try {
+ await serverTransport.terminateSession();
+ } catch (error) {
+ console.warn(`Warning: Error terminating session ${sessionId}:`, error);
+ // Continue with cleanup even if termination fails
+ }
+
+ await cleanupSession(sessionId, mcpServerName);
+ console.log(
+ `Session ${sessionId} terminated and cleaned up successfully`,
+ );
+ res.status(200).end();
+ } catch (error) {
+ console.error("Error in /mcp DELETE route:", error);
+ res.status(500).json(error);
+ }
+ } else {
+ res.status(400).end("Missing sessionId");
+ }
+});
+
+serverRouter.get("/stdio", async (req, res) => {
+ try {
+ console.log("New STDIO connection request");
+ let serverTransport: Transport | undefined;
+ try {
+ serverTransport = await createTransport(req);
+ console.log("Created server transport");
+ } catch (error) {
+ if (error instanceof SseError && error.code === 401) {
+ console.error(
+ "Received 401 Unauthorized from MCP server. Authentication failure.",
+ );
+ res.status(401).json(error);
+ return;
+ }
+
+ throw error;
+ }
+
+ const webAppTransport = new SSEServerTransport(
+ "/mcp-proxy/server/message",
+ res,
+ );
+ console.log("Created client transport");
+
+ webAppTransports.set(webAppTransport.sessionId, webAppTransport);
+ serverTransports.set(webAppTransport.sessionId, serverTransport);
+
+ // Handle cleanup when connection closes
+ const handleConnectionClose = () => {
+ const mcpServerName =
+ (req.query.mcpServerName as string) || "Unknown Server";
+ console.log(
+ `Connection closed for session ${webAppTransport.sessionId}, MCP server: ${mcpServerName}`,
+ );
+ cleanupSession(webAppTransport.sessionId, mcpServerName);
+ };
+
+ // Handle various connection termination scenarios
+ res.on("close", handleConnectionClose);
+ res.on("finish", handleConnectionClose);
+ res.on("error", (error) => {
+ console.error(
+ `Response error for SSE session ${webAppTransport.sessionId}:`,
+ error,
+ );
+ handleConnectionClose();
+ });
+
+ await webAppTransport.start();
+
+ const stdinTransport = serverTransport as StdioClientTransport;
+ if (stdinTransport.stderr) {
+ stdinTransport.stderr.on("data", (chunk) => {
+ if (chunk.toString().includes("MODULE_NOT_FOUND")) {
+ webAppTransport.send({
+ jsonrpc: "2.0",
+ method: "notifications/stderr",
+ params: {
+ content: "Command not found, transports removed",
+ },
+ });
+ webAppTransport.close();
+ cleanupSession(webAppTransport.sessionId);
+ console.error("Command not found, transports removed");
+ } else {
+ webAppTransport.send({
+ jsonrpc: "2.0",
+ method: "notifications/stderr",
+ params: {
+ content: chunk.toString(),
+ },
+ });
+ }
+ });
+ }
+
+ mcpProxy({
+ transportToClient: webAppTransport,
+ transportToServer: serverTransport,
+ });
+ } catch (error) {
+ console.error("Error in /stdio route:", error);
+ res.status(500).json(error);
+ }
+});
+
+serverRouter.get("/sse", async (req, res) => {
+ try {
+ console.log(
+ "New SSE connection request. NOTE: The sse transport is deprecated and has been replaced by StreamableHttp",
+ );
+ let serverTransport: Transport | undefined;
+ try {
+ serverTransport = await createTransport(req);
+ } catch (error) {
+ if (error instanceof SseError && error.code === 401) {
+ console.error(
+ "Received 401 Unauthorized from MCP server. Authentication failure.",
+ );
+ res.status(401).json(error);
+ return;
+ } else if (error instanceof SseError && error.code === 404) {
+ console.error(
+ "Received 404 not found from MCP server. Does the MCP server support SSE?",
+ );
+ res.status(404).json(error);
+ return;
+ } else if (JSON.stringify(error).includes("ECONNREFUSED")) {
+ console.error("Connection refused. Is the MCP server running?");
+ res.status(500).json(error);
+ } else {
+ throw error;
+ }
+ }
+
+ if (serverTransport) {
+ const webAppTransport = new SSEServerTransport(
+ "/mcp-proxy/server/message",
+ res,
+ );
+ webAppTransports.set(webAppTransport.sessionId, webAppTransport);
+ console.log("Created client transport");
+      serverTransports.set(webAppTransport.sessionId, serverTransport);
+ console.log("Created server transport");
+
+ // Handle cleanup when connection closes
+ const handleConnectionClose = () => {
+ const mcpServerName =
+ (req.query.mcpServerName as string) || "Unknown Server";
+ console.log(
+ `Connection closed for session ${webAppTransport.sessionId}, MCP server: ${mcpServerName}`,
+ );
+ cleanupSession(webAppTransport.sessionId, mcpServerName);
+ };
+
+ // Handle various connection termination scenarios
+ res.on("close", handleConnectionClose);
+ res.on("finish", handleConnectionClose);
+ res.on("error", (error) => {
+ console.error(
+          `Response error for SSE session ${webAppTransport.sessionId}:`,
+ error,
+ );
+ handleConnectionClose();
+ });
+
+ await webAppTransport.start();
+
+ mcpProxy({
+ transportToClient: webAppTransport,
+ transportToServer: serverTransport,
+ });
+ }
+ } catch (error) {
+ console.error("Error in /sse route:", error);
+ res.status(500).json(error);
+ }
+});
+
+serverRouter.post("/message", async (req, res) => {
+ try {
+ const sessionId = req.query.sessionId;
+ console.log(`Received POST message for sessionId ${sessionId}`);
+
+ const transport = webAppTransports.get(
+ sessionId as string,
+ ) as SSEServerTransport;
+ if (!transport) {
+ res.status(404).end("Session not found");
+ return;
+ }
+ await transport.handlePostMessage(req, res);
+ } catch (error) {
+ console.error("Error in /message route:", error);
+ res.status(500).json(error);
+ }
+});
+
+serverRouter.get("/health", (req, res) => {
+ res.json({
+ status: "ok",
+ });
+});
+
+export default serverRouter;
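Reviewer note: this proxy is driven entirely by query parameters; a sketch of building the Streamable HTTP URL for a STDIO-backed server. Host, port, command, and env values are placeholders, the /mcp-proxy/server prefix comes from the mounting in mcp-proxy.ts, and "STDIO" is assumed to be the string value of McpServerTypeEnum.Enum.STDIO (as utils.ts suggests).

const base = "http://localhost:12009/mcp-proxy/server/mcp"; // placeholder host/port

const params = new URLSearchParams({
  transportType: "STDIO",
  command: "npx", // resolved via findActualExecutable on the backend
  args: "-y some-mcp-server", // split with shell-quote on the backend
  env: JSON.stringify({ EXAMPLE_TOKEN: "value" }), // JSON.parse'd on the backend
  mcpServerName: "Example Server", // used for logging and cleanup
});

console.log(`${base}?${params.toString()}`);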
diff --git a/apps/backend/src/routers/public-metamcp.ts b/apps/backend/src/routers/public-metamcp.ts
new file mode 100644
index 00000000..1e6c8006
--- /dev/null
+++ b/apps/backend/src/routers/public-metamcp.ts
@@ -0,0 +1,68 @@
+import cors from "cors";
+import express from "express";
+
+import { endpointsRepository } from "../db/repositories/endpoints.repo";
+import sseRouter from "./public-metamcp/sse";
+import streamableHttpRouter from "./public-metamcp/streamable-http";
+
+const publicEndpointsRouter = express.Router();
+
+// Enable CORS for all public endpoint routes
+publicEndpointsRouter.use(
+ cors({
+ origin: true, // Allow all origins
+ credentials: true,
+ methods: ["GET", "POST", "DELETE", "OPTIONS"],
+ allowedHeaders: [
+ "Content-Type",
+ "mcp-session-id",
+ "Authorization",
+ "X-API-Key",
+ ],
+ }),
+);
+
+// Use StreamableHTTP router for /mcp routes
+publicEndpointsRouter.use(streamableHttpRouter);
+
+// Use SSE router for /sse and /message routes
+publicEndpointsRouter.use(sseRouter);
+
+// Health check endpoint
+publicEndpointsRouter.get("/health", (req, res) => {
+ res.json({
+ status: "ok",
+ service: "public-endpoints",
+ });
+});
+
+// List all available public endpoints
+publicEndpointsRouter.get("/", async (req, res) => {
+ try {
+ const endpoints = await endpointsRepository.findAllWithNamespaces();
+ const publicEndpoints = endpoints.map((endpoint) => ({
+ name: endpoint.name,
+ description: endpoint.description,
+ namespace: endpoint.namespace.name,
+ endpoints: {
+ mcp: `/metamcp/${endpoint.name}/mcp`,
+ sse: `/metamcp/${endpoint.name}/sse`,
+ },
+ }));
+
+ res.json({
+ service: "public-endpoints",
+ version: "1.0.0",
+ description: "Public MetaMCP endpoints",
+ endpoints: publicEndpoints,
+ });
+ } catch (error) {
+ console.error("Error listing public endpoints:", error);
+ res.status(500).json({
+ error: "Internal server error",
+ message: "Failed to list endpoints",
+ });
+ }
+});
+
+export default publicEndpointsRouter;
diff --git a/apps/backend/src/routers/public-metamcp/sse.ts b/apps/backend/src/routers/public-metamcp/sse.ts
new file mode 100644
index 00000000..a4e01c14
--- /dev/null
+++ b/apps/backend/src/routers/public-metamcp/sse.ts
@@ -0,0 +1,239 @@
+import { SSEServerTransport } from "@modelcontextprotocol/sdk/server/sse.js";
+import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
+import { DatabaseEndpoint } from "@repo/zod-types";
+import express from "express";
+
+import { ApiKeysRepository } from "../../db/repositories/api-keys.repo";
+import { endpointsRepository } from "../../db/repositories/endpoints.repo";
+import { createServer } from "../../lib/metamcp/index";
+import { cleanupSessionConnections } from "../../lib/metamcp/sessions";
+
+// Extend Express Request interface for our custom properties
+interface AuthenticatedRequest extends express.Request {
+ namespaceUuid: string;
+ endpointName: string;
+ endpoint: DatabaseEndpoint;
+ apiKeyUserId?: string;
+ apiKeyUuid?: string;
+}
+
+const sseRouter = express.Router();
+const apiKeysRepository = new ApiKeysRepository();
+
+const webAppTransports: Map<string, Transport> = new Map(); // Web app transports by sessionId
+const metamcpServers: Map<
+ string,
+  { server: any; cleanup: () => Promise<void> }
+> = new Map(); // MetaMCP servers by sessionId
+
+// Create a MetaMCP server instance
+const createMetaMcpServer = async (
+ namespaceUuid: string,
+ sessionId: string,
+) => {
+ const { server, cleanup } = await createServer(namespaceUuid, sessionId);
+ return { server, cleanup };
+};
+
+// Cleanup function for a specific session
+const cleanupSession = async (sessionId: string) => {
+ console.log(`Cleaning up SSE session ${sessionId}`);
+
+ // Clean up transport
+ const transport = webAppTransports.get(sessionId);
+ if (transport) {
+ webAppTransports.delete(sessionId);
+ await transport.close();
+ }
+
+ // Clean up server instance
+ const serverInstance = metamcpServers.get(sessionId);
+ if (serverInstance) {
+ metamcpServers.delete(sessionId);
+ await serverInstance.cleanup();
+ }
+
+ // Clean up session connections
+ await cleanupSessionConnections(sessionId);
+};
+
+// Middleware to lookup endpoint by name and add namespace info to request
+const lookupEndpoint = async (
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction,
+) => {
+ const endpointName = req.params.endpoint_name;
+
+ try {
+ const endpoint = await endpointsRepository.findByName(endpointName);
+ if (!endpoint) {
+ return res.status(404).json({
+ error: "Endpoint not found",
+ message: `No endpoint found with name: ${endpointName}`,
+ });
+ }
+
+ // Add the endpoint info to the request for use in handlers
+ const authReq = req as AuthenticatedRequest;
+ authReq.namespaceUuid = endpoint.namespace_uuid;
+ authReq.endpointName = endpointName;
+ authReq.endpoint = endpoint;
+
+ next();
+ } catch (error) {
+ console.error("Error looking up endpoint:", error);
+ return res.status(500).json({
+ error: "Internal server error",
+ message: "Failed to lookup endpoint",
+ });
+ }
+};
+
+// API Key authentication middleware
+const authenticateApiKey = async (
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction,
+) => {
+ const authReq = req as AuthenticatedRequest;
+ const endpoint = authReq.endpoint;
+
+ // Skip authentication if not enabled for this endpoint
+ if (!endpoint?.enable_api_key_auth) {
+ return next();
+ }
+
+ try {
+ let apiKey: string | undefined;
+
+    // Always check headers first (Authorization: Bearer <token> or X-API-Key: <key>)
+ const authHeader = req.headers.authorization;
+ if (authHeader && authHeader.startsWith("Bearer ")) {
+ apiKey = authHeader.substring(7);
+ } else {
+ apiKey = req.headers["x-api-key"] as string;
+ }
+
+ // If no API key in headers and query param auth is enabled, check query parameters
+ if (!apiKey && endpoint.use_query_param_auth) {
+ apiKey = (req.query.api_key as string) || (req.query.apikey as string);
+ }
+
+ if (!apiKey) {
+ const authMethods = [
+ "Authorization header (Bearer token)",
+ "X-API-Key header",
+ ];
+ if (endpoint.use_query_param_auth) {
+ authMethods.push("query parameter (api_key or apikey)");
+ }
+
+ return res.status(401).json({
+ error: "Authentication required",
+ message: `API key required in one of: ${authMethods.join(", ")}`,
+ });
+ }
+
+ // Validate the API key
+ const validation = await apiKeysRepository.validateApiKey(apiKey);
+ if (!validation.valid) {
+ return res.status(401).json({
+ error: "Invalid API key",
+ message: "The provided API key is invalid or inactive",
+ });
+ }
+
+ // Add user info to request for potential logging/auditing
+ authReq.apiKeyUserId = validation.user_id;
+ authReq.apiKeyUuid = validation.key_uuid;
+
+ next();
+ } catch (error) {
+ console.error("Error validating API key:", error);
+ return res.status(500).json({
+ error: "Internal server error",
+ message: "Failed to validate API key",
+ });
+ }
+};
+
+sseRouter.get(
+ "/:endpoint_name/sse",
+ lookupEndpoint,
+ authenticateApiKey,
+ async (req, res) => {
+ const authReq = req as AuthenticatedRequest;
+ const { namespaceUuid, endpointName } = authReq;
+
+ try {
+ console.log(
+ `New public endpoint SSE connection request for ${endpointName} -> namespace ${namespaceUuid}`,
+ );
+
+ const webAppTransport = new SSEServerTransport(
+ `/metamcp/${endpointName}/message`,
+ res,
+ );
+ console.log("Created public endpoint SSE transport");
+
+ const sessionId = webAppTransport.sessionId;
+
+ // Create MetaMCP server instance with sessionId
+ const mcpServerInstance = await createMetaMcpServer(
+ namespaceUuid,
+ sessionId,
+ );
+ console.log(
+ `Created MetaMCP server instance for public endpoint session ${sessionId}`,
+ );
+
+ webAppTransports.set(sessionId, webAppTransport);
+ metamcpServers.set(sessionId, mcpServerInstance);
+
+ // Handle cleanup when connection closes
+ res.on("close", async () => {
+ console.log(
+ `Public endpoint SSE connection closed for session ${sessionId}`,
+ );
+ await cleanupSession(sessionId);
+ });
+
+ await mcpServerInstance.server.connect(webAppTransport);
+ } catch (error) {
+ console.error("Error in public endpoint /sse route:", error);
+ res.status(500).json(error);
+ }
+ },
+);
+
+sseRouter.post(
+ "/:endpoint_name/message",
+ lookupEndpoint,
+ authenticateApiKey,
+ async (req, res) => {
+ const authReq = req as AuthenticatedRequest;
+ const { namespaceUuid, endpointName } = authReq;
+
+ try {
+ const sessionId = req.query.sessionId;
+ console.log(
+ `Received POST message for public endpoint ${endpointName} -> namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+
+ const transport = webAppTransports.get(
+ sessionId as string,
+ ) as SSEServerTransport;
+ if (!transport) {
+ res.status(404).end("Session not found");
+ return;
+ }
+ await transport.handlePostMessage(req, res);
+ } catch (error) {
+ console.error("Error in public endpoint /message route:", error);
+ res.status(500).json(error);
+ }
+ },
+);
+
+export default sseRouter;
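Reviewer note: when enable_api_key_auth is set on an endpoint, callers must present a key. A sketch of opening the public SSE stream with the X-API-Key header; host, endpoint name, and key are placeholders, and the /metamcp prefix assumes how public-metamcp.ts mounts this router.

const response = await fetch(
  "http://localhost:12009/metamcp/<endpoint-name>/sse", // placeholders
  {
    headers: {
      Accept: "text/event-stream",
      "X-API-Key": "<api-key>", // or: Authorization: "Bearer <api-key>"
    },
  },
);

console.log(response.status); // 200 once the key validates, 401 otherwise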
diff --git a/apps/backend/src/routers/public-metamcp/streamable-http.ts b/apps/backend/src/routers/public-metamcp/streamable-http.ts
new file mode 100644
index 00000000..4893ff78
--- /dev/null
+++ b/apps/backend/src/routers/public-metamcp/streamable-http.ts
@@ -0,0 +1,358 @@
+import { randomUUID } from "node:crypto";
+
+import { StreamableHTTPServerTransport } from "@modelcontextprotocol/sdk/server/streamableHttp.js";
+import { DatabaseEndpoint } from "@repo/zod-types";
+import express from "express";
+
+import { ApiKeysRepository } from "../../db/repositories/api-keys.repo";
+import { endpointsRepository } from "../../db/repositories/endpoints.repo";
+import { createServer } from "../../lib/metamcp/index";
+import { cleanupSessionConnections } from "../../lib/metamcp/sessions";
+
+// Extend Express Request interface for our custom properties
+interface AuthenticatedRequest extends express.Request {
+ namespaceUuid: string;
+ endpointName: string;
+ endpoint: DatabaseEndpoint;
+ apiKeyUserId?: string;
+ apiKeyUuid?: string;
+}
+
+const streamableHttpRouter = express.Router();
+const apiKeysRepository = new ApiKeysRepository();
+
+const transports: Record<string, StreamableHTTPServerTransport> = {}; // Web app transports by sessionId
+const metamcpServers: Record<
+ string,
+ {
+    server: Awaited<ReturnType<typeof createServer>>["server"];
+    cleanup: () => Promise<void>;
+ }
+> = {}; // MetaMCP servers by endpoint name
+
+// Track active sessions per endpoint for cleanup purposes
+const endpointSessionCounts: Record<string, number> = {};
+
+// Create a MetaMCP server instance
+const createMetaMcpServer = async (
+ namespaceUuid: string,
+ sessionId: string,
+ endpointName: string,
+) => {
+ // Check if we already have a server for this endpoint
+ if (metamcpServers[endpointName]) {
+ console.log(
+ `Reusing existing MetaMCP server for endpoint: ${endpointName}`,
+ );
+ return metamcpServers[endpointName];
+ }
+
+ const { server, cleanup } = await createServer(namespaceUuid, sessionId);
+ const serverInstance = { server, cleanup };
+
+ // Cache by endpoint name
+ metamcpServers[endpointName] = serverInstance;
+ console.log(
+ `Created and cached new MetaMCP server for endpoint: ${endpointName}`,
+ );
+
+ return serverInstance;
+};
+
+// Cleanup endpoint server if no more sessions are using it
+const cleanupEndpointIfUnused = async (endpointName: string) => {
+ const sessionCount = endpointSessionCounts[endpointName] || 0;
+ if (sessionCount <= 0) {
+ const serverInstance = metamcpServers[endpointName];
+ if (serverInstance) {
+ console.log(
+ `Cleaning up unused MetaMCP server for endpoint: ${endpointName}`,
+ );
+ await serverInstance.cleanup();
+ delete metamcpServers[endpointName];
+ delete endpointSessionCounts[endpointName];
+ }
+ }
+};
+
+// Cleanup function for a specific session
+const cleanupSession = async (sessionId: string, endpointName: string) => {
+ console.log(`Cleaning up StreamableHTTP session ${sessionId}`);
+
+ // Clean up transport
+ const transport = transports[sessionId];
+ if (transport) {
+ delete transports[sessionId];
+ await transport.close();
+ }
+
+ // Decrement session count for this endpoint
+ if (endpointSessionCounts[endpointName] > 0) {
+ endpointSessionCounts[endpointName]--;
+ }
+
+ // Clean up session connections (but keep the server instance cached by endpoint)
+ await cleanupSessionConnections(sessionId);
+
+ // Cleanup endpoint server if no more sessions are using it
+ await cleanupEndpointIfUnused(endpointName);
+};
+
+// Middleware to lookup endpoint by name and add namespace info to request
+const lookupEndpoint = async (
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction,
+) => {
+ const endpointName = req.params.endpoint_name;
+
+ try {
+ const endpoint = await endpointsRepository.findByName(endpointName);
+ if (!endpoint) {
+ return res.status(404).json({
+ error: "Endpoint not found",
+ message: `No endpoint found with name: ${endpointName}`,
+ });
+ }
+
+ // Add the endpoint info to the request for use in handlers
+ const authReq = req as AuthenticatedRequest;
+ authReq.namespaceUuid = endpoint.namespace_uuid;
+ authReq.endpointName = endpointName;
+ authReq.endpoint = endpoint;
+
+ next();
+ } catch (error) {
+ console.error("Error looking up endpoint:", error);
+ return res.status(500).json({
+ error: "Internal server error",
+ message: "Failed to lookup endpoint",
+ });
+ }
+};
+
+// API Key authentication middleware
+const authenticateApiKey = async (
+ req: express.Request,
+ res: express.Response,
+ next: express.NextFunction,
+) => {
+ const authReq = req as AuthenticatedRequest;
+ const endpoint = authReq.endpoint;
+
+ // Skip authentication if not enabled for this endpoint
+ if (!endpoint?.enable_api_key_auth) {
+ return next();
+ }
+
+ try {
+ let apiKey: string | undefined;
+
+ // Always check headers first (Authorization: Bearer or X-API-Key: )
+ const authHeader = req.headers.authorization;
+ if (authHeader && authHeader.startsWith("Bearer ")) {
+ apiKey = authHeader.substring(7);
+ } else {
+ apiKey = req.headers["x-api-key"] as string;
+ }
+
+ // If no API key in headers and query param auth is enabled, check query parameters
+ if (!apiKey && endpoint.use_query_param_auth) {
+ apiKey = (req.query.api_key as string) || (req.query.apikey as string);
+ }
+
+ if (!apiKey) {
+ const authMethods = [
+ "Authorization header (Bearer token)",
+ "X-API-Key header",
+ ];
+ if (endpoint.use_query_param_auth) {
+ authMethods.push("query parameter (api_key or apikey)");
+ }
+
+ return res.status(401).json({
+ error: "Authentication required",
+ message: `API key required in one of: ${authMethods.join(", ")}`,
+ });
+ }
+
+ // Validate the API key
+ const validation = await apiKeysRepository.validateApiKey(apiKey);
+ if (!validation.valid) {
+ return res.status(401).json({
+ error: "Invalid API key",
+ message: "The provided API key is invalid or inactive",
+ });
+ }
+
+ // Add user info to request for potential logging/auditing
+ authReq.apiKeyUserId = validation.user_id;
+ authReq.apiKeyUuid = validation.key_uuid;
+
+ next();
+ } catch (error) {
+ console.error("Error validating API key:", error);
+ return res.status(500).json({
+ error: "Internal server error",
+ message: "Failed to validate API key",
+ });
+ }
+};
+
+streamableHttpRouter.get(
+ "/:endpoint_name/mcp",
+ lookupEndpoint,
+ authenticateApiKey,
+ async (req, res) => {
+ const authReq = req as AuthenticatedRequest;
+ const { namespaceUuid, endpointName } = authReq;
+ const sessionId = req.headers["mcp-session-id"] as string;
+
+ console.log(
+ `Received GET message for public endpoint ${endpointName} -> namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+
+ try {
+ const transport = transports[sessionId];
+ if (!transport) {
+ res.status(404).end("Session not found");
+ return;
+ } else {
+ await transport.handleRequest(req, res);
+ }
+ } catch (error) {
+ console.error("Error in public endpoint /mcp route:", error);
+ res.status(500).json(error);
+ }
+ },
+);
+
+streamableHttpRouter.post(
+ "/:endpoint_name/mcp",
+ lookupEndpoint,
+ authenticateApiKey,
+ async (req, res) => {
+ const authReq = req as AuthenticatedRequest;
+ const { namespaceUuid, endpointName } = authReq;
+ const sessionId = req.headers["mcp-session-id"] as string | undefined;
+
+ if (!sessionId) {
+ try {
+ console.log(
+ `New public endpoint StreamableHttp connection request for ${endpointName} -> namespace ${namespaceUuid}`,
+ );
+
+ // Generate session ID upfront
+ const newSessionId = randomUUID();
+
+ // Get or create MetaMCP server instance for this endpoint
+ const mcpServerInstance = await createMetaMcpServer(
+ namespaceUuid,
+ newSessionId,
+ endpointName,
+ );
+ if (!mcpServerInstance) {
+ throw new Error("Failed to create MetaMCP server instance");
+ }
+
+ // Increment session count for this endpoint
+ endpointSessionCounts[endpointName] =
+ (endpointSessionCounts[endpointName] || 0) + 1;
+
+ console.log(
+ `Using MetaMCP server instance for public endpoint session ${newSessionId} (endpoint: ${endpointName}, sessions: ${endpointSessionCounts[endpointName]})`,
+ );
+
+ // Create transport with the predetermined session ID
+ const transport = new StreamableHTTPServerTransport({
+ sessionIdGenerator: () => newSessionId,
+ onsessioninitialized: async (sessionId) => {
+ try {
+ console.log(`Session initialized for sessionId: ${sessionId}`);
+ } catch (error) {
+ console.error(
+ `Error initializing public endpoint session ${sessionId}:`,
+ error,
+ );
+ }
+ },
+ });
+
+ // Note: Cleanup is handled explicitly via DELETE requests
+ // StreamableHTTP is designed to persist across multiple requests
+ console.log("Created public endpoint StreamableHttp transport");
+
+ // Store transport reference
+ transports[newSessionId] = transport;
+
+ console.log(
+ `Public Endpoint Client <-> Proxy sessionId: ${newSessionId} for endpoint ${endpointName} -> namespace ${namespaceUuid}`,
+ );
+ console.log(`Stored transport for sessionId: ${newSessionId}`);
+ console.log(`Current stored sessions:`, Object.keys(transports));
+
+ // Connect the server to the transport before handling the request
+ await mcpServerInstance.server.connect(transport);
+
+ // Now handle the request - server is guaranteed to be ready
+ await transport.handleRequest(req, res, req.body);
+ } catch (error) {
+ console.error("Error in public endpoint /mcp POST route:", error);
+ res.status(500).json(error);
+ }
+ } else {
+ console.log(
+ `Received POST message for public endpoint ${endpointName} -> namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+ console.log(`Available session IDs:`, Object.keys(transports));
+ console.log(`Looking for sessionId: ${sessionId}`);
+ try {
+ const transport = transports[sessionId];
+ if (!transport) {
+ console.error(
+ `Transport not found for sessionId ${sessionId}. Available sessions:`,
+ Object.keys(transports),
+ );
+ res.status(404).end("Transport not found for sessionId " + sessionId);
+ } else {
+ await transport.handleRequest(req, res, req.body);
+ }
+ } catch (error) {
+ console.error("Error in public endpoint /mcp route:", error);
+ res.status(500).json(error);
+ }
+ }
+ },
+);
+
+streamableHttpRouter.delete(
+ "/:endpoint_name/mcp",
+ lookupEndpoint,
+ authenticateApiKey,
+ async (req, res) => {
+ const authReq = req as AuthenticatedRequest;
+ const { namespaceUuid, endpointName } = authReq;
+ const sessionId = req.headers["mcp-session-id"] as string | undefined;
+
+ console.log(
+ `Received DELETE message for public endpoint ${endpointName} -> namespace ${namespaceUuid} sessionId ${sessionId}`,
+ );
+
+ if (sessionId) {
+ try {
+ await cleanupSession(sessionId, endpointName);
+ console.log(
+ `Public endpoint session ${sessionId} cleaned up successfully`,
+ );
+ res.status(200).end();
+ } catch (error) {
+ console.error("Error in public endpoint /mcp DELETE route:", error);
+ res.status(500).json(error);
+ }
+ } else {
+ res.status(400).end("Missing sessionId");
+ }
+ },
+);
+
+export default streamableHttpRouter;
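The Streamable HTTP router keys its behaviour off the `mcp-session-id` header: a POST without the header initializes a new session, later requests reuse it, and DELETE tears it down, mirroring the three handlers above. A rough client-side sketch of that lifecycle, with the endpoint name, URL, API key, and protocol revision as assumptions:

```ts
// Illustrative session lifecycle against the /:endpoint_name/mcp routes above.
const MCP_URL = "http://localhost:12008/metamcp/my-endpoint/mcp";
const auth = { Authorization: `Bearer ${process.env.MCP_API_KEY ?? ""}` };
const accept = { Accept: "application/json, text/event-stream" };

async function demoStreamableHttp() {
  // 1. No mcp-session-id header: the router creates a server + transport and returns the id.
  const init = await fetch(MCP_URL, {
    method: "POST",
    headers: { ...auth, ...accept, "Content-Type": "application/json" },
    body: JSON.stringify({
      jsonrpc: "2.0",
      id: 1,
      method: "initialize",
      params: {
        protocolVersion: "2025-03-26", // assumed protocol revision
        capabilities: {},
        clientInfo: { name: "demo-client", version: "0.0.1" },
      },
    }),
  });
  const sessionId = init.headers.get("mcp-session-id") ?? "";

  // 2. Reuse the session for further JSON-RPC calls.
  await fetch(MCP_URL, {
    method: "POST",
    headers: {
      ...auth,
      ...accept,
      "Content-Type": "application/json",
      "mcp-session-id": sessionId,
    },
    body: JSON.stringify({ jsonrpc: "2.0", id: 2, method: "tools/list" }),
  });

  // 3. Explicit cleanup, handled by the DELETE route and cleanupSession above.
  await fetch(MCP_URL, {
    method: "DELETE",
    headers: { ...auth, "mcp-session-id": sessionId },
  });
}
```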
diff --git a/apps/backend/src/routers/trpc.ts b/apps/backend/src/routers/trpc.ts
new file mode 100644
index 00000000..c40c927e
--- /dev/null
+++ b/apps/backend/src/routers/trpc.ts
@@ -0,0 +1,55 @@
+import { createAppRouter } from "@repo/trpc";
+import * as trpcExpress from "@trpc/server/adapters/express";
+import cors from "cors";
+import express from "express";
+import helmet from "helmet";
+
+import { createContext } from "../trpc";
+import { apiKeysImplementations } from "../trpc/api-keys.impl";
+import { configImplementations } from "../trpc/config.impl";
+import { endpointsImplementations } from "../trpc/endpoints.impl";
+import { mcpServersImplementations } from "../trpc/mcp-servers.impl";
+import { namespacesImplementations } from "../trpc/namespaces.impl";
+import { oauthImplementations } from "../trpc/oauth.impl";
+import { toolsImplementations } from "../trpc/tools.impl";
+
+// Create the app router with implementations
+const appRouter = createAppRouter({
+ frontend: {
+ mcpServers: mcpServersImplementations,
+ namespaces: namespacesImplementations,
+ endpoints: endpointsImplementations,
+ oauth: oauthImplementations,
+ tools: toolsImplementations,
+ apiKeys: apiKeysImplementations,
+ config: configImplementations,
+ },
+});
+
+// Export the router type for client usage
+export type AppRouter = typeof appRouter;
+
+// Create Express router
+const trpcRouter = express.Router();
+
+// Apply security middleware for frontend communication
+trpcRouter.use(helmet());
+trpcRouter.use(
+ cors({
+ origin: process.env.FRONTEND_URL || "http://localhost:12008",
+ credentials: true,
+ }),
+);
+
+// Better-auth integration now handled in tRPC context
+
+// Mount tRPC handler
+trpcRouter.use(
+ "/frontend",
+ trpcExpress.createExpressMiddleware({
+ router: appRouter,
+ createContext,
+ }),
+);
+
+export default trpcRouter;
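A matching client on the frontend would target the `/frontend` mount. This is a sketch only: it assumes a tRPC v10-style client package and that the Express app mounts this router under `/trpc`, so the full path is `/trpc/frontend`.

```ts
import { createTRPCProxyClient, httpBatchLink } from "@trpc/client";

import type { AppRouter } from "./routers/trpc"; // router type exported above

export const trpcClient = createTRPCProxyClient<AppRouter>({
  links: [
    httpBatchLink({
      url: "http://localhost:12009/trpc/frontend", // assumed mount point and port
      // Forward better-auth cookies so createContext can resolve the session.
      fetch: (url, opts) => fetch(url, { ...opts, credentials: "include" }),
    }),
  ],
});
```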
diff --git a/apps/backend/src/trpc.ts b/apps/backend/src/trpc.ts
new file mode 100644
index 00000000..c396efac
--- /dev/null
+++ b/apps/backend/src/trpc.ts
@@ -0,0 +1,94 @@
+import type { BaseContext } from "@repo/trpc";
+import { initTRPC, TRPCError } from "@trpc/server";
+import type { Request, Response } from "express";
+
+import { auth, type Session, type User } from "./auth";
+
+// Extend the base context with Express request/response and auth data
+export interface Context extends BaseContext {
+ req: Request;
+ res: Response;
+ user?: User;
+ session?: Session;
+}
+
+// Create context from Express request/response with auth
+export const createContext = async ({
+ req,
+ res,
+}: {
+ req: Request;
+ res: Response;
+}): Promise<Context> => {
+ let user: User | undefined;
+ let session: Session | undefined;
+
+ try {
+ // Check if we have cookies in the request
+ if (req.headers.cookie) {
+ // Create a proper Request object for better-auth
+ const sessionUrl = new URL(
+ "/api/auth/get-session",
+ `http://${req.headers.host || "localhost:12009"}`,
+ );
+
+ const headers = new Headers();
+ headers.set("cookie", req.headers.cookie);
+
+ const sessionRequest = new Request(sessionUrl.toString(), {
+ method: "GET",
+ headers,
+ });
+
+ const sessionResponse = await auth.handler(sessionRequest);
+
+ if (sessionResponse.ok) {
+ const sessionData = (await sessionResponse.json()) as {
+ user?: User;
+ session?: Session;
+ };
+
+ if (sessionData?.user && sessionData?.session) {
+ user = sessionData.user;
+ session = sessionData.session;
+ }
+ }
+ }
+ } catch (error) {
+ // Log error but don't throw - we want to allow unauthenticated requests
+ console.error("Error getting session in tRPC context:", error);
+ }
+
+ return {
+ req,
+ res,
+ user,
+ session,
+ };
+};
+
+// Initialize tRPC with extended context
+const t = initTRPC.context<Context>().create();
+
+// Export router and procedure helpers
+export const router = t.router;
+export const publicProcedure = t.procedure;
+
+// Create a protected procedure that requires authentication
+export const protectedProcedure = t.procedure.use(({ ctx, next }) => {
+ if (!ctx.user || !ctx.session) {
+ throw new TRPCError({
+ code: "UNAUTHORIZED",
+ message: "You must be logged in to access this resource",
+ });
+ }
+
+ return next({
+ ctx: {
+ ...ctx,
+ // Override types to indicate user and session are guaranteed to exist
+ user: ctx.user,
+ session: ctx.session,
+ } as Context & { user: User; session: Session },
+ });
+});
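With `publicProcedure` and `protectedProcedure` defined, individual routers can opt into authentication per procedure. A small hypothetical router showing the difference (the procedure names and returned fields are made up for illustration):

```ts
import { z } from "zod";

import { protectedProcedure, publicProcedure, router } from "./trpc";

export const exampleRouter = router({
  // Callable without a session; ctx.user may be undefined here.
  health: publicProcedure.query(() => ({ ok: true })),

  // Requires a better-auth session; ctx.user and ctx.session are non-null in the handler.
  whoami: protectedProcedure
    .input(z.object({ verbose: z.boolean().optional() }))
    .query(({ ctx, input }) =>
      input.verbose
        ? { userId: ctx.user.id, sessionId: ctx.session.id }
        : { userId: ctx.user.id },
    ),
});
```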
diff --git a/apps/backend/src/trpc/api-keys.impl.ts b/apps/backend/src/trpc/api-keys.impl.ts
new file mode 100644
index 00000000..99f00f8e
--- /dev/null
+++ b/apps/backend/src/trpc/api-keys.impl.ts
@@ -0,0 +1,105 @@
+import {
+ CreateApiKeyRequestSchema,
+ CreateApiKeyResponseSchema,
+ DeleteApiKeyRequestSchema,
+ DeleteApiKeyResponseSchema,
+ ListApiKeysResponseSchema,
+ UpdateApiKeyRequestSchema,
+ UpdateApiKeyResponseSchema,
+ ValidateApiKeyRequestSchema,
+ ValidateApiKeyResponseSchema,
+} from "@repo/zod-types";
+import { z } from "zod";
+
+import { ApiKeysRepository } from "../db/repositories";
+import { ApiKeysSerializer } from "../db/serializers";
+
+const apiKeysRepository = new ApiKeysRepository();
+
+export const apiKeysImplementations = {
+ create: async (
+    input: z.infer<typeof CreateApiKeyRequestSchema>,
+    userId: string,
+  ): Promise<z.infer<typeof CreateApiKeyResponseSchema>> => {
+ try {
+ const result = await apiKeysRepository.create({
+ name: input.name,
+ user_id: userId,
+ is_active: true,
+ });
+
+ return ApiKeysSerializer.serializeCreateApiKeyResponse(result);
+ } catch (error) {
+ console.error("Error creating API key:", error);
+ throw new Error(
+ error instanceof Error ? error.message : "Internal server error",
+ );
+ }
+ },
+
+ list: async (
+ userId: string,
+  ): Promise<z.infer<typeof ListApiKeysResponseSchema>> => {
+ try {
+ const apiKeys = await apiKeysRepository.findByUserId(userId);
+
+ return {
+ apiKeys: ApiKeysSerializer.serializeApiKeyList(apiKeys),
+ };
+ } catch (error) {
+ console.error("Error fetching API keys:", error);
+ throw new Error("Failed to fetch API keys");
+ }
+ },
+
+ update: async (
+    input: z.infer<typeof UpdateApiKeyRequestSchema>,
+    userId: string,
+  ): Promise<z.infer<typeof UpdateApiKeyResponseSchema>> => {
+ try {
+ const result = await apiKeysRepository.update(input.uuid, userId, {
+ name: input.name,
+ is_active: input.is_active,
+ });
+
+ return ApiKeysSerializer.serializeApiKey(result);
+ } catch (error) {
+ console.error("Error updating API key:", error);
+ throw new Error(
+ error instanceof Error ? error.message : "Internal server error",
+ );
+ }
+ },
+
+ delete: async (
+    input: z.infer<typeof DeleteApiKeyRequestSchema>,
+    userId: string,
+  ): Promise<z.infer<typeof DeleteApiKeyResponseSchema>> => {
+ try {
+ await apiKeysRepository.delete(input.uuid, userId);
+
+ return {
+ success: true,
+ message: "API key deleted successfully",
+ };
+ } catch (error) {
+ console.error("Error deleting API key:", error);
+ return {
+ success: false,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ validate: async (
+    input: z.infer<typeof ValidateApiKeyRequestSchema>,
+  ): Promise<z.infer<typeof ValidateApiKeyResponseSchema>> => {
+ try {
+ return await apiKeysRepository.validateApiKey(input.key);
+ } catch (error) {
+ console.error("Error validating API key:", error);
+ return { valid: false };
+ }
+ },
+};
diff --git a/apps/backend/src/trpc/config.impl.ts b/apps/backend/src/trpc/config.impl.ts
new file mode 100644
index 00000000..95a65783
--- /dev/null
+++ b/apps/backend/src/trpc/config.impl.ts
@@ -0,0 +1,33 @@
+import { configService } from "../lib/config.service";
+
+export const configImplementations = {
+  getSignupDisabled: async (): Promise<boolean> => {
+ return await configService.isSignupDisabled();
+ },
+
+ setSignupDisabled: async (input: {
+ disabled: boolean;
+ }): Promise<{ success: boolean }> => {
+ await configService.setSignupDisabled(input.disabled);
+ return { success: true };
+ },
+
+ getAllConfigs: async (): Promise<
+ Array<{ id: string; value: string; description?: string | null }>
+ > => {
+ return await configService.getAllConfigs();
+ },
+
+ setConfig: async (input: {
+ key: string;
+ value: string;
+ description?: string;
+ }): Promise<{ success: boolean }> => {
+ await configService.setConfig(
+ input.key as any,
+ input.value,
+ input.description,
+ );
+ return { success: true };
+ },
+};
diff --git a/apps/backend/src/trpc/endpoints.impl.ts b/apps/backend/src/trpc/endpoints.impl.ts
new file mode 100644
index 00000000..7fe6e8ac
--- /dev/null
+++ b/apps/backend/src/trpc/endpoints.impl.ts
@@ -0,0 +1,222 @@
+import {
+ CreateEndpointRequestSchema,
+ CreateEndpointResponseSchema,
+ DeleteEndpointResponseSchema,
+ GetEndpointResponseSchema,
+ ListEndpointsResponseSchema,
+ UpdateEndpointRequestSchema,
+ UpdateEndpointResponseSchema,
+} from "@repo/zod-types";
+import { z } from "zod";
+
+import {
+ ApiKeysRepository,
+ endpointsRepository,
+ mcpServersRepository,
+} from "../db/repositories";
+import { EndpointsSerializer } from "../db/serializers";
+
+const apiKeysRepository = new ApiKeysRepository();
+
+export const endpointsImplementations = {
+ create: async (
+    input: z.infer<typeof CreateEndpointRequestSchema>,
+    userId: string,
+  ): Promise<z.infer<typeof CreateEndpointResponseSchema>> => {
+ try {
+ // Check if endpoint name already exists
+ const existingEndpoint = await endpointsRepository.findByName(input.name);
+ if (existingEndpoint) {
+ return {
+ success: false as const,
+ message: "Endpoint name already exists",
+ };
+ }
+
+ const result = await endpointsRepository.create({
+ name: input.name,
+ description: input.description,
+ namespace_uuid: input.namespaceUuid,
+ enable_api_key_auth: input.enableApiKeyAuth ?? true,
+ use_query_param_auth: input.useQueryParamAuth ?? false,
+ });
+
+ // Create MCP server if requested
+ if (input.createMcpServer) {
+ try {
+ const mcpServerName = `${input.name}-endpoint`;
+ const mcpServerDescription = `Auto-generated MCP server for endpoint "${input.name}"`;
+
+ // Use environment variable for base URL, fallback to localhost if not set
+ const baseUrl =
+ process.env.NEXT_PUBLIC_APP_URL || "http://localhost:12008";
+ const endpointUrl = `${baseUrl}/metamcp/${input.name}/mcp`;
+
+ // Get or create API key for bearer token
+ let bearerToken = "";
+ try {
+ const userApiKeys = await apiKeysRepository.findByUserId(userId);
+ const activeApiKey = userApiKeys.find((key) => key.is_active);
+
+ if (activeApiKey) {
+ bearerToken = activeApiKey.key;
+ } else {
+ // Create a new API key if none exists
+ const newApiKey = await apiKeysRepository.create({
+ name: "Auto-generated for MCP Server",
+ user_id: userId,
+ is_active: true,
+ });
+ bearerToken = newApiKey.key;
+ }
+ } catch (apiKeyError) {
+ console.error("Error getting API key for MCP server:", apiKeyError);
+ // Continue without bearer token if API key operation fails
+ }
+
+ await mcpServersRepository.create({
+ name: mcpServerName,
+ description: mcpServerDescription,
+ type: "STREAMABLE_HTTP",
+ url: endpointUrl,
+ bearerToken: bearerToken,
+ command: "",
+ args: [],
+ env: {},
+ });
+ } catch (mcpError) {
+ console.error("Error creating MCP server:", mcpError);
+ // Don't fail the endpoint creation if MCP server creation fails
+ // Just log the error and continue
+ }
+ }
+
+ return {
+ success: true as const,
+ data: EndpointsSerializer.serializeEndpoint(result),
+ message: "Endpoint created successfully",
+ };
+ } catch (error) {
+ console.error("Error creating endpoint:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+  list: async (): Promise<z.infer<typeof ListEndpointsResponseSchema>> => {
+ try {
+ const endpoints = await endpointsRepository.findAllWithNamespaces();
+
+ return {
+ success: true as const,
+ data: EndpointsSerializer.serializeEndpointWithNamespaceList(endpoints),
+ message: "Endpoints retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching endpoints:", error);
+ return {
+ success: false as const,
+ data: [],
+ message: "Failed to fetch endpoints",
+ };
+ }
+ },
+
+ get: async (input: {
+ uuid: string;
+  }): Promise<z.infer<typeof GetEndpointResponseSchema>> => {
+ try {
+ const endpoint = await endpointsRepository.findByUuidWithNamespace(
+ input.uuid,
+ );
+
+ if (!endpoint) {
+ return {
+ success: false as const,
+ message: "Endpoint not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: EndpointsSerializer.serializeEndpointWithNamespace(endpoint),
+ message: "Endpoint retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching endpoint:", error);
+ return {
+ success: false as const,
+ message: "Failed to fetch endpoint",
+ };
+ }
+ },
+
+ delete: async (input: {
+ uuid: string;
+  }): Promise<z.infer<typeof DeleteEndpointResponseSchema>> => {
+ try {
+ const deletedEndpoint = await endpointsRepository.deleteByUuid(
+ input.uuid,
+ );
+
+ if (!deletedEndpoint) {
+ return {
+ success: false as const,
+ message: "Endpoint not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ message: "Endpoint deleted successfully",
+ };
+ } catch (error) {
+ console.error("Error deleting endpoint:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ update: async (
+    input: z.infer<typeof UpdateEndpointRequestSchema>,
+  ): Promise<z.infer<typeof UpdateEndpointResponseSchema>> => {
+ try {
+ // Check if another endpoint with the same name exists (excluding current one)
+ const existingEndpoint = await endpointsRepository.findByName(input.name);
+ if (existingEndpoint && existingEndpoint.uuid !== input.uuid) {
+ return {
+ success: false as const,
+ message: "Endpoint name already exists",
+ };
+ }
+
+ const result = await endpointsRepository.update({
+ uuid: input.uuid,
+ name: input.name,
+ description: input.description,
+ namespace_uuid: input.namespaceUuid,
+ enable_api_key_auth: input.enableApiKeyAuth,
+ use_query_param_auth: input.useQueryParamAuth,
+ });
+
+ return {
+ success: true as const,
+ data: EndpointsSerializer.serializeEndpoint(result),
+ message: "Endpoint updated successfully",
+ };
+ } catch (error) {
+ console.error("Error updating endpoint:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+};
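When `createMcpServer` is true, creating an endpoint also registers a `STREAMABLE_HTTP` MCP server that points back at the new endpoint's own `/mcp` URL, reusing (or creating) an active API key as the bearer token. An illustrative call, with every value a placeholder:

```ts
// Sketch of invoking the implementation directly; field names follow the usage above.
async function createResearchEndpoint(userId: string) {
  return endpointsImplementations.create(
    {
      name: "research-tools",
      description: "Public endpoint for the research namespace",
      namespaceUuid: "00000000-0000-0000-0000-000000000000",
      enableApiKeyAuth: true,
      useQueryParamAuth: false,
      // Also creates "research-tools-endpoint" pointing at .../metamcp/research-tools/mcp
      createMcpServer: true,
    },
    userId,
  );
}
```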
diff --git a/apps/backend/src/trpc/mcp-servers.impl.ts b/apps/backend/src/trpc/mcp-servers.impl.ts
new file mode 100644
index 00000000..52b24812
--- /dev/null
+++ b/apps/backend/src/trpc/mcp-servers.impl.ts
@@ -0,0 +1,209 @@
+import {
+ BulkImportMcpServersRequestSchema,
+ BulkImportMcpServersResponseSchema,
+ CreateMcpServerRequestSchema,
+ CreateMcpServerResponseSchema,
+ DeleteMcpServerResponseSchema,
+ GetMcpServerResponseSchema,
+ ListMcpServersResponseSchema,
+ UpdateMcpServerRequestSchema,
+ UpdateMcpServerResponseSchema,
+} from "@repo/zod-types";
+import { z } from "zod";
+
+import { mcpServersRepository } from "../db/repositories";
+import { McpServersSerializer } from "../db/serializers";
+
+export const mcpServersImplementations = {
+ create: async (
+    input: z.infer<typeof CreateMcpServerRequestSchema>,
+  ): Promise<z.infer<typeof CreateMcpServerResponseSchema>> => {
+ try {
+ const createdServer = await mcpServersRepository.create(input);
+
+ if (!createdServer) {
+ return {
+ success: false as const,
+ message: "Failed to create MCP server",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: McpServersSerializer.serializeMcpServer(createdServer),
+ message: "MCP server created successfully",
+ };
+ } catch (error) {
+ console.error("Error creating MCP server:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+  list: async (): Promise<z.infer<typeof ListMcpServersResponseSchema>> => {
+ try {
+ const servers = await mcpServersRepository.findAll();
+
+ return {
+ success: true as const,
+ data: McpServersSerializer.serializeMcpServerList(servers),
+ message: "MCP servers retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching MCP servers:", error);
+ return {
+ success: false as const,
+ data: [],
+ message: "Failed to fetch MCP servers",
+ };
+ }
+ },
+
+ bulkImport: async (
+    input: z.infer<typeof BulkImportMcpServersRequestSchema>,
+  ): Promise<z.infer<typeof BulkImportMcpServersResponseSchema>> => {
+ try {
+ const serversToInsert = [];
+ const errors: string[] = [];
+ let imported = 0;
+
+ for (const [serverName, serverConfig] of Object.entries(
+ input.mcpServers,
+ )) {
+ try {
+ // Validate server name format
+ if (!/^[a-zA-Z0-9_-]+$/.test(serverName)) {
+ throw new Error(
+ `Server name "${serverName}" is invalid. Server names must only contain letters, numbers, underscores, and hyphens.`,
+ );
+ }
+
+ // Provide default type if not specified
+ const serverWithDefaults = {
+ name: serverName,
+ type: serverConfig.type || ("STDIO" as const),
+ description: serverConfig.description || null,
+ command: serverConfig.command || null,
+ args: serverConfig.args || [],
+ env: serverConfig.env || {},
+ url: serverConfig.url || null,
+ bearerToken: undefined,
+ };
+
+ serversToInsert.push(serverWithDefaults);
+ } catch (error) {
+ errors.push(
+ `Failed to process server "${serverName}": ${error instanceof Error ? error.message : "Unknown error"}`,
+ );
+ }
+ }
+
+ if (serversToInsert.length > 0) {
+ await mcpServersRepository.bulkCreate(serversToInsert);
+ imported = serversToInsert.length;
+ }
+
+ return {
+ success: true as const,
+ imported,
+ errors: errors.length > 0 ? errors : undefined,
+ message: `Successfully imported ${imported} MCP servers${errors.length > 0 ? ` with ${errors.length} errors` : ""}`,
+ };
+ } catch (error) {
+ console.error("Error bulk importing MCP servers:", error);
+ return {
+ success: false as const,
+ imported: 0,
+ message:
+ error instanceof Error
+ ? error.message
+ : "Internal server error during bulk import",
+ };
+ }
+ },
+
+ get: async (input: {
+ uuid: string;
+  }): Promise<z.infer<typeof GetMcpServerResponseSchema>> => {
+ try {
+ const server = await mcpServersRepository.findByUuid(input.uuid);
+
+ if (!server) {
+ return {
+ success: false as const,
+ message: "MCP server not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: McpServersSerializer.serializeMcpServer(server),
+ message: "MCP server retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching MCP server:", error);
+ return {
+ success: false as const,
+ message: "Failed to fetch MCP server",
+ };
+ }
+ },
+
+ delete: async (input: {
+ uuid: string;
+  }): Promise<z.infer<typeof DeleteMcpServerResponseSchema>> => {
+ try {
+ const deletedServer = await mcpServersRepository.deleteByUuid(input.uuid);
+
+ if (!deletedServer) {
+ return {
+ success: false as const,
+ message: "MCP server not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ message: "MCP server deleted successfully",
+ };
+ } catch (error) {
+ console.error("Error deleting MCP server:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ update: async (
+    input: z.infer<typeof UpdateMcpServerRequestSchema>,
+  ): Promise<z.infer<typeof UpdateMcpServerResponseSchema>> => {
+ try {
+ const updatedServer = await mcpServersRepository.update(input);
+
+ if (!updatedServer) {
+ return {
+ success: false as const,
+ message: "MCP server not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: McpServersSerializer.serializeMcpServer(updatedServer),
+ message: "MCP server updated successfully",
+ };
+ } catch (error) {
+ console.error("Error updating MCP server:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+};
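`bulkImport` expects a `mcpServers` record keyed by server name (letters, digits, underscores, and hyphens only); entries without a `type` default to `STDIO`, so URL-based servers should set it explicitly. A payload sketch with placeholder servers:

```ts
// Illustrative bulk-import input; names, commands, and URLs are placeholders.
const payload = {
  mcpServers: {
    "filesystem-tools": {
      type: "STDIO" as const,
      command: "npx",
      args: ["-y", "@modelcontextprotocol/server-filesystem", "/tmp"],
      env: { LOG_LEVEL: "info" },
    },
    "remote-search": {
      type: "STREAMABLE_HTTP" as const, // omit this and the import would assume STDIO
      url: "https://example.com/mcp",
    },
  },
};

const report = await mcpServersImplementations.bulkImport(payload);
// report.imported -> number of servers inserted; report.errors -> per-server failures, if any
```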
diff --git a/apps/backend/src/trpc/namespaces.impl.ts b/apps/backend/src/trpc/namespaces.impl.ts
new file mode 100644
index 00000000..7ce27b63
--- /dev/null
+++ b/apps/backend/src/trpc/namespaces.impl.ts
@@ -0,0 +1,434 @@
+import {
+ CreateNamespaceRequestSchema,
+ CreateNamespaceResponseSchema,
+ DeleteNamespaceResponseSchema,
+ GetNamespaceResponseSchema,
+ GetNamespaceToolsRequestSchema,
+ GetNamespaceToolsResponseSchema,
+ ListNamespacesResponseSchema,
+ RefreshNamespaceToolsRequestSchema,
+ RefreshNamespaceToolsResponseSchema,
+ UpdateNamespaceRequestSchema,
+ UpdateNamespaceResponseSchema,
+ UpdateNamespaceServerStatusRequestSchema,
+ UpdateNamespaceServerStatusResponseSchema,
+ UpdateNamespaceToolStatusRequestSchema,
+ UpdateNamespaceToolStatusResponseSchema,
+} from "@repo/zod-types";
+import { z } from "zod";
+
+import {
+ mcpServersRepository,
+ namespaceMappingsRepository,
+ namespacesRepository,
+ toolsRepository,
+} from "../db/repositories";
+import { NamespacesSerializer } from "../db/serializers";
+
+export const namespacesImplementations = {
+ create: async (
+    input: z.infer<typeof CreateNamespaceRequestSchema>,
+  ): Promise<z.infer<typeof CreateNamespaceResponseSchema>> => {
+ try {
+ const result = await namespacesRepository.create({
+ name: input.name,
+ description: input.description,
+ mcpServerUuids: input.mcpServerUuids,
+ });
+
+ return {
+ success: true as const,
+ data: NamespacesSerializer.serializeNamespace(result),
+ message: "Namespace created successfully",
+ };
+ } catch (error) {
+ console.error("Error creating namespace:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+  list: async (): Promise<z.infer<typeof ListNamespacesResponseSchema>> => {
+ try {
+ const namespaces = await namespacesRepository.findAll();
+
+ return {
+ success: true as const,
+ data: NamespacesSerializer.serializeNamespaceList(namespaces),
+ message: "Namespaces retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching namespaces:", error);
+ return {
+ success: false as const,
+ data: [],
+ message: "Failed to fetch namespaces",
+ };
+ }
+ },
+
+ get: async (input: {
+ uuid: string;
+  }): Promise<z.infer<typeof GetNamespaceResponseSchema>> => {
+ try {
+ const namespaceWithServers =
+ await namespacesRepository.findByUuidWithServers(input.uuid);
+
+ if (!namespaceWithServers) {
+ return {
+ success: false as const,
+ message: "Namespace not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: NamespacesSerializer.serializeNamespaceWithServers(
+ namespaceWithServers,
+ ),
+ message: "Namespace retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching namespace:", error);
+ return {
+ success: false as const,
+ message: "Failed to fetch namespace",
+ };
+ }
+ },
+
+ getTools: async (
+    input: z.infer<typeof GetNamespaceToolsRequestSchema>,
+  ): Promise<z.infer<typeof GetNamespaceToolsResponseSchema>> => {
+ try {
+ const toolsData = await namespacesRepository.findToolsByNamespaceUuid(
+ input.namespaceUuid,
+ );
+
+ return {
+ success: true as const,
+ data: NamespacesSerializer.serializeNamespaceTools(toolsData),
+ message: "Namespace tools retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching namespace tools:", error);
+ return {
+ success: false as const,
+ data: [],
+ message: "Failed to fetch namespace tools",
+ };
+ }
+ },
+
+ delete: async (input: {
+ uuid: string;
+  }): Promise<z.infer<typeof DeleteNamespaceResponseSchema>> => {
+ try {
+ const deletedNamespace = await namespacesRepository.deleteByUuid(
+ input.uuid,
+ );
+
+ if (!deletedNamespace) {
+ return {
+ success: false as const,
+ message: "Namespace not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ message: "Namespace deleted successfully",
+ };
+ } catch (error) {
+ console.error("Error deleting namespace:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ update: async (
+    input: z.infer<typeof UpdateNamespaceRequestSchema>,
+  ): Promise<z.infer<typeof UpdateNamespaceResponseSchema>> => {
+ try {
+ const result = await namespacesRepository.update({
+ uuid: input.uuid,
+ name: input.name,
+ description: input.description,
+ mcpServerUuids: input.mcpServerUuids,
+ });
+
+ return {
+ success: true as const,
+ data: NamespacesSerializer.serializeNamespace(result),
+ message: "Namespace updated successfully",
+ };
+ } catch (error) {
+ console.error("Error updating namespace:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ updateServerStatus: async (
+    input: z.infer<typeof UpdateNamespaceServerStatusRequestSchema>,
+  ): Promise<z.infer<typeof UpdateNamespaceServerStatusResponseSchema>> => {
+ try {
+ const updatedMapping =
+ await namespaceMappingsRepository.updateServerStatus({
+ namespaceUuid: input.namespaceUuid,
+ serverUuid: input.serverUuid,
+ status: input.status,
+ });
+
+ if (!updatedMapping) {
+ return {
+ success: false as const,
+ message: "Server not found in namespace",
+ };
+ }
+
+ return {
+ success: true as const,
+ message: "Server status updated successfully",
+ };
+ } catch (error) {
+ console.error("Error updating server status:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ updateToolStatus: async (
+    input: z.infer<typeof UpdateNamespaceToolStatusRequestSchema>,
+  ): Promise<z.infer<typeof UpdateNamespaceToolStatusResponseSchema>> => {
+ try {
+ const updatedMapping = await namespaceMappingsRepository.updateToolStatus(
+ {
+ namespaceUuid: input.namespaceUuid,
+ toolUuid: input.toolUuid,
+ serverUuid: input.serverUuid,
+ status: input.status,
+ },
+ );
+
+ if (!updatedMapping) {
+ return {
+ success: false as const,
+ message: "Tool not found in namespace",
+ };
+ }
+
+ return {
+ success: true as const,
+ message: "Tool status updated successfully",
+ };
+ } catch (error) {
+ console.error("Error updating tool status:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+
+ refreshTools: async (
+    input: z.infer<typeof RefreshNamespaceToolsRequestSchema>,
+  ): Promise<z.infer<typeof RefreshNamespaceToolsResponseSchema>> => {
+ try {
+ if (!input.tools || input.tools.length === 0) {
+ return {
+ success: true as const,
+ message: "No tools to refresh",
+ toolsCreated: 0,
+ mappingsCreated: 0,
+ };
+ }
+
+ // Parse tool names to extract server names and actual tool names
+ const parsedTools: Array<{
+ serverName: string;
+ toolName: string;
+ description: string;
+        inputSchema: Record<string, unknown>;
+ }> = [];
+
+ for (const tool of input.tools) {
+ // Split by "__" - use last occurrence if there are multiple
+ const lastDoubleUnderscoreIndex = tool.name.lastIndexOf("__");
+
+ if (lastDoubleUnderscoreIndex === -1) {
+ console.warn(
+ `Tool name "${tool.name}" does not contain "__" separator, skipping`,
+ );
+ continue;
+ }
+
+ const serverName = tool.name.substring(0, lastDoubleUnderscoreIndex);
+ const toolName = tool.name.substring(lastDoubleUnderscoreIndex + 2);
+
+ if (!serverName || !toolName) {
+ console.warn(`Invalid tool name format "${tool.name}", skipping`);
+ continue;
+ }
+
+ parsedTools.push({
+ serverName,
+ toolName,
+ description: tool.description || "",
+ inputSchema: tool.inputSchema,
+ });
+ }
+
+ if (parsedTools.length === 0) {
+ return {
+ success: true as const,
+ message: "No valid tools to refresh after parsing",
+ toolsCreated: 0,
+ mappingsCreated: 0,
+ };
+ }
+
+ // Group tools by server name and resolve server UUIDs
+ const toolsByServerName: Record<
+ string,
+ {
+ serverUuid: string;
+ tools: Array<{
+ toolName: string;
+ description: string;
+            inputSchema: Record<string, unknown>;
+ }>;
+ }
+ > = {};
+
+ for (const parsedTool of parsedTools) {
+ // Find server by name - first try exact match
+ let server = await mcpServersRepository.findByName(
+ parsedTool.serverName,
+ );
+
+ // If exact match fails, try to handle nested MetaMCP scenarios
+ // For nested MetaMCP, tool names may be in format "ParentServer__ChildServer__tool"
+ // but we need to find the actual "ParentServer" in the database
+ if (!server && parsedTool.serverName.includes("__")) {
+ // Try the first part before the first "__" (this would be the actual server)
+ const firstDoubleUnderscoreIndex =
+ parsedTool.serverName.indexOf("__");
+ const actualServerName = parsedTool.serverName.substring(
+ 0,
+ firstDoubleUnderscoreIndex,
+ );
+
+ server = await mcpServersRepository.findByName(actualServerName);
+
+ if (server) {
+ console.log(
+ `Found nested MetaMCP server mapping: "${parsedTool.serverName}" -> "${actualServerName}"`,
+ );
+ // Update the parsed tool to use the correct server name and adjust tool name
+ const remainingPart = parsedTool.serverName.substring(
+ firstDoubleUnderscoreIndex + 2,
+ );
+ parsedTool.toolName = `${remainingPart}__${parsedTool.toolName}`;
+ parsedTool.serverName = actualServerName;
+ }
+ }
+
+ if (!server) {
+ console.warn(
+ `Server "${parsedTool.serverName}" not found in database, skipping tool "${parsedTool.toolName}"`,
+ );
+ continue;
+ }
+
+ if (!toolsByServerName[parsedTool.serverName]) {
+ toolsByServerName[parsedTool.serverName] = {
+ serverUuid: server.uuid,
+ tools: [],
+ };
+ }
+
+ toolsByServerName[parsedTool.serverName].tools.push({
+ toolName: parsedTool.toolName,
+ description: parsedTool.description,
+ inputSchema: parsedTool.inputSchema,
+ });
+ }
+
+ if (Object.keys(toolsByServerName).length === 0) {
+ return {
+ success: false as const,
+ message: "No servers found for the provided tools",
+ };
+ }
+
+ let totalToolsCreated = 0;
+ let totalMappingsCreated = 0;
+
+ // Process tools for each server
+ for (const [serverName, serverData] of Object.entries(
+ toolsByServerName,
+ )) {
+ const { serverUuid, tools } = serverData;
+
+ // Bulk upsert tools to the tools table with the actual tool names
+ const upsertedTools = await toolsRepository.bulkUpsert({
+ mcpServerUuid: serverUuid,
+ tools: tools.map((tool) => ({
+ name: tool.toolName, // Use the actual tool name, not the prefixed name
+ description: tool.description,
+ inputSchema: tool.inputSchema,
+ })),
+ });
+
+ totalToolsCreated += upsertedTools.length;
+
+ // Create namespace tool mappings
+ const toolMappings = upsertedTools.map((tool) => ({
+ toolUuid: tool.uuid,
+ serverUuid: serverUuid,
+ status: "ACTIVE" as const,
+ }));
+
+ const createdMappings =
+ await namespaceMappingsRepository.bulkUpsertNamespaceToolMappings({
+ namespaceUuid: input.namespaceUuid,
+ toolMappings,
+ });
+
+ totalMappingsCreated += createdMappings.length;
+
+ console.log(
+ `Processed ${tools.length} tools for server "${serverName}" (${serverUuid})`,
+ );
+ }
+
+ return {
+ success: true as const,
+ message: `Successfully refreshed ${totalToolsCreated} tools with ${totalMappingsCreated} mappings`,
+ toolsCreated: totalToolsCreated,
+ mappingsCreated: totalMappingsCreated,
+ };
+ } catch (error) {
+ console.error("Error refreshing namespace tools:", error);
+ return {
+ success: false as const,
+ message:
+ error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+};
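`refreshTools` relies on a naming convention: incoming tool names are split on the last `__` to recover the server and tool, and if that server is unknown it retries using the part before the first `__` (the nested MetaMCP case). A small worked sketch of the convention, with hypothetical server names:

```ts
// Mirrors the splitting logic in refreshTools above.
function splitToolName(name: string) {
  const i = name.lastIndexOf("__");
  if (i === -1) return null; // such tools are skipped by refreshTools
  return { serverName: name.substring(0, i), toolName: name.substring(i + 2) };
}

console.log(splitToolName("HackerNews__get_stories"));
// { serverName: "HackerNews", toolName: "get_stories" }

console.log(splitToolName("Parent__Child__search"));
// { serverName: "Parent__Child", toolName: "search" }
// If no server named "Parent__Child" exists, the implementation falls back to
// server "Parent" with tool name "Child__search".
```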
diff --git a/apps/backend/src/trpc/oauth.impl.ts b/apps/backend/src/trpc/oauth.impl.ts
new file mode 100644
index 00000000..e522c410
--- /dev/null
+++ b/apps/backend/src/trpc/oauth.impl.ts
@@ -0,0 +1,75 @@
+import {
+ GetOAuthSessionRequestSchema,
+ GetOAuthSessionResponseSchema,
+ UpsertOAuthSessionRequestSchema,
+ UpsertOAuthSessionResponseSchema,
+} from "@repo/zod-types";
+import { z } from "zod";
+
+import { oauthSessionsRepository } from "../db/repositories";
+import { OAuthSessionsSerializer } from "../db/serializers";
+
+export const oauthImplementations = {
+ get: async (
+    input: z.infer<typeof GetOAuthSessionRequestSchema>,
+  ): Promise<z.infer<typeof GetOAuthSessionResponseSchema>> => {
+ try {
+ const session = await oauthSessionsRepository.findByMcpServerUuid(
+ input.mcp_server_uuid,
+ );
+
+ if (!session) {
+ return {
+ success: false as const,
+ message: "OAuth session not found",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: OAuthSessionsSerializer.serializeOAuthSession(session),
+ message: "OAuth session retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching OAuth session:", error);
+ return {
+ success: false as const,
+ message: "Failed to fetch OAuth session",
+ };
+ }
+ },
+
+ upsert: async (
+    input: z.infer<typeof UpsertOAuthSessionRequestSchema>,
+  ): Promise<z.infer<typeof UpsertOAuthSessionResponseSchema>> => {
+ try {
+ const session = await oauthSessionsRepository.upsert({
+ mcp_server_uuid: input.mcp_server_uuid,
+ ...(input.client_information && {
+ client_information: input.client_information,
+ }),
+ ...(input.tokens && { tokens: input.tokens }),
+ ...(input.code_verifier && { code_verifier: input.code_verifier }),
+ });
+
+ if (!session) {
+ return {
+ success: false as const,
+ error: "Failed to upsert OAuth session",
+ };
+ }
+
+ return {
+ success: true as const,
+ data: OAuthSessionsSerializer.serializeOAuthSession(session),
+ message: "OAuth session upserted successfully",
+ };
+ } catch (error) {
+ console.error("Error upserting OAuth session:", error);
+ return {
+ success: false as const,
+ error: error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+};
diff --git a/apps/backend/src/trpc/tools.impl.ts b/apps/backend/src/trpc/tools.impl.ts
new file mode 100644
index 00000000..9f8f0822
--- /dev/null
+++ b/apps/backend/src/trpc/tools.impl.ts
@@ -0,0 +1,67 @@
+import {
+ CreateToolRequestSchema,
+ CreateToolResponseSchema,
+ GetToolsByMcpServerUuidRequestSchema,
+ GetToolsByMcpServerUuidResponseSchema,
+} from "@repo/zod-types";
+import { z } from "zod";
+
+import { toolsRepository } from "../db/repositories";
+import { ToolsSerializer } from "../db/serializers";
+
+export const toolsImplementations = {
+ getByMcpServerUuid: async (
+    input: z.infer<typeof GetToolsByMcpServerUuidRequestSchema>,
+  ): Promise<z.infer<typeof GetToolsByMcpServerUuidResponseSchema>> => {
+ try {
+ const tools = await toolsRepository.findByMcpServerUuid(
+ input.mcpServerUuid,
+ );
+
+ return {
+ success: true as const,
+ data: ToolsSerializer.serializeToolList(tools),
+ message: "Tools retrieved successfully",
+ };
+ } catch (error) {
+ console.error("Error fetching tools by MCP server UUID:", error);
+ return {
+ success: false as const,
+ data: [],
+ message: "Failed to fetch tools",
+ };
+ }
+ },
+
+ create: async (
+    input: z.infer<typeof CreateToolRequestSchema>,
+  ): Promise<z.infer<typeof CreateToolResponseSchema>> => {
+ try {
+ if (!input.tools || input.tools.length === 0) {
+ return {
+ success: true as const,
+ count: 0,
+ message: "No tools to save",
+ };
+ }
+
+ const results = await toolsRepository.bulkUpsert({
+ tools: input.tools,
+ mcpServerUuid: input.mcpServerUuid,
+ });
+
+ return {
+ success: true as const,
+ count: results.length,
+ message: `Successfully saved ${results.length} tools`,
+ };
+ } catch (error) {
+ console.error("Error saving tools to database:", error);
+ return {
+ success: false as const,
+ count: 0,
+ error: error instanceof Error ? error.message : "Internal server error",
+ };
+ }
+ },
+};
diff --git a/apps/backend/tsconfig.json b/apps/backend/tsconfig.json
new file mode 100644
index 00000000..5f62273c
--- /dev/null
+++ b/apps/backend/tsconfig.json
@@ -0,0 +1,25 @@
+{
+ "compilerOptions": {
+ "target": "ES2022",
+ "module": "ESNext",
+ "lib": ["ES2022"],
+ "outDir": "./dist",
+ "rootDir": "./src",
+ "strict": true,
+ "esModuleInterop": true,
+ "skipLibCheck": true,
+ "forceConsistentCasingInFileNames": true,
+ "moduleResolution": "Node",
+ "allowSyntheticDefaultImports": true,
+ "experimentalDecorators": true,
+ "emitDecoratorMetadata": true,
+ "allowImportingTsExtensions": true,
+ "noEmit": true,
+ "baseUrl": ".",
+ "paths": {
+ "@/*": ["./src/*"]
+ }
+ },
+ "include": ["src/**/*"],
+ "exclude": ["node_modules", "dist"]
+}
diff --git a/apps/backend/tsup.config.ts b/apps/backend/tsup.config.ts
new file mode 100644
index 00000000..c327e3f9
--- /dev/null
+++ b/apps/backend/tsup.config.ts
@@ -0,0 +1,32 @@
+import { defineConfig } from "tsup";
+
+export default defineConfig({
+ entry: ["src/index.ts"],
+ format: ["esm"],
+ target: "node18",
+ outDir: "dist",
+ sourcemap: true,
+ clean: true,
+ splitting: false,
+ bundle: true,
+ keepNames: true,
+ minify: false,
+ external: [
+ "@modelcontextprotocol/sdk",
+ "@repo/trpc",
+ "@repo/zod-types",
+ "@trpc/server",
+ "basic-auth",
+ "better-auth",
+ "cors",
+ "dotenv",
+ "drizzle-orm",
+ "express",
+ "helmet",
+ "nanoid",
+ "pg",
+ "shell-quote",
+ "spawn-rx",
+ "zod",
+ ],
+});
diff --git a/apps/frontend/.gitignore b/apps/frontend/.gitignore
new file mode 100644
index 00000000..f886745c
--- /dev/null
+++ b/apps/frontend/.gitignore
@@ -0,0 +1,36 @@
+# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
+
+# dependencies
+/node_modules
+/.pnp
+.pnp.js
+.yarn/install-state.gz
+
+# testing
+/coverage
+
+# next.js
+/.next/
+/out/
+
+# production
+/build
+
+# misc
+.DS_Store
+*.pem
+
+# debug
+npm-debug.log*
+yarn-debug.log*
+yarn-error.log*
+
+# env files (can opt-in for committing if needed)
+.env*
+
+# vercel
+.vercel
+
+# typescript
+*.tsbuildinfo
+next-env.d.ts
diff --git a/apps/frontend/README.md b/apps/frontend/README.md
new file mode 100644
index 00000000..0e7ef2a2
--- /dev/null
+++ b/apps/frontend/README.md
@@ -0,0 +1,36 @@
+This is a [Next.js](https://nextjs.org) project bootstrapped with [`create-next-app`](https://nextjs.org/docs/app/api-reference/create-next-app).
+
+## Getting Started
+
+First, run the development server:
+
+```bash
+npm run dev
+# or
+yarn dev
+# or
+pnpm dev
+# or
+bun dev
+```
+
+Open [http://localhost:12008](http://localhost:12008) with your browser to see the result.
+
+You can start editing the page by modifying `app/page.tsx`. The page auto-updates as you edit the file.
+
+This project uses [`next/font`](https://nextjs.org/docs/app/building-your-application/optimizing/fonts) to automatically optimize and load Inter, a custom Google Font.
+
+## Learn More
+
+To learn more about Next.js, take a look at the following resources:
+
+- [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API.
+- [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial.
+
+You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js) - your feedback and contributions are welcome!
+
+## Deploy on Vercel
+
+The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js.
+
+Check out our [Next.js deployment documentation](https://nextjs.org/docs/app/building-your-application/deploying) for more details.
diff --git a/apps/frontend/app/(sidebar)/api-keys/page.tsx b/apps/frontend/app/(sidebar)/api-keys/page.tsx
new file mode 100644
index 00000000..94e54397
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/api-keys/page.tsx
@@ -0,0 +1,289 @@
+"use client";
+
+import { zodResolver } from "@hookform/resolvers/zod";
+import { CreateApiKeyFormSchema } from "@repo/zod-types";
+import { format } from "date-fns";
+import { Copy, Eye, EyeOff, Key, Plus, Trash2 } from "lucide-react";
+import { useState } from "react";
+import { useForm } from "react-hook-form";
+import { toast } from "sonner";
+import { z } from "zod";
+
+import { Button } from "@/components/ui/button";
+import {
+ Dialog,
+ DialogContent,
+ DialogDescription,
+ DialogHeader,
+ DialogTitle,
+ DialogTrigger,
+} from "@/components/ui/dialog";
+import { Input } from "@/components/ui/input";
+import { Separator } from "@/components/ui/separator";
+import {
+ Table,
+ TableBody,
+ TableCell,
+ TableHead,
+ TableHeader,
+ TableRow,
+} from "@/components/ui/table";
+import { trpc } from "@/lib/trpc";
+
+type CreateApiKeyFormData = z.infer<typeof CreateApiKeyFormSchema>;
+
+export default function ApiKeysPage() {
+ const [createDialogOpen, setCreateDialogOpen] = useState(false);
+  const [newApiKey, setNewApiKey] = useState<string | null>(null);
+  const [visibleKeys, setVisibleKeys] = useState<Set<string>>(new Set());
+
+ const { data: apiKeys, refetch } = trpc.apiKeys.list.useQuery();
+ const createMutation = trpc.apiKeys.create.useMutation({
+ onSuccess: (data) => {
+ setNewApiKey(data.key);
+ refetch();
+ toast.success("API key created successfully");
+ },
+ onError: (error) => {
+ toast.error(error.message);
+ },
+ });
+
+ const deleteMutation = trpc.apiKeys.delete.useMutation({
+ onSuccess: () => {
+ refetch();
+ toast.success("API key deleted successfully");
+ },
+ onError: (error) => {
+ toast.error(error.message);
+ },
+ });
+
+  const form = useForm<CreateApiKeyFormData>({
+ resolver: zodResolver(CreateApiKeyFormSchema),
+ defaultValues: {
+ name: "",
+ },
+ });
+
+ const onSubmit = (data: CreateApiKeyFormData) => {
+ createMutation.mutate(data);
+ };
+
+ const handleCreateSuccess = () => {
+ form.reset();
+ setCreateDialogOpen(false);
+ };
+
+ const copyToClipboard = (text: string) => {
+ navigator.clipboard.writeText(text);
+ toast.success("Copied to clipboard");
+ };
+
+ const toggleKeyVisibility = (uuid: string) => {
+ setVisibleKeys((prev) => {
+ const newSet = new Set(prev);
+ if (newSet.has(uuid)) {
+ newSet.delete(uuid);
+ } else {
+ newSet.add(uuid);
+ }
+ return newSet;
+ });
+ };
+
+ const maskKey = (key: string) => {
+ return "β’".repeat(32);
+ };
+
+ return (
+
+
+
+
+
+ API Keys
+
+ Manage your API keys for programmatic access
+
+
+
+
+
+
+
+
+
+
+
+
+ Name
+ Key
+ Created
+ Status
+ Actions
+
+
+
+ {apiKeys?.apiKeys?.length === 0 ? (
+
+
+
+
+ No API keys found
+
+ Create your first API key to get started
+
+
+
+
+ ) : (
+ apiKeys?.apiKeys?.map((apiKey) => (
+
+ {apiKey.name}
+
+
+
+ {visibleKeys.has(apiKey.uuid)
+ ? apiKey.key
+ : maskKey(apiKey.key)}
+
+
+
+
+
+
+ {format(new Date(apiKey.created_at), "MMM d, yyyy")}
+
+
+
+ {apiKey.is_active ? "Active" : "Inactive"}
+
+
+
+
+
+
+ ))
+ )}
+
+
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/endpoints/endpoints-list.tsx b/apps/frontend/app/(sidebar)/endpoints/endpoints-list.tsx
new file mode 100644
index 00000000..0a12c573
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/endpoints/endpoints-list.tsx
@@ -0,0 +1,481 @@
+"use client";
+
+import { EndpointWithNamespace } from "@repo/zod-types";
+import {
+ ColumnDef,
+ flexRender,
+ getCoreRowModel,
+ getFilteredRowModel,
+ getSortedRowModel,
+ SortingState,
+ useReactTable,
+} from "@tanstack/react-table";
+import {
+ ArrowUpDown,
+ Copy,
+ Edit,
+ Link,
+ MoreHorizontal,
+ Package,
+ Search,
+ Trash2,
+} from "lucide-react";
+import { useRouter } from "next/navigation";
+import { useState } from "react";
+import { toast } from "sonner";
+
+import { EditEndpoint } from "@/components/edit-endpoint";
+import { Button } from "@/components/ui/button";
+import {
+ Dialog,
+ DialogContent,
+ DialogDescription,
+ DialogFooter,
+ DialogHeader,
+ DialogTitle,
+} from "@/components/ui/dialog";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
+import { Input } from "@/components/ui/input";
+import {
+ Table,
+ TableBody,
+ TableCell,
+ TableHead,
+ TableHeader,
+ TableRow,
+} from "@/components/ui/table";
+import { trpc } from "@/lib/trpc";
+
+interface EndpointsListProps {
+ onRefresh?: () => void;
+}
+
+export function EndpointsList({ onRefresh }: EndpointsListProps) {
+  const [sorting, setSorting] = useState<SortingState>([]);
+  const [globalFilter, setGlobalFilter] = useState("");
+  const [deleteDialogOpen, setDeleteDialogOpen] = useState(false);
+  const [endpointToDelete, setEndpointToDelete] =
+    useState<EndpointWithNamespace | null>(null);
+  const [editDialogOpen, setEditDialogOpen] = useState(false);
+  const [endpointToEdit, setEndpointToEdit] =
+    useState<EndpointWithNamespace | null>(null);
+ const router = useRouter();
+
+ const utils = trpc.useUtils();
+
+ // Fetch endpoints using tRPC
+ const {
+ data: endpointsResponse,
+ isLoading,
+ error,
+ } = trpc.frontend.endpoints.list.useQuery();
+
+ // Fetch user's API keys to use in URLs
+ const { data: apiKeysResponse } = trpc.apiKeys.list.useQuery();
+
+ // Delete mutation
+ const deleteEndpointMutation = trpc.frontend.endpoints.delete.useMutation({
+ onSuccess: () => {
+ toast.success("Endpoint deleted successfully");
+ utils.frontend.endpoints.list.invalidate();
+ setDeleteDialogOpen(false);
+ setEndpointToDelete(null);
+ onRefresh?.();
+ },
+ onError: (error) => {
+ toast.error("Failed to delete endpoint", {
+ description: error.message,
+ });
+ },
+ });
+
+ const endpoints = endpointsResponse?.success ? endpointsResponse.data : [];
+
+ const handleDeleteEndpoint = (endpoint: EndpointWithNamespace) => {
+ setEndpointToDelete(endpoint);
+ setDeleteDialogOpen(true);
+ };
+
+ const handleEditEndpoint = (endpoint: EndpointWithNamespace) => {
+ setEndpointToEdit(endpoint);
+ setEditDialogOpen(true);
+ };
+
+ const handleEditSuccess = (_updatedEndpoint: EndpointWithNamespace) => {
+ // The cache invalidation is handled in the EditEndpoint component
+ onRefresh?.();
+ };
+
+ const confirmDelete = () => {
+ if (endpointToDelete) {
+ deleteEndpointMutation.mutate({ uuid: endpointToDelete.uuid });
+ }
+ };
+
+ const formatDate = (dateString: string) => {
+ return new Date(dateString).toLocaleString();
+ };
+
+ // Define columns for the data table
+  const columns: ColumnDef<EndpointWithNamespace>[] = [
+ {
+ accessorKey: "name",
+ header: ({ column }) => {
+ return (
+
+ );
+ },
+ cell: ({ row }) => {
+ const endpoint = row.original;
+ return (
+
+ {endpoint.name}
+
+ SSE: {process.env.NEXT_PUBLIC_APP_URL}/metamcp/{endpoint.name}/sse
+
+ Streamable HTTP: {process.env.NEXT_PUBLIC_APP_URL}/metamcp/
+ {endpoint.name}
+ /mcp
+
+
+ );
+ },
+ },
+ {
+ accessorKey: "description",
+ header: "Description",
+ cell: ({ row }) => {
+ const description = row.getValue("description") as string | null;
+ return (
+
+ {description ? (
+ {description}
+ ) : (
+
+ No description
+
+ )}
+
+ );
+ },
+ },
+ {
+ accessorKey: "namespace",
+ header: "Namespace",
+ cell: ({ row }) => {
+ const endpoint = row.original;
+ return (
+
+
+ router.push(`/namespaces/${endpoint.namespace.uuid}`)
+ }
+ >
+
+ {endpoint.namespace.name}
+
+
+ );
+ },
+ },
+ {
+ accessorKey: "created_at",
+ header: ({ column }) => {
+ return (
+
+ );
+ },
+ cell: ({ row }) => {
+ const date = formatDate(row.getValue("created_at"));
+ return (
+ {date}
+ );
+ },
+ },
+ {
+ id: "actions",
+ header: "Actions",
+ cell: ({ row }) => {
+ const endpoint = row.original;
+
+ const copyFullSseUrl = () => {
+ const baseUrl = `${process.env.NEXT_PUBLIC_APP_URL}/metamcp/${endpoint.name}/sse`;
+ navigator.clipboard.writeText(baseUrl);
+ toast.success("SSE URL copied to clipboard");
+ };
+
+ const copyFullShttpUrl = () => {
+ const baseUrl = `${process.env.NEXT_PUBLIC_APP_URL}/metamcp/${endpoint.name}/mcp`;
+ navigator.clipboard.writeText(baseUrl);
+ toast.success("SHTTP URL copied to clipboard");
+ };
+
+ const getApiKey = () => {
+ const apiKeys = apiKeysResponse?.apiKeys || [];
+ const activeApiKey = apiKeys.find((key) => key.is_active);
+ return activeApiKey?.key || "YOUR_API_KEY";
+ };
+
+ const copyFullSseUrlWithApiKey = () => {
+ const apiKey = getApiKey();
+ const baseUrl = `${process.env.NEXT_PUBLIC_APP_URL}/metamcp/${endpoint.name}/sse?api_key=${apiKey}`;
+ navigator.clipboard.writeText(baseUrl);
+ toast.success("SSE URL with API key parameter copied to clipboard");
+ };
+
+ const copyFullShttpUrlWithApiKey = () => {
+ const apiKey = getApiKey();
+ const baseUrl = `${process.env.NEXT_PUBLIC_APP_URL}/metamcp/${endpoint.name}/mcp?api_key=${apiKey}`;
+ navigator.clipboard.writeText(baseUrl);
+ toast.success("SHTTP URL with API key parameter copied to clipboard");
+ };
+
+ return (
+
+
+
+
+
+ handleEditEndpoint(endpoint)}>
+
+ Edit endpoint
+
+ navigator.clipboard.writeText(endpoint.uuid)}
+ >
+
+ Copy endpoint UUID
+
+
+
+ Copy full SSE URL
+
+
+
+ Copy full SHTTP URL
+
+ {endpoint.use_query_param_auth && (
+ <>
+
+
+ Copy SSE URL with API key
+
+
+
+ Copy SHTTP URL with API key
+
+ >
+ )}
+
+ router.push(`/namespaces/${endpoint.namespace.uuid}`)
+ }
+ >
+
+ View namespace
+
+ handleDeleteEndpoint(endpoint)}
+ className="text-destructive"
+ >
+
+ Delete endpoint
+
+
+
+ );
+ },
+ },
+ ];
+
+ const table = useReactTable({
+ data: endpoints,
+ columns,
+ onSortingChange: setSorting,
+ onGlobalFilterChange: (value) => setGlobalFilter(value || ""),
+ globalFilterFn: "includesString",
+ getCoreRowModel: getCoreRowModel(),
+ getFilteredRowModel: getFilteredRowModel(),
+ getSortedRowModel: getSortedRowModel(),
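+    // Key rows by endpoint UUID so row identity stays stable across refetches and sorting.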
+ getRowId: (row) => row.uuid,
+ state: {
+ sorting,
+ globalFilter,
+ },
+ // Maintain sorting stability
+ manualSorting: false,
+ enableSorting: true,
+ });
+
+ if (isLoading) {
+ return (
+
+
+
+
+ Loading endpoints...
+
+
+
+
+ );
+ }
+
+ if (error) {
+ return (
+
+
+ Failed to load endpoints
+ {error.message}
+
+
+ );
+ }
+
+ if (endpoints.length === 0) {
+ return (
+
+
+
+ No Endpoints
+
+ You haven't created any endpoints yet. Get started by creating
+ your first endpoint to make your namespaces publicly accessible.
+
+
+
+ );
+ }
+
+ return (
+
+
+
+
+ setGlobalFilter(event.target.value || "")}
+ className="pl-8"
+ />
+
+
+
+
+
+
+ {table.getHeaderGroups().map((headerGroup) => (
+
+ {headerGroup.headers.map((header) => {
+ return (
+
+ {header.isPlaceholder
+ ? null
+ : flexRender(
+ header.column.columnDef.header,
+ header.getContext(),
+ )}
+
+ );
+ })}
+
+ ))}
+
+
+ {table.getRowModel().rows?.length ? (
+ table.getRowModel().rows.map((row) => (
+
+ {row.getVisibleCells().map((cell) => (
+
+ {flexRender(
+ cell.column.columnDef.cell,
+ cell.getContext(),
+ )}
+
+ ))}
+
+ ))
+ ) : (
+
+
+ No results.
+
+
+ )}
+
+
+
+
+ {/* Edit Endpoint Dialog */}
+ {
+ setEditDialogOpen(false);
+ setEndpointToEdit(null);
+ }}
+ onSuccess={handleEditSuccess}
+ />
+
+ {/* Delete Confirmation Dialog */}
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/endpoints/page.tsx b/apps/frontend/app/(sidebar)/endpoints/page.tsx
new file mode 100644
index 00000000..53e74432
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/endpoints/page.tsx
@@ -0,0 +1,372 @@
+"use client";
+
+import { zodResolver } from "@hookform/resolvers/zod";
+import {
+ CreateEndpointFormData,
+ createEndpointFormSchema,
+ CreateEndpointRequest,
+} from "@repo/zod-types";
+import { Check, ChevronDown, Link, Plus } from "lucide-react";
+import { useState } from "react";
+import { useForm } from "react-hook-form";
+import { toast } from "sonner";
+
+import { Button } from "@/components/ui/button";
+import { Checkbox } from "@/components/ui/checkbox";
+import {
+ Dialog,
+ DialogContent,
+ DialogDescription,
+ DialogHeader,
+ DialogTitle,
+ DialogTrigger,
+} from "@/components/ui/dialog";
+import {
+ DropdownMenu,
+ DropdownMenuContent,
+ DropdownMenuItem,
+ DropdownMenuTrigger,
+} from "@/components/ui/dropdown-menu";
+import { Input } from "@/components/ui/input";
+import { Switch } from "@/components/ui/switch";
+import { Textarea } from "@/components/ui/textarea";
+import { trpc } from "@/lib/trpc";
+
+import { EndpointsList } from "./endpoints-list";
+
+export default function EndpointsPage() {
+ const [createOpen, setCreateOpen] = useState(false);
+ const [isSubmitting, setIsSubmitting] = useState(false);
+  const [selectedNamespaceUuid, setSelectedNamespaceUuid] =
+    useState<string>("");
+  const [selectedNamespaceName, setSelectedNamespaceName] =
+    useState<string>("");
+
+ // Get the tRPC query client for cache invalidation
+ const utils = trpc.useUtils();
+
+ // Fetch available namespaces for selection
+ const { data: namespacesResponse, isLoading: namespacesLoading } =
+ trpc.frontend.namespaces.list.useQuery();
+
+ const availableNamespaces = namespacesResponse?.success
+ ? namespacesResponse.data
+ : [];
+
+ // tRPC mutation for creating endpoint
+ const createEndpointMutation = trpc.frontend.endpoints.create.useMutation({
+ onSuccess: (data) => {
+ console.log("Endpoint created successfully:", data);
+ toast.success("Endpoint Created", {
+ description: `Successfully created "${form.getValues().name}" endpoint`,
+ });
+ setCreateOpen(false);
+ form.reset({
+ name: "",
+ description: "",
+ namespaceUuid: "",
+ });
+ setSelectedNamespaceUuid("");
+ setSelectedNamespaceName("");
+ // Invalidate and refetch the endpoint list
+ utils.frontend.endpoints.list.invalidate();
+ },
+ onError: (error) => {
+ console.error("Error creating endpoint:", error);
+ toast.error("Failed to Create Endpoint", {
+ description: error.message || "An unexpected error occurred",
+ });
+ },
+ onSettled: () => {
+ setIsSubmitting(false);
+ },
+ });
+
+  const form = useForm<CreateEndpointFormData>({
+ resolver: zodResolver(createEndpointFormSchema),
+ defaultValues: {
+ name: "",
+ description: "",
+ namespaceUuid: "",
+ enableApiKeyAuth: true,
+ useQueryParamAuth: false,
+ createMcpServer: true,
+ },
+ });
+
+ const onSubmit = async (data: CreateEndpointFormData) => {
+ setIsSubmitting(true);
+ try {
+ // Create the API request payload
+ const apiPayload: CreateEndpointRequest = {
+ name: data.name,
+ description: data.description,
+ namespaceUuid: data.namespaceUuid,
+ enableApiKeyAuth: data.enableApiKeyAuth,
+ useQueryParamAuth: data.useQueryParamAuth,
+ createMcpServer: data.createMcpServer,
+ };
+
+ // Use tRPC mutation
+ createEndpointMutation.mutate(apiPayload);
+ } catch (error) {
+ setIsSubmitting(false);
+ console.error("Error preparing endpoint data:", error);
+ toast.error("Failed to Create Endpoint", {
+ description:
+ error instanceof Error
+ ? error.message
+ : "An unexpected error occurred",
+ });
+ }
+ };
+
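+  // Keep the local dropdown selection and the form's namespaceUuid field in sync.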
+ const handleNamespaceSelect = (
+ namespaceUuid: string,
+ namespaceName: string,
+ ) => {
+ setSelectedNamespaceUuid(namespaceUuid);
+ setSelectedNamespaceName(namespaceName);
+ form.setValue("namespaceUuid", namespaceUuid);
+ form.clearErrors("namespaceUuid");
+ };
+
+ const resetForm = () => {
+ setCreateOpen(false);
+ form.reset({
+ name: "",
+ description: "",
+ namespaceUuid: "",
+ enableApiKeyAuth: true,
+ useQueryParamAuth: false,
+ createMcpServer: true,
+ });
+ setSelectedNamespaceUuid("");
+ setSelectedNamespaceName("");
+ };
+
+ return (
+
+
+
+
+
+
+ MetaMCP Endpoints
+
+
+ Create public unified MCP server endpoints that map to namespaces
+ for external access
+
+
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/layout.tsx b/apps/frontend/app/(sidebar)/layout.tsx
new file mode 100644
index 00000000..56c21f9e
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/layout.tsx
@@ -0,0 +1,171 @@
+"use client";
+
+import {
+ Key,
+ Link,
+ Package,
+ Search,
+ SearchCode,
+ Server,
+ Settings,
+} from "lucide-react";
+import Image from "next/image";
+
+import { Button } from "@/components/ui/button";
+import { Separator } from "@/components/ui/separator";
+import {
+ Sidebar,
+ SidebarContent,
+ SidebarFooter,
+ SidebarGroup,
+ SidebarGroupContent,
+ SidebarGroupLabel,
+ SidebarHeader,
+ SidebarInset,
+ SidebarMenu,
+ SidebarMenuButton,
+ SidebarMenuItem,
+ SidebarProvider,
+ SidebarTrigger,
+} from "@/components/ui/sidebar";
+import { authClient } from "@/lib/auth-client";
+
+// Menu items (removed Home item)
+const items = [
+ {
+ title: "Explore MCP Servers (beta)",
+ url: "/search",
+ icon: Search,
+ },
+ {
+ title: "MCP Servers",
+ url: "/mcp-servers",
+ icon: Server,
+ },
+ {
+ title: "MetaMCP Namespaces",
+ url: "/namespaces",
+ icon: Package,
+ },
+ {
+ title: "MetaMCP Endpoints",
+ url: "/endpoints",
+ icon: Link,
+ },
+ {
+ title: "MCP Inspector",
+ url: "/mcp-inspector",
+ icon: SearchCode,
+ },
+ {
+ title: "API Keys",
+ url: "/api-keys",
+ icon: Key,
+ },
+ {
+ title: "Settings",
+ url: "/settings",
+ icon: Settings,
+ },
+];
+
+function UserInfoFooter() {
+ const { data: session } = authClient.useSession();
+
+ const handleSignOut = async () => {
+ await authClient.signOut({
+ fetchOptions: {
+ onSuccess: () => {
+ window.location.href = "/login";
+ },
+ },
+ });
+ };
+
+ if (!session?.user) {
+ return null;
+ }
+
+ return (
+
+
+
+
+ Signed in as
+
+
+ {session.user.name}
+
+
+ {session.user.email}
+
+
+
+
+
+
+ );
+}
+
+export default function SidebarLayout({
+ children,
+}: {
+ children: React.ReactNode;
+}) {
+ return (
+
+
+
+
+
+ MetaMCP
+
+
+
+
+
+ Application
+
+
+ {items.map((item) => (
+
+
+
+
+ {item.title}
+
+
+
+ ))}
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {children}
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector.tsx b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector.tsx
new file mode 100644
index 00000000..9e6e7d4e
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector.tsx
@@ -0,0 +1,123 @@
+"use client";
+
+import { RequestOptions } from "@modelcontextprotocol/sdk/shared/protocol.js";
+import { ClientRequest } from "@modelcontextprotocol/sdk/types.js";
+import {
+ ActivitySquare,
+ FileText,
+ FolderTree,
+ MessageSquare,
+ SearchCode,
+ Wrench,
+ Zap,
+} from "lucide-react";
+import { useState } from "react";
+import { z } from "zod";
+
+import { Tabs, TabsContent, TabsList, TabsTrigger } from "@/components/ui/tabs";
+
+import { InspectorPing } from "./inspector/inspector-ping";
+import { InspectorPrompts } from "./inspector/inspector-prompts";
+import { InspectorResources } from "./inspector/inspector-resources";
+import { InspectorRoots } from "./inspector/inspector-roots";
+import { InspectorSampling } from "./inspector/inspector-sampling";
+import { InspectorTools } from "./inspector/inspector-tools";
+
+interface InspectorProps {
+ mcpServerUuid: string;
+  makeRequest: <T extends z.ZodType>(
+    request: ClientRequest,
+    schema: T,
+    options?: RequestOptions & { suppressToast?: boolean },
+  ) => Promise<z.infer<T>>;
+  serverCapabilities?: Record<string, unknown> | null;
+}
+
+export function Inspector({
+ mcpServerUuid,
+ makeRequest,
+ serverCapabilities,
+}: InspectorProps) {
+ const [activeTab, setActiveTab] = useState("tools");
+
+ // Check server capabilities to determine which tabs to show
+ const hasTools = serverCapabilities?.tools !== undefined;
+ const hasResources = serverCapabilities?.resources !== undefined;
+ const hasPrompts = serverCapabilities?.prompts !== undefined;
+ const hasRoots = serverCapabilities?.roots !== undefined;
+ const hasSampling = serverCapabilities?.sampling !== undefined;
+
+ return (
+
+
+
+ MCP Inspector
+
+ Interactive testing and debugging interface
+
+
+
+
+
+
+
+ Tools
+
+
+
+ Resources
+
+
+
+ Prompts
+
+
+
+ Ping
+
+
+
+ Roots
+
+
+
+ Sampling
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-ping.tsx b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-ping.tsx
new file mode 100644
index 00000000..2d19141e
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-ping.tsx
@@ -0,0 +1,360 @@
+"use client";
+
+import { RequestOptions } from "@modelcontextprotocol/sdk/shared/protocol.js";
+import {
+ ClientRequest,
+ EmptyResultSchema,
+} from "@modelcontextprotocol/sdk/types.js";
+import { Activity, CheckCircle, Clock, XCircle, Zap } from "lucide-react";
+import { useState } from "react";
+import { toast } from "sonner";
+import { z } from "zod";
+
+import { Button } from "@/components/ui/button";
+
+interface PingHistory {
+ id: string;
+ timestamp: Date;
+ success: boolean;
+ duration: number;
+ error?: string;
+ method: string;
+}
+
+interface InspectorPingProps {
+  makeRequest: <T extends z.ZodType>(
+    request: ClientRequest,
+    schema: T,
+    options?: RequestOptions & { suppressToast?: boolean },
+  ) => Promise<z.infer<T>>;
+}
+
+export function InspectorPing({ makeRequest }: InspectorPingProps) {
+  const [pinging, setPinging] = useState(false);
+  const [pingHistory, setPingHistory] = useState<PingHistory[]>([]);
+  const [currentPing, setCurrentPing] = useState<PingHistory | null>(null);
+
+ const handlePing = async () => {
+ setPinging(true);
+ const startTime = Date.now();
+ const pingId = `ping-${Date.now()}`;
+
+ const newPing: PingHistory = {
+ id: pingId,
+ timestamp: new Date(),
+ success: false,
+ duration: 0,
+ method: "ping",
+ };
+
+ setCurrentPing(newPing);
+
+ try {
+ // First try the standard ping method
+ await makeRequest(
+ {
+ method: "ping" as const,
+ params: {},
+ },
+ EmptyResultSchema,
+ { suppressToast: true, timeout: 5000 },
+ );
+
+ const duration = Date.now() - startTime;
+ const successPing = {
+ ...newPing,
+ success: true,
+ duration,
+ method: "ping",
+ };
+
+ setCurrentPing(successPing);
+ setPingHistory((prev) => [successPing, ...prev].slice(0, 10)); // Keep last 10 pings
+ toast.success(`Ping successful (${duration}ms)`);
+ } catch (_pingError) {
+ // If ping method doesn't exist, try a fallback method
+ try {
+ const fallbackStartTime = Date.now();
+
+ // Try tools/list as a fallback connectivity test
+ await makeRequest(
+ {
+ method: "tools/list" as const,
+ params: {},
+ },
+ z.object({ tools: z.array(z.any()) }).passthrough(),
+ { suppressToast: true, timeout: 3000 },
+ );
+
+ const duration = Date.now() - fallbackStartTime;
+ const successPing = {
+ ...newPing,
+ success: true,
+ duration,
+ method: "tools/list (fallback)",
+ };
+
+ setCurrentPing(successPing);
+ setPingHistory((prev) => [successPing, ...prev].slice(0, 10));
+ toast.success(`Server responsive via fallback (${duration}ms)`);
+ } catch (fallbackError) {
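+        // Measure from the original start time so the reported duration covers both attempts.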
+ const duration = Date.now() - startTime;
+ const failedPing = {
+ ...newPing,
+ success: false,
+ duration,
+ error:
+ fallbackError instanceof Error
+ ? fallbackError.message
+ : String(fallbackError),
+ method: "ping + fallback",
+ };
+
+ setCurrentPing(failedPing);
+ setPingHistory((prev) => [failedPing, ...prev].slice(0, 10));
+ toast.error(`Ping failed (${duration}ms)`, {
+ description:
+ fallbackError instanceof Error
+ ? fallbackError.message
+ : String(fallbackError),
+ });
+ }
+ } finally {
+ setPinging(false);
+ }
+ };
+
+ const clearHistory = () => {
+ setPingHistory([]);
+ setCurrentPing(null);
+ };
+
+ const formatDuration = (duration: number) => {
+ if (duration < 1000) {
+ return `${duration}ms`;
+ }
+ return `${(duration / 1000).toFixed(2)}s`;
+ };
+
+ const getStatusColor = (success: boolean) => {
+ return success ? "text-green-600" : "text-red-600";
+ };
+
+ const getStatusIcon = (success: boolean) => {
+ return success ? CheckCircle : XCircle;
+ };
+
+ const getAverageResponseTime = () => {
+ if (pingHistory.length === 0) return 0;
+ const successfulPings = pingHistory.filter((p) => p.success);
+ if (successfulPings.length === 0) return 0;
+ return Math.round(
+ successfulPings.reduce((sum, ping) => sum + ping.duration, 0) /
+ successfulPings.length,
+ );
+ };
+
+ const getSuccessRate = () => {
+ if (pingHistory.length === 0) return 0;
+ const successfulPings = pingHistory.filter((p) => p.success).length;
+ return Math.round((successfulPings / pingHistory.length) * 100);
+ };
+
+ const getLatestPings = (count: number) => {
+ return pingHistory.slice(0, count);
+ };
+
+ return (
+
+ {/* Header */}
+
+
+
+ Server Connectivity Test
+
+
+
+
+
+
+
+ {/* Current Status */}
+ {currentPing && (
+
+ Current Status
+
+
+ {(() => {
+ const StatusIcon = getStatusIcon(currentPing.success);
+ return (
+
+ );
+ })()}
+
+
+ {currentPing.success
+ ? "Server Responsive"
+ : "Server Unreachable"}
+
+
+ Response time: {formatDuration(currentPing.duration)}
+
+
+ Method: {currentPing.method}
+
+ {currentPing.error && (
+
+ Error: {currentPing.error}
+
+ )}
+
+
+
+ {currentPing.timestamp.toLocaleTimeString()}
+
+
+
+ )}
+
+ {/* Statistics */}
+ {pingHistory.length > 0 && (
+
+
+
+ {getSuccessRate()}%
+
+ Success Rate
+
+
+
+ {getAverageResponseTime()}ms
+
+ Avg Response
+
+
+
+ {pingHistory.length}
+
+ Total Pings
+
+
+ )}
+
+ {/* Ping History */}
+
+
+ Ping History ({pingHistory.length})
+
+ {pingHistory.length === 0 ? (
+
+
+
+ No ping history yet. Click "Ping Server" to test
+ connectivity.
+
+
+ ) : (
+
+ {getLatestPings(10).map((ping) => {
+ const StatusIcon = getStatusIcon(ping.success);
+ return (
+
+
+
+
+
+
+ {ping.success ? "Success" : "Failed"}
+
+
+ {formatDuration(ping.duration)}
+
+
+
+ {ping.timestamp.toLocaleString()} β’ {ping.method}
+
+ {ping.error && (
+
+ {ping.error}
+
+ )}
+
+
+
+ {/* Visual indicator */}
+
+ {Array.from({ length: 5 }).map((_, i) => (
+
+ ))}
+
+
+ );
+ })}
+
+ )}
+
+
+ {/* Info Section */}
+
+
+
+
+
+ About Ping
+
+
+ The ping test checks if the MCP server is responsive. It first
+ tries the standard "ping" method, and if that's not
+ available, falls back to testing with "tools/list" to
+ verify the server is reachable. Response times help assess server
+ performance.
+
+
+
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-prompts.tsx b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-prompts.tsx
new file mode 100644
index 00000000..701757a6
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-prompts.tsx
@@ -0,0 +1,535 @@
+/* eslint-disable @next/next/no-img-element */
+"use client";
+
+import { RequestOptions } from "@modelcontextprotocol/sdk/shared/protocol.js";
+import {
+ ClientRequest,
+ GetPromptResultSchema,
+ ListPromptsResultSchema,
+} from "@modelcontextprotocol/sdk/types.js";
+import {
+ AlertTriangle,
+ ChevronDown,
+ ChevronRight,
+ MessageSquare,
+ Play,
+ RefreshCw,
+} from "lucide-react";
+import { useCallback, useState } from "react";
+import { toast } from "sonner";
+import { z } from "zod";
+
+import { Button } from "@/components/ui/button";
+import { Input } from "@/components/ui/input";
+
+interface Prompt {
+ name: string;
+ description?: string;
+ arguments?: Array<{
+ name: string;
+ description?: string;
+ required?: boolean;
+ }>;
+}
+
+interface PromptMessage {
+ role: "user" | "assistant" | "system";
+ content: {
+ type: "text" | "image" | "audio" | "resource";
+ text?: string;
+ data?: string;
+ mimeType?: string;
+ resource?: {
+ uri: string;
+ mimeType?: string;
+ text?: string;
+ blob?: string;
+ };
+ };
+}
+
+interface PromptGetResponse {
+ description?: string;
+ messages: PromptMessage[];
+}
+
+interface InspectorPromptsProps {
+  makeRequest: <T extends z.ZodType>(
+    request: ClientRequest,
+    schema: T,
+    options?: RequestOptions & { suppressToast?: boolean },
+  ) => Promise<z.infer<T>>;
+ enabled?: boolean;
+}
+
+export function InspectorPrompts({
+ makeRequest,
+ enabled = true,
+}: InspectorPromptsProps) {
+  const [prompts, setPrompts] = useState<Prompt[]>([]);
+  const [loading, setLoading] = useState(false);
+  const [selectedPrompt, setSelectedPrompt] = useState<Prompt | null>(null);
+  const [promptArgs, setPromptArgs] = useState<Record<string, string>>({});
+  const [promptResult, setPromptResult] = useState<PromptGetResponse | null>(
+    null,
+  );
+  const [getting, setGetting] = useState(false);
+  const [expandedPrompt, setExpandedPrompt] = useState<string | null>(null);
+  const [nextCursor, setNextCursor] = useState<string | undefined>();
+
+ const fetchPrompts = useCallback(
+ async (cursor?: string) => {
+ if (!enabled) return;
+
+ setLoading(true);
+ try {
+ const response = await makeRequest(
+ {
+ method: "prompts/list" as const,
+ params: cursor ? { cursor } : {},
+ },
+ ListPromptsResultSchema,
+ { suppressToast: true },
+ );
+
+ if (cursor) {
+ // Append to existing prompts if we're fetching more
+ setPrompts((prev) => [...prev, ...(response.prompts || [])]);
+ } else {
+ // Replace prompts if this is the first fetch
+ setPrompts(response.prompts || []);
+ }
+
+ setNextCursor(response.nextCursor);
+
+ if (response.prompts && response.prompts.length === 0 && !cursor) {
+ toast.info("No prompts found on MCP server");
+ }
+ } catch (error) {
+ console.error("Error fetching prompts:", error);
+ toast.error("Failed to fetch prompts from MCP server", {
+ description: error instanceof Error ? error.message : String(error),
+ });
+ if (!cursor) {
+ setPrompts([]);
+ }
+ } finally {
+ setLoading(false);
+ }
+ },
+ [makeRequest, enabled],
+ );
+
+ const clearPrompts = () => {
+ setPrompts([]);
+ setSelectedPrompt(null);
+ setPromptResult(null);
+ setNextCursor(undefined);
+ };
+
+ const handlePromptGet = async () => {
+ if (!selectedPrompt) return;
+
+ setGetting(true);
+ setPromptResult(null);
+
+ try {
+ const response = await makeRequest(
+ {
+ method: "prompts/get" as const,
+ params: {
+ name: selectedPrompt.name,
+ arguments: promptArgs,
+ },
+ },
+ GetPromptResultSchema,
+ { suppressToast: true },
+ );
+
+ setPromptResult(response);
+ toast.success(`Prompt "${selectedPrompt.name}" retrieved successfully`);
+ } catch (error) {
+ console.error("Error getting prompt:", error);
+ toast.error(`Failed to get prompt "${selectedPrompt.name}"`, {
+ description: error instanceof Error ? error.message : String(error),
+ });
+ } finally {
+ setGetting(false);
+ }
+ };
+
+ const handleArgChange = (argName: string, value: string) => {
+ setPromptArgs((prev) => ({
+ ...prev,
+ [argName]: value,
+ }));
+ };
+
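+  // Render a single prompt message, handling text, image, audio, and embedded resource content.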
+ const renderMessage = (message: PromptMessage, index: number) => {
+ const getRoleColor = (role: string) => {
+ switch (role) {
+ case "user":
+ return "text-blue-700 bg-blue-50 border-blue-200";
+ case "assistant":
+ return "text-green-700 bg-green-50 border-green-200";
+ case "system":
+ return "text-orange-700 bg-orange-50 border-orange-200";
+ default:
+ return "text-gray-700 bg-gray-50 border-gray-200";
+ }
+ };
+
+ return (
+
+
+ {message.role}
+ ({message.content.type})
+
+ {message.content.type === "text" && message.content.text && (
+
+ {message.content.text}
+
+ )}
+ {message.content.type === "image" && (
+
+ [Image data - {message.content.mimeType || "unknown format"}]
+ {message.content.data && (
+
+ )}
+
+ )}
+ {message.content.type === "audio" && (
+
+ [Audio data - {message.content.mimeType || "unknown format"}]
+ {message.content.data && (
+
+ )}
+
+ )}
+ {message.content.type === "resource" && (
+
+ [Resource - {message.content.resource?.mimeType || "unknown format"}
+ ]
+
+
+ URI: {message.content.resource?.uri}
+
+ {message.content.resource?.text && (
+
+ {message.content.resource.text}
+
+ )}
+ {message.content.resource?.blob && (
+
+ [Binary data: {message.content.resource.blob.length} chars]
+
+ )}
+
+
+ )}
+
+ );
+ };
+
+ if (!enabled) {
+ return (
+
+
+ Prompts Not Supported
+
+ This MCP server doesn't support prompts.
+
+
+ );
+ }
+
+ return (
+
+ {/* Header */}
+
+
+
+
+ Prompts ({prompts.length})
+
+
+
+
+
+ {nextCursor && (
+
+ )}
+
+
+
+ {/* Prompts Grid */}
+
+ {/* Left: Prompt Selection and Arguments */}
+
+
+ Available Prompts
+ {loading && prompts.length === 0 ? (
+
+ Loading prompts...
+
+ ) : prompts.length === 0 ? (
+
+ Click "Load Prompts" to fetch available prompts from
+ the MCP server.
+
+ ) : (
+
+ {prompts.map((prompt) => (
+ {
+ setSelectedPrompt(prompt);
+ setPromptResult(null);
+ // Reset args when selecting a new prompt
+                    const initialArgs: Record<string, string> = {};
+ if (prompt.arguments) {
+ prompt.arguments.forEach((arg) => {
+ initialArgs[arg.name] = "";
+ });
+ }
+ setPromptArgs(initialArgs);
+ }}
+ >
+
+
+
+
+
+ {prompt.name}
+
+ {prompt.description && (
+
+ {prompt.description}
+
+ )}
+
+
+
+ {prompt.arguments && prompt.arguments.length > 0 && (
+
+ {prompt.arguments.length} arg
+ {prompt.arguments.length > 1 ? "s" : ""}
+
+ )}
+
+
+
+
+ {expandedPrompt === prompt.name && (
+
+
+ Name: {prompt.name}
+ {prompt.description && (
+ Description: {prompt.description}
+ )}
+
+ {prompt.arguments && prompt.arguments.length > 0 && (
+
+
+ Arguments:
+
+
+ {prompt.arguments.map((arg) => (
+
+
+
+ {arg.name}
+
+ {arg.required && (
+
+ required
+
+ )}
+
+ {arg.description && (
+
+ {arg.description}
+
+ )}
+
+ ))}
+
+
+ )}
+
+ )}
+
+ ))}
+
+ )}
+
+
+ {/* Arguments Form */}
+ {selectedPrompt &&
+ selectedPrompt.arguments &&
+ selectedPrompt.arguments.length > 0 && (
+
+ Arguments
+
+ {selectedPrompt.arguments.map((arg) => (
+
+
+ {arg.description && (
+
+ {arg.description}
+
+ )}
+
+ handleArgChange(arg.name, e.target.value)
+ }
+ placeholder={`Enter ${arg.name} value`}
+ className="text-xs"
+ />
+
+ ))}
+
+
+ )}
+
+ {/* Get Prompt Button */}
+ {selectedPrompt && (
+
+ )}
+
+
+ {/* Right: Prompt Result */}
+
+ Prompt Result
+ {!selectedPrompt ? (
+
+ Select a prompt to view its result
+
+ ) : getting ? (
+
+ Getting prompt result...
+
+ ) : !promptResult ? (
+
+ Click "Get Prompt" to retrieve the result for "
+ {selectedPrompt.name}"
+
+ ) : (
+
+ {promptResult.description && (
+
+
+ Description
+
+ {promptResult.description}
+
+ )}
+
+
+
+ Messages ({promptResult.messages.length})
+
+
+ {promptResult.messages.map((message, index) =>
+ renderMessage(message, index),
+ )}
+
+
+
+ )}
+
+
+
+ {/* Info Section */}
+
+
+
+
+
+ About Prompts
+
+
+ Prompts are reusable templates that can generate messages for AI
+ conversations. They can accept arguments to customize the
+ generated content and help maintain consistent interactions across
+ different contexts.
+
+
+
+
+
+ );
+}
diff --git a/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-resources.tsx b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-resources.tsx
new file mode 100644
index 00000000..70137970
--- /dev/null
+++ b/apps/frontend/app/(sidebar)/mcp-inspector/components/inspector/inspector-resources.tsx
@@ -0,0 +1,567 @@
+"use client";
+
+import { RequestOptions } from "@modelcontextprotocol/sdk/shared/protocol.js";
+import {
+ ClientRequest,
+ ListResourcesResultSchema,
+ ListResourceTemplatesResultSchema,
+ ReadResourceResultSchema,
+ Resource,
+ ResourceTemplate,
+} from "@modelcontextprotocol/sdk/types.js";
+import {
+ AlertTriangle,
+ ChevronDown,
+ ChevronRight,
+ Eye,
+ FileText,
+ Plus,
+ RefreshCw,
+} from "lucide-react";
+import { useCallback, useState } from "react";
+import { toast } from "sonner";
+import { z } from "zod";
+
+import { Button } from "@/components/ui/button";
+import { CodeBlock } from "@/components/ui/code-block";
+
+interface ResourceContent {
+ uri: string;
+ mimeType?: string;
+ text?: string;
+ blob?: string;
+}
+
+interface InspectorResourcesProps {
+  makeRequest: <T extends z.ZodType>(
+    request: ClientRequest,
+    schema: T,
+    options?: RequestOptions & { suppressToast?: boolean },
+  ) => Promise<z.infer<T>>;
+ enabled?: boolean;
+}
+
+export function InspectorResources({
+ makeRequest,
+ enabled = true,
+}: InspectorResourcesProps) {
+  const [resources, setResources] = useState<Resource[]>([]);
+  const [resourceTemplates, setResourceTemplates] = useState<
+    ResourceTemplate[]
+  >([]);
+  const [loading, setLoading] = useState(false);
+  const [templatesLoading, setTemplatesLoading] = useState(false);
+  const [selectedResource, setSelectedResource] = useState<Resource | null>(
+    null,
+  );
+  const [resourceContent, setResourceContent] =
+    useState<ResourceContent | null>(null);
+  const [reading, setReading] = useState(false);
+  const [expandedResource, setExpandedResource] = useState<string | null>(null);
+  const [nextResourceCursor, setNextResourceCursor] = useState<
+    string | undefined
+  >();
+  const [nextTemplateCursor, setNextTemplateCursor] = useState<
+    string | undefined
+  >();
+  const [resourceSubscriptions, setResourceSubscriptions] = useState<
+    Set<string>
+  >(new Set());
+
+ const fetchResources = useCallback(
+ async (cursor?: string) => {
+ if (!enabled) return;
+
+ setLoading(true);
+ try {
+ const response = await makeRequest(
+ {
+ method: "resources/list" as const,
+ params: cursor ? { cursor } : {},
+ },
+ ListResourcesResultSchema,
+ { suppressToast: true },
+ );
+
+ if (cursor) {
+ // Append to existing resources if we're fetching more
+ setResources((prev) => [...prev, ...(response.resources || [])]);
+ } else {
+ // Replace resources if this is the first fetch
+ setResources(response.resources || []);
+ }
+
+ setNextResourceCursor(response.nextCursor);
+
+ if (response.resources && response.resources.length === 0 && !cursor) {
+ toast.info("No resources found on MCP server");
+ }
+ } catch (error) {
+ console.error("Error fetching resources:", error);
+ toast.error("Failed to fetch resources from MCP server", {
+ description: error instanceof Error ? error.message : String(error),
+ });
+ if (!cursor) {
+ setResources([]);
+ }
+ } finally {
+ setLoading(false);
+ }
+ },
+ [makeRequest, enabled],
+ );
+
+ const fetchResourceTemplates = useCallback(
+ async (cursor?: string) => {
+ if (!enabled) return;
+
+ setTemplatesLoading(true);
+ try {
+ const response = await makeRequest(
+ {
+ method: "resources/templates/list" as const,
+ params: cursor ? { cursor } : {},
+ },
+ ListResourceTemplatesResultSchema,
+ { suppressToast: true },
+ );
+
+ if (cursor) {
+ // Append to existing templates if we're fetching more
+ setResourceTemplates((prev) => [
+ ...prev,
+ ...(response.resourceTemplates || []),
+ ]);
+ } else {
+ // Replace templates if this is the first fetch
+ setResourceTemplates(response.resourceTemplates || []);
+ }
+
+ setNextTemplateCursor(response.nextCursor);
+
+ if (
+ response.resourceTemplates &&
+ response.resourceTemplates.length === 0 &&
+ !cursor
+ ) {
+ toast.info("No resource templates found on MCP server");
+ }
+ } catch (error) {
+ console.error("Error fetching resource templates:", error);
+ // Templates are optional, so don't show error toast for missing capability
+ if (!cursor) {
+ setResourceTemplates([]);
+ }
+ } finally {
+ setTemplatesLoading(false);
+ }
+ },
+ [makeRequest, enabled],
+ );
+
+ const clearResources = () => {
+ setResources([]);
+ setSelectedResource(null);
+ setResourceContent(null);
+ setNextResourceCursor(undefined);
+ };
+
+ const clearResourceTemplates = () => {
+ setResourceTemplates([]);
+ setNextTemplateCursor(undefined);
+ };
+
+ const handleResourceRead = async (resource: Resource) => {
+ setReading(true);
+ setResourceContent(null);
+
+ try {
+ const response = await makeRequest(
+ {
+ method: "resources/read" as const,
+ params: {
+ uri: resource.uri,
+ },
+ },
+ ReadResourceResultSchema,
+ { suppressToast: true },
+ );
+
+ if (response?.contents && response.contents.length > 0) {
+ setResourceContent(response.contents[0]!);
+ toast.success(
+ `Resource "${resource.name || resource.uri}" read successfully`,
+ );
+ } else {
+ toast.error("No content found in resource");
+ }
+ } catch (error) {
+ console.error("Error reading resource:", error);
+ toast.error(
+ `Failed to read resource "${resource.name || resource.uri}"`,
+ {
+ description: error instanceof Error ? error.message : String(error),
+ },
+ );
+ } finally {
+ setReading(false);
+ }
+ };
+
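+  // resources/subscribe and resources/unsubscribe return empty results, so a bare
+  // z.object({}) is enough to validate the responses.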
+ const subscribeToResource = async (uri: string) => {
+ if (resourceSubscriptions.has(uri)) return;
+
+ try {
+ await makeRequest(
+ {
+ method: "resources/subscribe" as const,
+ params: { uri },
+ },
+ z.object({}),
+ { suppressToast: true },
+ );
+
+ const newSubscriptions = new Set(resourceSubscriptions);
+ newSubscriptions.add(uri);
+ setResourceSubscriptions(newSubscriptions);
+
+ toast.success(`Subscribed to resource "${uri}"`);
+ } catch (error) {
+ console.error("Error subscribing to resource:", error);
+ toast.error(`Failed to subscribe to resource "${uri}"`, {
+ description: error instanceof Error ? error.message : String(error),
+ });
+ }
+ };
+
+ const unsubscribeFromResource = async (uri: string) => {
+ if (!resourceSubscriptions.has(uri)) return;
+
+ try {
+ await makeRequest(
+ {
+ method: "resources/unsubscribe" as const,
+ params: { uri },
+ },
+ z.object({}),
+ { suppressToast: true },
+ );
+
+ const newSubscriptions = new Set(resourceSubscriptions);
+ newSubscriptions.delete(uri);
+ setResourceSubscriptions(newSubscriptions);
+
+ toast.success(`Unsubscribed from resource "${uri}"`);
+ } catch (error) {
+ console.error("Error unsubscribing from resource:", error);
+ toast.error(`Failed to unsubscribe from resource "${uri}"`, {
+ description: error instanceof Error ? error.message : String(error),
+ });
+ }
+ };
+
+ const formatResourceContent = (content: ResourceContent) => {
+ if (content.text) {
+ return content.text;
+ } else if (content.blob) {
+ return `[Binary content - ${content.blob.length} characters]`;
+ }
+ return "[No content available]";
+ };
+
+ const getResourceDisplayName = (resource: Resource) => {
+ return resource.name || resource.uri.split("/").pop() || resource.uri;
+ };
+
+ if (!enabled) {
+ return (
+
+
+ Resources Not Supported
+
+ This MCP server doesn't support resources.
+
+
+ );
+ }
+
+ return (
+
+ {/* Header */}
+
+
+
+
+ Resources ({resources.length})
+
+
+
+
+
+ {nextResourceCursor && (
+
+ )}
+
+
+
+ {/* Resource Templates Section */}
+ {resourceTemplates.length > 0 && (
+
+
+
+
+
+ Resource Templates ({resourceTemplates.length})
+
+
+
+
+ {nextTemplateCursor && (
+
+ )}
+
+
+
+
+ {resourceTemplates.map((template) => (
+
+
+
+
+ {template.name || "Unnamed Template"}
+
+
+ {template.description && (
+
+ {template.description}
+
+ )}
+
+ {template.uriTemplate}
+
+
+ ))}
+
+
+ )}
+
+ {/* Resources List */}
+
+ {/* Left: Resource List */}
+
+ Available Resources
+ {loading && resources.length === 0 ? (
+
+ Loading resources...
+
+ ) : resources.length === 0 ? (
+
+ Click "Load Resources" to fetch available resources from
+ the MCP server.
+
+ ) : (
+
+ {resources.map((resource) => (
+ {
+ setSelectedResource(resource);
+ setExpandedResource(resource.uri);
+ }}
+ >
+
+
+