diff --git a/mcpgateway/alembic/versions/878e287d2366_resize_url_and_slug_columns_to_191.py b/mcpgateway/alembic/versions/878e287d2366_resize_url_and_slug_columns_to_191.py new file mode 100644 index 000000000..a6abe3bab --- /dev/null +++ b/mcpgateway/alembic/versions/878e287d2366_resize_url_and_slug_columns_to_191.py @@ -0,0 +1,130 @@ +"""Resize url and slug columns to 191 + +Revision ID: 878e287d2366 +Revises: h2b3c4d5e6f7 +Create Date: 2025-10-08 09:08:35.363100 + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa + +# revision identifiers, used by Alembic. +revision: str = '878e287d2366' +down_revision: Union[str, Sequence[str], None] = 'h2b3c4d5e6f7' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + # Get database dialect for dialect-specific operations + bind = op.get_bind() + dialect_name = bind.dialect.name + + # Truncate existing values longer than 191 chars using dialect-appropriate functions + if dialect_name == 'sqlite': + # SQLite uses SUBSTR and LENGTH + op.execute(""" + UPDATE gateways + SET slug = SUBSTR(slug, 1, 191), + url = SUBSTR(url, 1, 191) + WHERE LENGTH(slug) > 191 OR LENGTH(url) > 191; + """) + elif dialect_name == 'postgresql': + # PostgreSQL supports LEFT and CHAR_LENGTH + op.execute(""" + UPDATE gateways + SET slug = LEFT(slug, 191), + url = LEFT(url, 191) + WHERE CHAR_LENGTH(slug) > 191 OR CHAR_LENGTH(url) > 191; + """) + elif dialect_name == 'mysql': + # MySQL supports LEFT and CHAR_LENGTH (character-based, not byte-based) + op.execute(""" + UPDATE gateways + SET slug = LEFT(slug, 191), + url = LEFT(url, 191) + WHERE CHAR_LENGTH(slug) > 191 OR CHAR_LENGTH(url) > 191; + """) + else: + # Fallback for other databases + op.execute(""" + UPDATE gateways + SET slug = SUBSTR(slug, 1, 191), + url = SUBSTR(url, 1, 191) + WHERE LENGTH(slug) > 191 OR LENGTH(url) > 191; + """) + + # Resize 
columns to String(191) + # SQLite requires batch operations for ALTER COLUMN + if dialect_name == 'sqlite': + with op.batch_alter_table('gateways', schema=None) as batch_op: + batch_op.alter_column( + 'slug', + existing_type=sa.String(length=255), + type_=sa.String(length=191), + existing_nullable=False + ) + batch_op.alter_column( + 'url', + existing_type=sa.String(length=767), + type_=sa.String(length=191), + existing_nullable=False + ) + else: + # PostgreSQL and MySQL support direct ALTER COLUMN + op.alter_column( + 'gateways', + 'slug', + existing_type=sa.String(length=255), + type_=sa.String(length=191), + existing_nullable=False + ) + op.alter_column( + 'gateways', + 'url', + existing_type=sa.String(length=767), + type_=sa.String(length=191), + existing_nullable=False + ) + + +def downgrade() -> None: + """Downgrade schema.""" + # Get database dialect for dialect-specific operations + bind = op.get_bind() + dialect_name = bind.dialect.name + + # SQLite requires batch operations for ALTER COLUMN + if dialect_name == 'sqlite': + with op.batch_alter_table('gateways', schema=None) as batch_op: + batch_op.alter_column( + 'slug', + existing_type=sa.String(length=191), + type_=sa.String(length=255), + existing_nullable=False + ) + batch_op.alter_column( + 'url', + existing_type=sa.String(length=191), + type_=sa.String(length=767), + existing_nullable=False + ) + else: + # PostgreSQL and MySQL support direct ALTER COLUMN + op.alter_column( + 'gateways', + 'slug', + existing_type=sa.String(length=191), + type_=sa.String(length=255), + existing_nullable=False + ) + op.alter_column( + 'gateways', + 'url', + existing_type=sa.String(length=191), + type_=sa.String(length=767), + existing_nullable=False + ) diff --git a/mcpgateway/db.py b/mcpgateway/db.py index 5e5e97afe..bf5b37507 100644 --- a/mcpgateway/db.py +++ b/mcpgateway/db.py @@ -2430,8 +2430,8 @@ class Gateway(Base): id: Mapped[str] = mapped_column(String(36), primary_key=True, default=lambda: uuid.uuid4().hex) 
name: Mapped[str] = mapped_column(String(255), nullable=False) - slug: Mapped[str] = mapped_column(String(255), nullable=False) - url: Mapped[str] = mapped_column(String(767), nullable=False) + slug: Mapped[str] = mapped_column(String(191), nullable=False) + url: Mapped[str] = mapped_column(String(191), nullable=False) description: Mapped[Optional[str]] = mapped_column(Text, nullable=True) transport: Mapped[str] = mapped_column(String(20), default="SSE") capabilities: Mapped[Dict[str, Any]] = mapped_column(JSON) diff --git a/tests/unit/mcpgateway/test_translate_stdio_endpoint.py b/tests/unit/mcpgateway/test_translate_stdio_endpoint.py index 708d605ed..d61503d08 100644 --- a/tests/unit/mcpgateway/test_translate_stdio_endpoint.py +++ b/tests/unit/mcpgateway/test_translate_stdio_endpoint.py @@ -8,7 +8,7 @@ Tests for StdIOEndpoint class modifications to support dynamic environment variables. """ - +import os import asyncio import json import logging @@ -271,13 +271,14 @@ async def test_multiple_env_vars(self, test_script, caplog): pubsub = _PubSub() - env_vars = { + env_vars = os.environ.copy() + env_vars.update({ "GITHUB_TOKEN": "github-token-123", "TENANT_ID": "acme-corp", "API_KEY": "api-key-456", "ENVIRONMENT": "production", "DEBUG": "false", - } + }) endpoint = StdIOEndpoint( "jq -cMn env", pubsub, env_vars) diff --git a/tests/unit/test_tool_service_output_schema.py b/tests/unit/test_tool_service_output_schema.py new file mode 100644 index 000000000..9aebf6f10 --- /dev/null +++ b/tests/unit/test_tool_service_output_schema.py @@ -0,0 +1,81 @@ +import asyncio +from unittest.mock import MagicMock + +import pytest + +from mcpgateway.services.tool_service import ToolService +from mcpgateway.models import TextContent + + +class FakeResponse: + def __init__(self, json_data, status_code=200): + self._json = json_data + self.status_code = status_code + + def json(self): + return self._json + + def raise_for_status(self): + return None + + +class FakeHttpClient: + def 
__init__(self, response: FakeResponse): + self._response = response + + async def request(self, method, url, json=None, headers=None): + return self._response + + async def get(self, url, params=None, headers=None): + return self._response + + +class DummyTool: + def __init__(self): + self.name = "dummy" + self.enabled = True + self.reachable = True + self.integration_type = "REST" + self.url = "http://example.local" + self.request_type = "POST" + self.headers = {} + self.auth_type = None + self.auth_value = None + self.jsonpath_filter = "" + # Provide an output_schema to trigger structured-content behavior + self.output_schema = {"type": "object", "properties": {"y": {"type": "number"}}} + # Minimal attributes expected by ToolService.invoke_tool + self.id = 1 + self.gateway_id = None + + +@pytest.mark.asyncio +async def test_invoke_tool_returns_structured_content_when_output_schema_present(): + svc = ToolService() + + # fake DB that returns our dummy tool for the select + db = MagicMock() + fake_tool = DummyTool() + # db.execute(...).scalar_one_or_none() should return the tool + m = MagicMock() + m.scalar_one_or_none.return_value = fake_tool + db.execute.return_value = m + + # Replace the http client with a fake response returning JSON + svc._http_client = FakeHttpClient(FakeResponse({"y": 10.0, "z": 20.0, "result": 30.0}, status_code=200)) + + result = await svc.invoke_tool(db, "dummy", {}) + + dumped = result.model_dump() + assert isinstance(dumped, dict) + # New behavior: when structuredContent is present and valid we remove + # the unstructured textual `content` entry and return the parsed object + # in `structuredContent` (clients should prefer structuredContent). 
+ assert "structuredContent" in dumped + structured = dumped["structuredContent"] + assert isinstance(structured, dict) + assert structured.get("y") == 10.0 + assert structured.get("z") == 20.0 + assert structured.get("result") == 30.0 + # content may be empty when structuredContent is valid + assert dumped.get("content", []) == []