From 8db3bc1f9bd0ff7fec6ff85cc1d0d1ad99ac45df Mon Sep 17 00:00:00 2001 From: Sara Date: Fri, 30 May 2025 21:54:49 -0400 Subject: [PATCH 01/17] adds debug handling --- src/devrev_mcp/server.py | 472 ++++++++++++++++++++++++++++++--------- 1 file changed, 365 insertions(+), 107 deletions(-) diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index ccc1350..02ea633 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -8,6 +8,9 @@ import asyncio import os import requests +import json +import traceback +from functools import wraps from mcp.server.models import InitializationOptions import mcp.types as types @@ -18,6 +21,283 @@ server = Server("devrev_mcp") +# Store DevRev resources (works, comments, etc.) for resource access +devrev_cache = {} + +# Check debug mode and store state +DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" +DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" + +def debug_error_handler(func): + """ + Decorator that catches exceptions in MCP tools and returns detailed debug information + as the tool response when DRMCP_DEBUG=1. + """ + debug_enabled = DEBUG_ENABLED + + @wraps(func) + async def wrapper(*args, **kwargs): + try: + result = await func(*args, **kwargs) + # Add debug message to all tool responses when debug is enabled + if debug_enabled and result: + # Add debug message as first item in response + debug_content = types.TextContent( + type="text", + text=f"{DEBUG_MESSAGE}\n\n" + ) + if isinstance(result, list) and len(result) > 0 and hasattr(result[0], 'text'): + result[0].text = debug_content.text + result[0].text + else: + result.insert(0, debug_content) + return result + except Exception as e: + if debug_enabled: + # Debug mode: return detailed error information + error_message = f"""ERROR (Debug Mode): {type(e).__name__}: {str(e)} + +Full traceback: +{traceback.format_exc()} + +This is a debug error response. 
Let's troubleshoot this together. + +{DEBUG_MESSAGE}""" + else: + # Production mode: return generic error message + error_message = f"An error occurred while executing the tool. Please try again or contact support." + + return [ + types.TextContent( + type="text", + text=error_message + ) + ] + + return wrapper + +@debug_error_handler +async def search_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """Handle search tool execution.""" + if not arguments: + raise ValueError("Missing arguments") + + query = arguments.get("query") + if not query: + raise ValueError("Missing query parameter") + + namespace = arguments.get("namespace") + if not namespace: + raise ValueError("Missing namespace parameter") + + response = make_devrev_request( + "search.hybrid", + {"query": query, "namespace": namespace} + ) + if response.status_code != 200: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Search failed with status {response.status_code}: {error_text}" + ) + ] + + search_results = response.json() + return [ + types.TextContent( + type="text", + text=f"Search results for '{query}':\n{search_results}" + ) + ] + +@debug_error_handler +async def get_work_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """Handle get_work tool execution.""" + if not arguments: + raise ValueError("Missing arguments") + + id = arguments.get("id") + if not id: + raise ValueError("Missing id parameter") + + response = make_devrev_request( + "works.get", + {"id": id} + ) + if response.status_code != 200: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Get work failed with status {response.status_code}: {error_text}" + ) + ] + + object_info = response.json() + # Cache the work data for resource access + devrev_cache[id] = json.dumps(object_info) + return [ + types.TextContent( + type="text", + text=f"Work information 
for '{id}':\n{object_info}" + ) + ] + +@debug_error_handler +async def create_work_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """Handle create_work tool execution.""" + if not arguments: + raise ValueError("Missing arguments") + + # Mandatory fields + object_type = arguments.get("type") + if not object_type: + raise ValueError("Missing type parameter") + + title = arguments.get("title") + if not title: + raise ValueError("Missing title parameter") + + applies_to_part = arguments.get("applies_to_part") + if not applies_to_part: + raise ValueError("Missing applies_to_part parameter") + + # Optional fields + body = arguments.get("body", "") + owned_by = arguments.get("owned_by", []) + + response = make_devrev_request( + "works.create", + { + "type": object_type, + "title": title, + "body": body, + "applies_to_part": applies_to_part, + "owned_by": owned_by + } + ) + if response.status_code != 201: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Create work failed with status {response.status_code}: {error_text}" + ) + ] + + created_work = response.json() + # Cache the created work data for resource access + if 'work' in created_work and 'id' in created_work['work']: + work_id = created_work['work']['id'] + devrev_cache[work_id] = json.dumps(created_work['work']) + + return [ + types.TextContent( + type="text", + text=f"Work created successfully: {created_work}" + ) + ] + +@debug_error_handler +async def get_timeline_entries_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """Handle get_timeline_entries tool execution.""" + if not arguments: + raise ValueError("Missing arguments") + + # Debug: check arguments type + if not isinstance(arguments, dict): + return [ + types.TextContent( + type="text", + text=f"Error: arguments is not a dict but {type(arguments)}: {arguments}" + ) + ] + + object_id = 
arguments.get("object_id") + if not object_id: + raise ValueError("Missing object_id parameter") + + try: + response = make_devrev_request( + "timeline-entries.list", + {"object": object_id} + ) + except Exception as e: + return [ + types.TextContent( + type="text", + text=f"Error making timeline request: {e}" + ) + ] + if response.status_code != 200: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Get timeline entries failed with status {response.status_code}: {error_text}" + ) + ] + + timeline_data = response.json() + + # Cache individual timeline entries as resources and build summary + entry_summary = [] + entry_count = 0 + if 'timeline_entries' in timeline_data: + for i, entry in enumerate(timeline_data['timeline_entries']): + # Debug: check entry type + if not isinstance(entry, dict): + return [ + types.TextContent( + type="text", + text=f"Error: Entry {i} is not a dict but {type(entry)}: {entry}" + ) + ] + if 'id' in entry: + entry_id = entry['id'] + devrev_cache[entry_id] = json.dumps(entry) + entry_count += 1 + + # Add summary info for this entry + entry_info = { + 'id': entry_id, + 'type': entry.get('type', 'unknown'), + 'created_date': entry.get('created_date'), + 'visibility': entry.get('visibility', {}).get('label', 'unknown') + } + + # Add type-specific summary info + if entry.get('type') == 'timeline_comment': + body_preview = entry.get('body', '')[:100] + ('...' if len(entry.get('body', '')) > 100 else '') + entry_info['body_preview'] = body_preview + entry_info['created_by'] = entry.get('created_by', {}).get('display_name', 'unknown') + + entry_summary.append(entry_info) + + summary_text = f"""Timeline entries for '{object_id}': +Total entries: {entry_count} +Entries cached as resources (access via devrev://): + +""" + + for i, entry in enumerate(entry_summary[:10]): # Show first 10 entries in summary + summary_text += f"{i+1}. 
{entry['id']} ({entry['type']}) - {entry.get('created_date', 'no date')}\n" + if 'body_preview' in entry: + summary_text += f" Preview: {entry['body_preview']}\n" + if 'created_by' in entry: + summary_text += f" By: {entry['created_by']}\n" + summary_text += "\n" + + if entry_count > 10: + summary_text += f"... and {entry_count - 10} more entries (all available as resources)\n" + + return [ + types.TextContent( + type="text", + text=summary_text + ) + ] + @server.list_tools() async def handle_list_tools() -> list[types.Tool]: """ @@ -38,7 +318,7 @@ async def handle_list_tools() -> list[types.Tool]: }, ), types.Tool( - name="get_object", + name="get_work", description="Get all information about a DevRev issue and ticket using its ID", inputSchema={ "type": "object", @@ -49,7 +329,7 @@ async def handle_list_tools() -> list[types.Tool]: }, ), types.Tool( - name="create_object", + name="create_work", description="Create a new isssue or ticket in DevRev", inputSchema={ "type": "object", @@ -62,9 +342,78 @@ async def handle_list_tools() -> list[types.Tool]: }, "required": ["type", "title", "applies_to_part"], }, + ), + types.Tool( + name="get_timeline_entries", + description="Get timeline entries for a DevRev object (ticket, issue, etc.)", + inputSchema={ + "type": "object", + "properties": { + "object_id": {"type": "string"}, + }, + "required": ["object_id"], + }, ) ] +@server.list_resources() +async def handle_list_resources() -> list[types.Resource]: + """ + List available resources. + Each resource can be accessed via the read_resource handler. 
+ """ + resources = [] + for resource_id in devrev_cache.keys(): + resource_data = devrev_cache[resource_id] + if ':comment/' in resource_id: + # Timeline comment resource + resources.append( + types.Resource( + uri=AnyUrl(f"devrev://{resource_id}"), + name=f"Comment {resource_id.split('/')[-1]}", + description=f"DevRev timeline comment {resource_id}", + mimeType="application/json" + ) + ) + else: + # Work item or other resource + resources.append( + types.Resource( + uri=AnyUrl(f"devrev://{resource_id}"), + name=f"DevRev {resource_id.split('/')[-2] if '/' in resource_id else 'Resource'} {resource_id.split('/')[-1] if '/' in resource_id else resource_id}", + description=f"DevRev resource {resource_id}", + mimeType="application/json" + ) + ) + return resources + +@server.read_resource() +async def handle_read_resource(uri: AnyUrl) -> str: + """ + Read a specific resource by URI. + """ + uri_str = str(uri) + if uri_str.startswith("devrev://"): + resource_id = uri_str.replace("devrev://", "") + if resource_id in devrev_cache: + return devrev_cache[resource_id] + else: + # If not in cache, try to fetch it based on resource type + if ':comment/' in resource_id: + # Timeline comment - cannot fetch individual comments directly + raise ValueError(f"Timeline comment {resource_id} not found in cache") + else: + # Assume it's a work item + response = make_devrev_request("works.get", {"id": resource_id}) + if response.status_code == 200: + resource_data = response.json() + devrev_cache[resource_id] = json.dumps(resource_data) + return json.dumps(resource_data) + else: + raise ValueError(f"Resource {resource_id} not found") + else: + raise ValueError(f"Unknown resource URI: {uri}") + @server.call_tool() async def handle_call_tool( name: str, arguments: dict | None @@ -73,116 +422,25 @@ async def handle_call_tool( Handle tool execution requests. Tools can modify server state and notify clients of changes. 
""" + # Route to appropriate tool handler if name == "search": - if not arguments: - raise ValueError("Missing arguments") - - query = arguments.get("query") - if not query: - raise ValueError("Missing query parameter") - - namespace = arguments.get("namespace") - if not namespace: - raise ValueError("Missing namespace parameter") - - response = make_devrev_request( - "search.hybrid", - {"query": query, "namespace": namespace} - ) - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Search failed with status {response.status_code}: {error_text}" - ) - ] - - search_results = response.json() - return [ - types.TextContent( - type="text", - text=f"Search results for '{query}':\n{search_results}" - ) - ] - elif name == "get_object": - if not arguments: - raise ValueError("Missing arguments") - - id = arguments.get("id") - if not id: - raise ValueError("Missing id parameter") - - response = make_devrev_request( - "works.get", - {"id": id} - ) - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Get object failed with status {response.status_code}: {error_text}" - ) - ] - - object_info = response.json() - return [ - types.TextContent( - type="text", - text=f"Object information for '{id}':\n{object_info}" - ) - ] - elif name == "create_object": - if not arguments: - raise ValueError("Missing arguments") - - # Mandatory fields - object_type = arguments.get("type") - if not object_type: - raise ValueError("Missing type parameter") - - title = arguments.get("title") - if not title: - raise ValueError("Missing title parameter") - - applies_to_part = arguments.get("applies_to_part") - if not applies_to_part: - raise ValueError("Missing applies_to_part parameter") - - # Optional fields - body = arguments.get("body", "") - owned_by = arguments.get("owned_by", []) - - response = make_devrev_request( - "works.create", - { - "type": object_type, - 
"title": title, - "body": body, - "applies_to_part": applies_to_part, - "owned_by": owned_by - } - ) - if response.status_code != 201: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Create object failed with status {response.status_code}: {error_text}" - ) - ] - - return [ - types.TextContent( - type="text", - text=f"Object created successfully: {response.json()}" - ) - ] + return await search_tool(arguments) + elif name == "get_work": + return await get_work_tool(arguments) + elif name == "create_work": + return await create_work_tool(arguments) + elif name == "get_timeline_entries": + return await get_timeline_entries_tool(arguments) else: raise ValueError(f"Unknown tool: {name}") async def main(): + # Check if debug mode is enabled and print debug message + if DEBUG_ENABLED: + print(DEBUG_MESSAGE) + else: + print(DEBUG_MESSAGE) + # Run the server using stdin/stdout streams async with mcp.server.stdio.stdio_server() as (read_stream, write_stream): await server.run( From e89f337dce9ef14f8ed27bcfd3ee9e4d4ad09e8a Mon Sep 17 00:00:00 2001 From: Sara Date: Fri, 30 May 2025 23:52:53 -0400 Subject: [PATCH 02/17] separates tools out into their own file --- mcp_wrapper.py | 230 ++++++++++++ pyproject.toml | 2 +- run-mcp.sh | 15 + src/devrev_mcp/debug.py | 70 ++++ src/devrev_mcp/server.py | 359 ++----------------- src/devrev_mcp/tools/__init__.py | 23 ++ src/devrev_mcp/tools/base.py | 63 ++++ src/devrev_mcp/tools/create_work.py | 96 +++++ src/devrev_mcp/tools/get_timeline_entries.py | 137 +++++++ src/devrev_mcp/tools/get_work.py | 72 ++++ src/devrev_mcp/tools/search.py | 64 ++++ src/devrev_mcp/utils.py | 16 +- uv.lock | 288 ++++++++------- 13 files changed, 961 insertions(+), 474 deletions(-) create mode 100755 mcp_wrapper.py create mode 100755 run-mcp.sh create mode 100644 src/devrev_mcp/debug.py create mode 100644 src/devrev_mcp/tools/__init__.py create mode 100644 src/devrev_mcp/tools/base.py create mode 100644 
src/devrev_mcp/tools/create_work.py create mode 100644 src/devrev_mcp/tools/get_timeline_entries.py create mode 100644 src/devrev_mcp/tools/get_work.py create mode 100644 src/devrev_mcp/tools/search.py diff --git a/mcp_wrapper.py b/mcp_wrapper.py new file mode 100755 index 0000000..cef3619 --- /dev/null +++ b/mcp_wrapper.py @@ -0,0 +1,230 @@ +#!/usr/bin/env python3 +""" +MCP Server Wrapper with Watchdog File Monitoring + +This wrapper manages the actual MCP server as a subprocess and restarts it +when source files change, while maintaining a stable connection to the MCP client. +""" + +import os +import sys +import time +import signal +import subprocess +import threading +from pathlib import Path +from watchdog.observers import Observer +from watchdog.events import FileSystemEventHandler + + +class MCPServerManager: + def __init__(self, server_command, watch_dirs, watch_files): + self.server_command = server_command + self.watch_dirs = watch_dirs + self.watch_files = watch_files + self.server_process = None + self.observer = None + self.restart_requested = False + self.last_restart = 0 + self.restart_delay = 1.0 # Minimum seconds between restarts + + def start_server(self): + """Start the MCP server subprocess.""" + if self.server_process and self.server_process.poll() is None: + return # Already running + + print(f"๐Ÿš€ Starting MCP server: {' '.join(self.server_command)}", file=sys.stderr) + + try: + self.server_process = subprocess.Popen( + self.server_command, + stdin=subprocess.PIPE, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + text=True, + bufsize=0 # Unbuffered for real-time communication + ) + print(f"๐Ÿ“ Server started with PID: {self.server_process.pid}", file=sys.stderr) + except Exception as e: + print(f"โŒ Failed to start server: {e}", file=sys.stderr) + sys.exit(1) + + def stop_server(self): + """Stop the MCP server subprocess.""" + if self.server_process and self.server_process.poll() is None: + print(f"๐Ÿ›‘ Stopping server (PID: 
{self.server_process.pid})", file=sys.stderr) + self.server_process.terminate() + try: + self.server_process.wait(timeout=5) + except subprocess.TimeoutExpired: + print("โš ๏ธ Server didn't stop gracefully, killing...", file=sys.stderr) + self.server_process.kill() + self.server_process.wait() + self.server_process = None + + def restart_server(self): + """Restart the MCP server subprocess.""" + current_time = time.time() + if current_time - self.last_restart < self.restart_delay: + return # Too soon to restart + + self.last_restart = current_time + print("๐Ÿ”„ Restarting MCP server...", file=sys.stderr) + self.stop_server() + time.sleep(0.5) # Brief pause + self.start_server() + print("โœ… Server restarted", file=sys.stderr) + + def setup_file_watcher(self): + """Set up file watching with watchdog.""" + class RestartHandler(FileSystemEventHandler): + def __init__(self, manager): + self.manager = manager + + def on_modified(self, event): + if event.is_directory: + return + + # Only watch Python files and pyproject.toml + if not (event.src_path.endswith('.py') or event.src_path.endswith('pyproject.toml')): + return + + print(f"๐Ÿ”„ File changed: {event.src_path}", file=sys.stderr) + self.manager.restart_server() + + self.observer = Observer() + handler = RestartHandler(self) + + # Watch directories + for watch_dir in self.watch_dirs: + if watch_dir.exists(): + self.observer.schedule(handler, str(watch_dir), recursive=True) + print(f"๐Ÿ“ Watching directory: {watch_dir}", file=sys.stderr) + + # Watch specific files by watching their parent directories + for watch_file in self.watch_files: + if watch_file.exists(): + self.observer.schedule(handler, str(watch_file.parent), recursive=False) + print(f"๐Ÿ“„ Watching file: {watch_file}", file=sys.stderr) + + self.observer.start() + print("๐Ÿ‘€ File watcher started", file=sys.stderr) + + def forward_io(self): + """Forward stdin/stdout between client and server subprocess.""" + def forward_stdin(): + """Forward stdin from 
client to server.""" + try: + while self.server_process and self.server_process.poll() is None: + line = sys.stdin.readline() + if not line: + break + if self.server_process and self.server_process.stdin: + self.server_process.stdin.write(line) + self.server_process.stdin.flush() + except Exception as e: + print(f"stdin forwarding error: {e}", file=sys.stderr) + + def forward_stdout(): + """Forward stdout from server to client.""" + try: + while self.server_process and self.server_process.poll() is None: + if self.server_process and self.server_process.stdout: + line = self.server_process.stdout.readline() + if not line: + break + sys.stdout.write(line) + sys.stdout.flush() + except Exception as e: + print(f"stdout forwarding error: {e}", file=sys.stderr) + + def forward_stderr(): + """Forward stderr from server to our stderr.""" + try: + while self.server_process and self.server_process.poll() is None: + if self.server_process and self.server_process.stderr: + line = self.server_process.stderr.readline() + if not line: + break + print(f"[SERVER] {line.rstrip()}", file=sys.stderr) + except Exception as e: + print(f"stderr forwarding error: {e}", file=sys.stderr) + + # Start forwarding threads + stdin_thread = threading.Thread(target=forward_stdin, daemon=True) + stdout_thread = threading.Thread(target=forward_stdout, daemon=True) + stderr_thread = threading.Thread(target=forward_stderr, daemon=True) + + stdin_thread.start() + stdout_thread.start() + stderr_thread.start() + + return stdin_thread, stdout_thread, stderr_thread + + def run(self): + """Main run loop.""" + # Set up signal handlers + def signal_handler(signum, frame): + print("๐Ÿงน Shutting down...", file=sys.stderr) + self.stop_server() + if self.observer: + self.observer.stop() + self.observer.join() + sys.exit(0) + + signal.signal(signal.SIGINT, signal_handler) + signal.signal(signal.SIGTERM, signal_handler) + + try: + # Start file watcher + self.setup_file_watcher() + + # Start server + 
self.start_server() + + # Set up IO forwarding + threads = self.forward_io() + + print("โœจ MCP wrapper ready! Server will auto-reload when files change.", file=sys.stderr) + + # Wait for server process + while True: + if self.server_process: + exit_code = self.server_process.poll() + if exit_code is not None: + print(f"โš ๏ธ Server exited with code {exit_code}", file=sys.stderr) + # Don't auto-restart if it was an intentional shutdown + if exit_code != 0: + time.sleep(1) + self.start_server() + time.sleep(0.1) + + except KeyboardInterrupt: + signal_handler(signal.SIGINT, None) + + +def main(): + """Main entry point.""" + # Configuration + server_dir = Path(__file__).parent + server_command = [ + "/Users/sara/.local/bin/uv", "run", "devrev-mcp" + ] + + watch_dirs = [server_dir / "src"] + watch_files = [server_dir / "pyproject.toml"] + + # Enable debug mode + os.environ["DRMCP_DEBUG"] = "1" + + print("๐Ÿ”„ Starting MCP Server Wrapper with Python watchdog", file=sys.stderr) + print("๐Ÿ› Debug mode: ENABLED", file=sys.stderr) + print("", file=sys.stderr) + + # Create and run manager + manager = MCPServerManager(server_command, watch_dirs, watch_files) + manager.run() + + +if __name__ == "__main__": + main() \ No newline at end of file diff --git a/pyproject.toml b/pyproject.toml index 0001b5e..f9862a7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ version = "0.1.1" description = "A MCP server project" readme = "README.md" requires-python = ">=3.11" -dependencies = [ "mcp>=1.0.0", "requests"] +dependencies = [ "mcp>=1.0.0", "requests", "watchdog"] [[project.authors]] name = "Sunil Pandey" email = "sunil.pandey@devrev.ai" diff --git a/run-mcp.sh b/run-mcp.sh new file mode 100755 index 0000000..e9e31b2 --- /dev/null +++ b/run-mcp.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +# MCP Server with Python Watchdog File Watching +# Usage: ./run-mcp-watchdog.sh [mcp-args...] 
+ +set -e + +SERVER_DIR="/Users/sara/work/fossa/devrev/mcp-server" +cd "$SERVER_DIR" + +echo "๐Ÿ”„ Starting MCP Server with Python watchdog (no fswatch needed)..." +echo "" + +# Run the Python wrapper that handles file watching and server management +exec /Users/sara/.local/bin/uv run python mcp_wrapper.py "$@" \ No newline at end of file diff --git a/src/devrev_mcp/debug.py b/src/devrev_mcp/debug.py new file mode 100644 index 0000000..2d51afc --- /dev/null +++ b/src/devrev_mcp/debug.py @@ -0,0 +1,70 @@ +""" +Debug utilities for DevRev MCP server. +""" + +import traceback +import os +from functools import wraps +from typing import Dict, Any, List + +import mcp.types as types + +# Check debug mode and store state +DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" +DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" + +def debug_error_handler(func): + """ + Decorator that catches exceptions in MCP functions and returns detailed debug information + as the response when DRMCP_DEBUG=1. + """ + debug_enabled = DEBUG_ENABLED + + @wraps(func) + async def wrapper(*args, **kwargs): + try: + result = await func(*args, **kwargs) + # Add debug message to all responses when debug is enabled + if debug_enabled and result: + # For tool responses (list of content) + if isinstance(result, list) and len(result) > 0 and hasattr(result[0], 'text'): + debug_content = types.TextContent( + type="text", + text=f"{DEBUG_MESSAGE}\n\n" + ) + result[0].text = debug_content.text + result[0].text + # For other responses (strings, etc.) - just add debug message + elif isinstance(result, str) and debug_enabled: + result = f"{DEBUG_MESSAGE}\n\n{result}" + return result + except Exception as e: + if debug_enabled: + # Debug mode: return detailed error information + error_message = f"""ERROR (Debug Mode): {type(e).__name__}: {str(e)} + +Full traceback: +{traceback.format_exc()} + +This is a debug error response. 
Let's troubleshoot this together. + +{DEBUG_MESSAGE}""" + else: + # Production mode: return generic error message + error_message = f"An error occurred while executing the function. Please try again or contact support." + + # Return appropriate error format based on expected return type + if hasattr(func, '__annotations__') and func.__annotations__.get('return'): + return_type = func.__annotations__['return'] + if 'List' in str(return_type) and 'TextContent' in str(return_type): + # Tool function - return list of TextContent + return [ + types.TextContent( + type="text", + text=error_message + ) + ] + + # Default: return as string (for resource handlers, etc.) + return error_message + + return wrapper \ No newline at end of file diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 02ea633..8baacfe 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -5,12 +5,9 @@ This module implements the MCP server for DevRev integration. """ -import asyncio -import os -import requests import json -import traceback -from functools import wraps +import os +from pathlib import Path from mcp.server.models import InitializationOptions import mcp.types as types @@ -18,6 +15,8 @@ from pydantic import AnyUrl import mcp.server.stdio from .utils import make_devrev_request +from .tools import TOOLS, TOOL_MAP +from .debug import debug_error_handler server = Server("devrev_mcp") @@ -28,335 +27,22 @@ DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" -def debug_error_handler(func): - """ - Decorator that catches exceptions in MCP tools and returns detailed debug information - as the tool response when DRMCP_DEBUG=1. 
- """ - debug_enabled = DEBUG_ENABLED - - @wraps(func) - async def wrapper(*args, **kwargs): - try: - result = await func(*args, **kwargs) - # Add debug message to all tool responses when debug is enabled - if debug_enabled and result: - # Add debug message as first item in response - debug_content = types.TextContent( - type="text", - text=f"{DEBUG_MESSAGE}\n\n" - ) - if isinstance(result, list) and len(result) > 0 and hasattr(result[0], 'text'): - result[0].text = debug_content.text + result[0].text - else: - result.insert(0, debug_content) - return result - except Exception as e: - if debug_enabled: - # Debug mode: return detailed error information - error_message = f"""ERROR (Debug Mode): {type(e).__name__}: {str(e)} - -Full traceback: -{traceback.format_exc()} - -This is a debug error response. Let's troubleshoot this together. - -{DEBUG_MESSAGE}""" - else: - # Production mode: return generic error message - error_message = f"An error occurred while executing the tool. Please try again or contact support." 
- - return [ - types.TextContent( - type="text", - text=error_message - ) - ] - - return wrapper - -@debug_error_handler -async def search_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: - """Handle search tool execution.""" - if not arguments: - raise ValueError("Missing arguments") - - query = arguments.get("query") - if not query: - raise ValueError("Missing query parameter") - - namespace = arguments.get("namespace") - if not namespace: - raise ValueError("Missing namespace parameter") - - response = make_devrev_request( - "search.hybrid", - {"query": query, "namespace": namespace} - ) - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Search failed with status {response.status_code}: {error_text}" - ) - ] - - search_results = response.json() - return [ - types.TextContent( - type="text", - text=f"Search results for '{query}':\n{search_results}" - ) - ] - -@debug_error_handler -async def get_work_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: - """Handle get_work tool execution.""" - if not arguments: - raise ValueError("Missing arguments") - - id = arguments.get("id") - if not id: - raise ValueError("Missing id parameter") - - response = make_devrev_request( - "works.get", - {"id": id} - ) - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Get work failed with status {response.status_code}: {error_text}" - ) - ] - - object_info = response.json() - # Cache the work data for resource access - devrev_cache[id] = json.dumps(object_info) - return [ - types.TextContent( - type="text", - text=f"Work information for '{id}':\n{object_info}" - ) - ] - -@debug_error_handler -async def create_work_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: - """Handle create_work tool 
execution.""" - if not arguments: - raise ValueError("Missing arguments") - - # Mandatory fields - object_type = arguments.get("type") - if not object_type: - raise ValueError("Missing type parameter") - - title = arguments.get("title") - if not title: - raise ValueError("Missing title parameter") - - applies_to_part = arguments.get("applies_to_part") - if not applies_to_part: - raise ValueError("Missing applies_to_part parameter") - - # Optional fields - body = arguments.get("body", "") - owned_by = arguments.get("owned_by", []) - - response = make_devrev_request( - "works.create", - { - "type": object_type, - "title": title, - "body": body, - "applies_to_part": applies_to_part, - "owned_by": owned_by - } - ) - if response.status_code != 201: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Create work failed with status {response.status_code}: {error_text}" - ) - ] - - created_work = response.json() - # Cache the created work data for resource access - if 'work' in created_work and 'id' in created_work['work']: - work_id = created_work['work']['id'] - devrev_cache[work_id] = json.dumps(created_work['work']) - - return [ - types.TextContent( - type="text", - text=f"Work created successfully: {created_work}" - ) - ] - -@debug_error_handler -async def get_timeline_entries_tool(arguments: dict | None) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: - """Handle get_timeline_entries tool execution.""" - if not arguments: - raise ValueError("Missing arguments") - - # Debug: check arguments type - if not isinstance(arguments, dict): - return [ - types.TextContent( - type="text", - text=f"Error: arguments is not a dict but {type(arguments)}: {arguments}" - ) - ] - - object_id = arguments.get("object_id") - if not object_id: - raise ValueError("Missing object_id parameter") - - try: - response = make_devrev_request( - "timeline-entries.list", - {"object": object_id} - ) - except Exception as e: - return [ - 
types.TextContent( - type="text", - text=f"Error making timeline request: {e}" - ) - ] - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Get timeline entries failed with status {response.status_code}: {error_text}" - ) - ] - - timeline_data = response.json() - - # Cache individual timeline entries as resources and build summary - entry_summary = [] - entry_count = 0 - if 'timeline_entries' in timeline_data: - for i, entry in enumerate(timeline_data['timeline_entries']): - # Debug: check entry type - if not isinstance(entry, dict): - return [ - types.TextContent( - type="text", - text=f"Error: Entry {i} is not a dict but {type(entry)}: {entry}" - ) - ] - if 'id' in entry: - entry_id = entry['id'] - devrev_cache[entry_id] = json.dumps(entry) - entry_count += 1 - - # Add summary info for this entry - entry_info = { - 'id': entry_id, - 'type': entry.get('type', 'unknown'), - 'created_date': entry.get('created_date'), - 'visibility': entry.get('visibility', {}).get('label', 'unknown') - } - - # Add type-specific summary info - if entry.get('type') == 'timeline_comment': - body_preview = entry.get('body', '')[:100] + ('...' if len(entry.get('body', '')) > 100 else '') - entry_info['body_preview'] = body_preview - entry_info['created_by'] = entry.get('created_by', {}).get('display_name', 'unknown') - - entry_summary.append(entry_info) - - summary_text = f"""Timeline entries for '{object_id}': -Total entries: {entry_count} -Entries cached as resources (access via devrev://): - -""" - - for i, entry in enumerate(entry_summary[:10]): # Show first 10 entries in summary - summary_text += f"{i+1}. {entry['id']} ({entry['type']}) - {entry.get('created_date', 'no date')}\n" - if 'body_preview' in entry: - summary_text += f" Preview: {entry['body_preview']}\n" - if 'created_by' in entry: - summary_text += f" By: {entry['created_by']}\n" - summary_text += "\n" - - if entry_count > 10: - summary_text += f"... 
and {entry_count - 10} more entries (all available as resources)\n" - - return [ - types.TextContent( - type="text", - text=summary_text - ) - ] +# Initialize tools with cache access +for tool in TOOLS: + if hasattr(tool, 'set_cache'): + tool.set_cache(devrev_cache) @server.list_tools() +@debug_error_handler async def handle_list_tools() -> list[types.Tool]: """ List available tools. Each tool specifies its arguments using JSON Schema validation. """ - return [ - types.Tool( - name="search", - description="Search DevRev using the provided query", - inputSchema={ - "type": "object", - "properties": { - "query": {"type": "string"}, - "namespace": {"type": "string", "enum": ["article", "issue", "ticket", "part", "dev_user"]}, - }, - "required": ["query", "namespace"], - }, - ), - types.Tool( - name="get_work", - description="Get all information about a DevRev issue and ticket using its ID", - inputSchema={ - "type": "object", - "properties": { - "id": {"type": "string"}, - }, - "required": ["id"], - }, - ), - types.Tool( - name="create_work", - description="Create a new isssue or ticket in DevRev", - inputSchema={ - "type": "object", - "properties": { - "type": {"type": "string", "enum": ["issue", "ticket"]}, - "title": {"type": "string"}, - "body": {"type": "string"}, - "applies_to_part": {"type": "string"}, - "owned_by": {"type": "array", "items": {"type": "string"}} - }, - "required": ["type", "title", "applies_to_part"], - }, - ), - types.Tool( - name="get_timeline_entries", - description="Get timeline entries for a DevRev object (ticket, issue, etc.)", - inputSchema={ - "type": "object", - "properties": { - "object_id": {"type": "string"}, - }, - "required": ["object_id"], - }, - ) - ] + return [tool.to_mcp_tool() for tool in TOOLS] @server.list_resources() +@debug_error_handler async def handle_list_resources() -> list[types.Resource]: """ List available resources. 
@@ -388,6 +74,7 @@ async def handle_list_resources() -> list[types.Resource]: return resources @server.read_resource() +@debug_error_handler async def handle_read_resource(uri: AnyUrl) -> str: """ Read a specific resource by URI. @@ -399,18 +86,17 @@ async def handle_read_resource(uri: AnyUrl) -> str: return devrev_cache[resource_id] else: # If not in cache, try to fetch it based on resource type - if ':comment/' in resource_id: - # Timeline comment - cannot fetch individual comments directly - raise ValueError(f"Timeline comment {resource_id} not found in cache") - else: - # Assume it's a work item + # For work items, we can fetch them directly + try: response = make_devrev_request("works.get", {"id": resource_id}) if response.status_code == 200: resource_data = response.json() devrev_cache[resource_id] = json.dumps(resource_data) return json.dumps(resource_data) else: - raise ValueError(f"Resource {resource_id} not found") + raise ValueError(f"Resource {resource_id} not found or inaccessible") + except Exception as e: + raise ValueError(f"Resource {resource_id} not found in cache and could not be fetched: {str(e)}") else: raise ValueError(f"Unknown resource URI: {uri}") @@ -423,14 +109,9 @@ async def handle_call_tool( Tools can modify server state and notify clients of changes. 
""" # Route to appropriate tool handler - if name == "search": - return await search_tool(arguments) - elif name == "get_work": - return await get_work_tool(arguments) - elif name == "create_work": - return await create_work_tool(arguments) - elif name == "get_timeline_entries": - return await get_timeline_entries_tool(arguments) + if name in TOOL_MAP: + tool = TOOL_MAP[name] + return await tool.execute(arguments) else: raise ValueError(f"Unknown tool: {name}") diff --git a/src/devrev_mcp/tools/__init__.py b/src/devrev_mcp/tools/__init__.py new file mode 100644 index 0000000..bbcb50d --- /dev/null +++ b/src/devrev_mcp/tools/__init__.py @@ -0,0 +1,23 @@ +""" +Tools package for DevRev MCP server. +This module automatically discovers and registers all available tools. +""" + +from .base import BaseTool +from .search import SearchTool +from .get_work import GetWorkTool +from .create_work import CreateWorkTool +from .get_timeline_entries import GetTimelineEntresTool + +# Registry of all available tools +TOOLS = [ + SearchTool(), + GetWorkTool(), + CreateWorkTool(), + GetTimelineEntresTool(), +] + +# Create a mapping for easy tool lookup by name +TOOL_MAP = {tool.name: tool for tool in TOOLS} + +__all__ = ['BaseTool', 'TOOLS', 'TOOL_MAP'] \ No newline at end of file diff --git a/src/devrev_mcp/tools/base.py b/src/devrev_mcp/tools/base.py new file mode 100644 index 0000000..b4ea3aa --- /dev/null +++ b/src/devrev_mcp/tools/base.py @@ -0,0 +1,63 @@ +""" +Base tool class for DevRev MCP server tools. +""" + +from abc import ABC, abstractmethod +from typing import Dict, Any, List + +import mcp.types as types +from ..debug import debug_error_handler + + +class BaseTool(ABC): + """ + Abstract base class for all DevRev MCP tools. 
+ + Each tool must implement: + - name: The tool's unique identifier + - description: Human-readable description of what the tool does + - input_schema: JSON Schema for validating tool arguments + - execute: The actual tool logic + """ + + @property + @abstractmethod + def name(self) -> str: + """Return the tool's unique name.""" + pass + + @property + @abstractmethod + def description(self) -> str: + """Return a description of what this tool does.""" + pass + + @property + @abstractmethod + def input_schema(self) -> Dict[str, Any]: + """Return the JSON Schema for validating tool arguments.""" + pass + + @debug_error_handler + async def execute(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """ + Execute the tool with the given arguments. + This method is automatically wrapped with error handling. + """ + return await self._execute_impl(arguments) + + @abstractmethod + async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + """ + Internal implementation of the tool logic. + Subclasses should implement this method instead of execute(). + """ + pass + + def to_mcp_tool(self) -> types.Tool: + """Convert this tool to an MCP Tool object for registration.""" + return types.Tool( + name=self.name, + description=self.description, + inputSchema=self.input_schema + ) \ No newline at end of file diff --git a/src/devrev_mcp/tools/create_work.py b/src/devrev_mcp/tools/create_work.py new file mode 100644 index 0000000..3c83c4b --- /dev/null +++ b/src/devrev_mcp/tools/create_work.py @@ -0,0 +1,96 @@ +""" +Create work tool for DevRev MCP server. 
+""" + +import json +from typing import Dict, Any, List +import mcp.types as types +from .base import BaseTool +from ..utils import make_devrev_request + + +class CreateWorkTool(BaseTool): + """Tool for creating a new issue or ticket in DevRev.""" + + def __init__(self): + # Reference to the cache - will be set by the server + self.devrev_cache = None + + def set_cache(self, cache: Dict[str, str]): + """Set the cache reference from the server.""" + self.devrev_cache = cache + + @property + def name(self) -> str: + return "create_work" + + @property + def description(self) -> str: + return "Create a new isssue or ticket in DevRev" + + @property + def input_schema(self) -> Dict[str, Any]: + return { + "type": "object", + "properties": { + "type": {"type": "string", "enum": ["issue", "ticket"]}, + "title": {"type": "string"}, + "body": {"type": "string"}, + "applies_to_part": {"type": "string"}, + "owned_by": {"type": "array", "items": {"type": "string"}} + }, + "required": ["type", "title", "applies_to_part"], + } + + async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if not arguments: + raise ValueError("Missing arguments") + + # Mandatory fields + object_type = arguments.get("type") + if not object_type: + raise ValueError("Missing type parameter") + + title = arguments.get("title") + if not title: + raise ValueError("Missing title parameter") + + applies_to_part = arguments.get("applies_to_part") + if not applies_to_part: + raise ValueError("Missing applies_to_part parameter") + + # Optional fields + body = arguments.get("body", "") + owned_by = arguments.get("owned_by", []) + + response = make_devrev_request( + "works.create", + { + "type": object_type, + "title": title, + "body": body, + "applies_to_part": applies_to_part, + "owned_by": owned_by + } + ) + if response.status_code != 201: + error_text = response.text + return [ + types.TextContent( + type="text", + 
text=f"Create work failed with status {response.status_code}: {error_text}" + ) + ] + + created_work = response.json() + # Cache the created work data for resource access + if self.devrev_cache is not None and 'work' in created_work and 'id' in created_work['work']: + work_id = created_work['work']['id'] + self.devrev_cache[work_id] = json.dumps(created_work['work']) + + return [ + types.TextContent( + type="text", + text=f"Work created successfully: {created_work}" + ) + ] \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py new file mode 100644 index 0000000..eaa18f7 --- /dev/null +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -0,0 +1,137 @@ +""" +Get timeline entries tool for DevRev MCP server. +""" + +import json +from typing import Dict, Any, List +import mcp.types as types +from .base import BaseTool +from ..utils import make_devrev_request + + +class GetTimelineEntresTool(BaseTool): + """Tool for getting timeline entries for a DevRev object (ticket, issue, etc.).""" + + def __init__(self): + # Reference to the cache - will be set by the server + self.devrev_cache = None + + def set_cache(self, cache: Dict[str, str]): + """Set the cache reference from the server.""" + self.devrev_cache = cache + + @property + def name(self) -> str: + return "get_timeline_entries" + + @property + def description(self) -> str: + return "Get timeline entries for a DevRev object (ticket, issue, etc.)" + + @property + def input_schema(self) -> Dict[str, Any]: + return { + "type": "object", + "properties": { + "object_id": {"type": "string"}, + }, + "required": ["object_id"], + } + + async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if not arguments: + raise ValueError("Missing arguments") + + # Debug: check arguments type + if not isinstance(arguments, dict): + return [ + types.TextContent( + type="text", + 
text=f"Error: arguments is not a dict but {type(arguments)}: {arguments}" + ) + ] + + object_id = arguments.get("object_id") + if not object_id: + raise ValueError("Missing object_id parameter") + + try: + response = make_devrev_request( + "timeline-entries.list", + {"object": object_id} + ) + except Exception as e: + return [ + types.TextContent( + type="text", + text=f"Error making timeline request: {e}" + ) + ] + if response.status_code != 200: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Get timeline entries failed with status {response.status_code}: {error_text}" + ) + ] + + timeline_data = response.json() + + # Cache individual timeline entries as resources and build summary + entry_summary = [] + entry_count = 0 + if 'timeline_entries' in timeline_data: + for i, entry in enumerate(timeline_data['timeline_entries']): + # Debug: check entry type + if not isinstance(entry, dict): + return [ + types.TextContent( + type="text", + text=f"Error: Entry {i} is not a dict but {type(entry)}: {entry}" + ) + ] + if 'id' in entry and self.devrev_cache is not None: + entry_id = entry['id'] + self.devrev_cache[entry_id] = json.dumps(entry) + entry_count += 1 + + # Add summary info for this entry + entry_info = { + 'id': entry_id, + 'type': entry.get('type', 'unknown'), + 'created_date': entry.get('created_date'), + 'visibility': entry.get('visibility', {}).get('label', 'unknown') if isinstance(entry.get('visibility'), dict) else entry.get('visibility', 'unknown') + } + + # Add type-specific summary info + if entry.get('type') == 'timeline_comment': + body_preview = entry.get('body', '')[:100] + ('...' 
if len(entry.get('body', '')) > 100 else '') + entry_info['body_preview'] = body_preview + entry_info['created_by'] = entry.get('created_by', {}).get('display_name', 'unknown') + + entry_summary.append(entry_info) + + summary_text = f"""Timeline entries for '{object_id}': +Total entries: {entry_count} +Entries cached as resources (access via devrev://): + +""" + + for i, entry in enumerate(entry_summary[:10]): # Show first 10 entries in summary + summary_text += f"{i+1}. {entry['id']} ({entry['type']}) - {entry.get('created_date', 'no date')}\n" + if 'body_preview' in entry: + summary_text += f" Preview: {entry['body_preview']}\n" + if 'created_by' in entry: + summary_text += f" By: {entry['created_by']}\n" + summary_text += "\n" + + if entry_count > 10: + summary_text += f"... and {entry_count - 10} more entries (all available as resources)\n" + + return [ + types.TextContent( + type="text", + text=summary_text + ) + ] \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_work.py b/src/devrev_mcp/tools/get_work.py new file mode 100644 index 0000000..6b51085 --- /dev/null +++ b/src/devrev_mcp/tools/get_work.py @@ -0,0 +1,72 @@ +""" +Get work tool for DevRev MCP server. 
+""" + +import json +from typing import Dict, Any, List +import mcp.types as types +from .base import BaseTool +from ..utils import make_devrev_request + + +class GetWorkTool(BaseTool): + """Tool for getting information about a DevRev issue and ticket using its ID.""" + + def __init__(self): + # Reference to the cache - will be set by the server + self.devrev_cache = None + + def set_cache(self, cache: Dict[str, str]): + """Set the cache reference from the server.""" + self.devrev_cache = cache + + @property + def name(self) -> str: + return "get_work" + + @property + def description(self) -> str: + return "Get all information about a DevRev issue and ticket using its ID" + + @property + def input_schema(self) -> Dict[str, Any]: + return { + "type": "object", + "properties": { + "id": {"type": "string"}, + }, + "required": ["id"], + } + + async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if not arguments: + raise ValueError("Missing arguments") + + id = arguments.get("id") + if not id: + raise ValueError("Missing id parameter") + + response = make_devrev_request( + "works.get", + {"id": id} + ) + if response.status_code != 200: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Get work failed with status {response.status_code}: {error_text}" + ) + ] + + object_info = response.json() + # Cache the work data for resource access + if self.devrev_cache is not None: + self.devrev_cache[id] = json.dumps(object_info) + + return [ + types.TextContent( + type="text", + text=f"Work information for '{id}':\n{object_info}" + ) + ] \ No newline at end of file diff --git a/src/devrev_mcp/tools/search.py b/src/devrev_mcp/tools/search.py new file mode 100644 index 0000000..e3465a5 --- /dev/null +++ b/src/devrev_mcp/tools/search.py @@ -0,0 +1,64 @@ +""" +Search tool for DevRev MCP server. 
+""" + +from typing import Dict, Any, List +import mcp.types as types +from .base import BaseTool +from ..utils import make_devrev_request + + +class SearchTool(BaseTool): + """Tool for searching DevRev using the provided query.""" + + @property + def name(self) -> str: + return "search" + + @property + def description(self) -> str: + return "Search DevRev using the provided query" + + @property + def input_schema(self) -> Dict[str, Any]: + return { + "type": "object", + "properties": { + "query": {"type": "string"}, + "namespace": {"type": "string", "enum": ["article", "issue", "ticket", "part", "dev_user"]}, + }, + "required": ["query", "namespace"], + } + + async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: + if not arguments: + raise ValueError("Missing arguments") + + query = arguments.get("query") + if not query: + raise ValueError("Missing query parameter") + + namespace = arguments.get("namespace") + if not namespace: + raise ValueError("Missing namespace parameter") + + response = make_devrev_request( + "search.hybrid", + {"query": query, "namespace": namespace} + ) + if response.status_code != 200: + error_text = response.text + return [ + types.TextContent( + type="text", + text=f"Search failed with status {response.status_code}: {error_text}" + ) + ] + + search_results = response.json() + return [ + types.TextContent( + type="text", + text=f"Search results for '{query}':\n{search_results}" + ) + ] \ No newline at end of file diff --git a/src/devrev_mcp/utils.py b/src/devrev_mcp/utils.py index cfa6a37..557ed08 100644 --- a/src/devrev_mcp/utils.py +++ b/src/devrev_mcp/utils.py @@ -22,6 +22,7 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp Raises: ValueError: If DEVREV_API_KEY environment variable is not set + requests.RequestException: If the HTTP request fails """ api_key = os.environ.get("DEVREV_API_KEY") if not api_key: @@ -32,8 
+33,13 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp "Content-Type": "application/json", } - return requests.post( - f"https://api.devrev.ai/{endpoint}", - headers=headers, - json=payload - ) + try: + response = requests.post( + f"https://api.devrev.ai/{endpoint}", + headers=headers, + json=payload, + timeout=30 # Add timeout for better error handling + ) + return response + except requests.RequestException as e: + raise requests.RequestException(f"DevRev API request failed for endpoint '{endpoint}': {e}") from e diff --git a/uv.lock b/uv.lock index 6bd9cae..1793963 100644 --- a/uv.lock +++ b/uv.lock @@ -1,13 +1,14 @@ version = 1 +revision = 2 requires-python = ">=3.11" [[package]] name = "annotated-types" version = "0.7.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081 } +sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643 }, + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, ] [[package]] @@ -18,72 +19,72 @@ dependencies = [ { name = 
"idna" }, { name = "sniffio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/9f/09/45b9b7a6d4e45c6bcb5bf61d19e3ab87df68e0601fa8c5293de3542546cc/anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c", size = 173422 } +sdist = { url = "https://files.pythonhosted.org/packages/9f/09/45b9b7a6d4e45c6bcb5bf61d19e3ab87df68e0601fa8c5293de3542546cc/anyio-4.6.2.post1.tar.gz", hash = "sha256:4c8bc31ccdb51c7f7bd251f51c609e038d63e34219b44aa86e47576389880b4c", size = 173422, upload-time = "2024-10-14T14:31:44.021Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e4/f5/f2b75d2fc6f1a260f340f0e7c6a060f4dd2961cc16884ed851b0d18da06a/anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d", size = 90377 }, + { url = "https://files.pythonhosted.org/packages/e4/f5/f2b75d2fc6f1a260f340f0e7c6a060f4dd2961cc16884ed851b0d18da06a/anyio-4.6.2.post1-py3-none-any.whl", hash = "sha256:6d170c36fba3bdd840c73d3868c1e777e33676a69c3a72cf0a0d5d6d8009b61d", size = 90377, upload-time = "2024-10-14T14:31:42.623Z" }, ] [[package]] name = "certifi" version = "2024.8.30" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/ee/9b19140fe824b367c04c5e1b369942dd754c4c5462d5674002f75c4dedc1/certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9", size = 168507, upload-time = "2024-08-30T01:55:04.365Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = 
"sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321 }, + { url = "https://files.pythonhosted.org/packages/12/90/3c9ff0512038035f59d279fddeb79f5f1eccd8859f06d6163c58798b9487/certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8", size = 167321, upload-time = "2024-08-30T01:55:02.591Z" }, ] [[package]] name = "charset-normalizer" version = "3.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620 } +sdist = { url = "https://files.pythonhosted.org/packages/f2/4f/e1808dc01273379acc506d18f1504eb2d299bd4131743b9fc54d7be4df1e/charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e", size = 106620, upload-time = "2024-10-09T07:40:20.413Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339 }, - { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366 }, - { url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874 }, - { url = 
"https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243 }, - { url = "https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676 }, - { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289 }, - { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585 }, - { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408 }, - { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076 }, - { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", 
hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 146874 }, - { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871 }, - { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546 }, - { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048 }, - { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389 }, - { url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752 }, - { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445 }, - { url = "https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275 }, - { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020 }, - { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128 }, - { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277 }, - { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174 }, - { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838 }, - { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149 }, - { url = 
"https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043 }, - { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229 }, - { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556 }, - { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772 }, - { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800 }, - { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836 }, - { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187 }, - { url = 
"https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617 }, - { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310 }, - { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126 }, - { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342 }, - { url = "https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383 }, - { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214 }, - { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104 }, - { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255 }, - { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251 }, - { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 148474 }, - { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849 }, - { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781 }, - { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970 }, - { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", 
hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973 }, - { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308 }, - { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446 }, + { url = "https://files.pythonhosted.org/packages/9c/61/73589dcc7a719582bf56aae309b6103d2762b526bffe189d635a7fcfd998/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c", size = 193339, upload-time = "2024-10-09T07:38:24.527Z" }, + { url = "https://files.pythonhosted.org/packages/77/d5/8c982d58144de49f59571f940e329ad6e8615e1e82ef84584c5eeb5e1d72/charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944", size = 124366, upload-time = "2024-10-09T07:38:26.488Z" }, + { url = "https://files.pythonhosted.org/packages/bf/19/411a64f01ee971bed3231111b69eb56f9331a769072de479eae7de52296d/charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee", size = 118874, upload-time = "2024-10-09T07:38:28.115Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/97509850f0d00e9f14a46bc751daabd0ad7765cff29cdfb66c68b6dad57f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c", size = 138243, upload-time = "2024-10-09T07:38:29.822Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/29/d227805bff72ed6d6cb1ce08eec707f7cfbd9868044893617eb331f16295/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6", size = 148676, upload-time = "2024-10-09T07:38:30.869Z" }, + { url = "https://files.pythonhosted.org/packages/13/bc/87c2c9f2c144bedfa62f894c3007cd4530ba4b5351acb10dc786428a50f0/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea", size = 141289, upload-time = "2024-10-09T07:38:32.557Z" }, + { url = "https://files.pythonhosted.org/packages/eb/5b/6f10bad0f6461fa272bfbbdf5d0023b5fb9bc6217c92bf068fa5a99820f5/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc", size = 142585, upload-time = "2024-10-09T07:38:33.649Z" }, + { url = "https://files.pythonhosted.org/packages/3b/a0/a68980ab8a1f45a36d9745d35049c1af57d27255eff8c907e3add84cf68f/charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5", size = 144408, upload-time = "2024-10-09T07:38:34.687Z" }, + { url = "https://files.pythonhosted.org/packages/d7/a1/493919799446464ed0299c8eef3c3fad0daf1c3cd48bff9263c731b0d9e2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594", size = 139076, upload-time = "2024-10-09T07:38:36.417Z" }, + { url = "https://files.pythonhosted.org/packages/fb/9d/9c13753a5a6e0db4a0a6edb1cef7aee39859177b64e1a1e748a6e3ba62c2/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c", size = 
146874, upload-time = "2024-10-09T07:38:37.59Z" }, + { url = "https://files.pythonhosted.org/packages/75/d2/0ab54463d3410709c09266dfb416d032a08f97fd7d60e94b8c6ef54ae14b/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365", size = 150871, upload-time = "2024-10-09T07:38:38.666Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c9/27e41d481557be53d51e60750b85aa40eaf52b841946b3cdeff363105737/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129", size = 148546, upload-time = "2024-10-09T07:38:40.459Z" }, + { url = "https://files.pythonhosted.org/packages/ee/44/4f62042ca8cdc0cabf87c0fc00ae27cd8b53ab68be3605ba6d071f742ad3/charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236", size = 143048, upload-time = "2024-10-09T07:38:42.178Z" }, + { url = "https://files.pythonhosted.org/packages/01/f8/38842422988b795220eb8038745d27a675ce066e2ada79516c118f291f07/charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99", size = 94389, upload-time = "2024-10-09T07:38:43.339Z" }, + { url = "https://files.pythonhosted.org/packages/0b/6e/b13bd47fa9023b3699e94abf565b5a2f0b0be6e9ddac9812182596ee62e4/charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27", size = 101752, upload-time = "2024-10-09T07:38:44.276Z" }, + { url = "https://files.pythonhosted.org/packages/d3/0b/4b7a70987abf9b8196845806198975b6aab4ce016632f817ad758a5aa056/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6", size = 194445, upload-time = "2024-10-09T07:38:45.275Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/89/354cc56cf4dd2449715bc9a0f54f3aef3dc700d2d62d1fa5bbea53b13426/charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf", size = 125275, upload-time = "2024-10-09T07:38:46.449Z" }, + { url = "https://files.pythonhosted.org/packages/fa/44/b730e2a2580110ced837ac083d8ad222343c96bb6b66e9e4e706e4d0b6df/charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db", size = 119020, upload-time = "2024-10-09T07:38:48.88Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e4/9263b8240ed9472a2ae7ddc3e516e71ef46617fe40eaa51221ccd4ad9a27/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1", size = 139128, upload-time = "2024-10-09T07:38:49.86Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e3/9f73e779315a54334240353eaea75854a9a690f3f580e4bd85d977cb2204/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03", size = 149277, upload-time = "2024-10-09T07:38:52.306Z" }, + { url = "https://files.pythonhosted.org/packages/1a/cf/f1f50c2f295312edb8a548d3fa56a5c923b146cd3f24114d5adb7e7be558/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284", size = 142174, upload-time = "2024-10-09T07:38:53.458Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/92a76dc2ff3a12e69ba94e7e05168d37d0345fa08c87e1fe24d0c2a42223/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15", size = 143838, upload-time = 
"2024-10-09T07:38:54.691Z" }, + { url = "https://files.pythonhosted.org/packages/a4/01/2117ff2b1dfc61695daf2babe4a874bca328489afa85952440b59819e9d7/charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8", size = 146149, upload-time = "2024-10-09T07:38:55.737Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9b/93a332b8d25b347f6839ca0a61b7f0287b0930216994e8bf67a75d050255/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2", size = 140043, upload-time = "2024-10-09T07:38:57.44Z" }, + { url = "https://files.pythonhosted.org/packages/ab/f6/7ac4a01adcdecbc7a7587767c776d53d369b8b971382b91211489535acf0/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719", size = 148229, upload-time = "2024-10-09T07:38:58.782Z" }, + { url = "https://files.pythonhosted.org/packages/9d/be/5708ad18161dee7dc6a0f7e6cf3a88ea6279c3e8484844c0590e50e803ef/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631", size = 151556, upload-time = "2024-10-09T07:39:00.467Z" }, + { url = "https://files.pythonhosted.org/packages/5a/bb/3d8bc22bacb9eb89785e83e6723f9888265f3a0de3b9ce724d66bd49884e/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b", size = 149772, upload-time = "2024-10-09T07:39:01.5Z" }, + { url = "https://files.pythonhosted.org/packages/f7/fa/d3fc622de05a86f30beea5fc4e9ac46aead4731e73fd9055496732bcc0a4/charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565", size = 144800, upload-time = 
"2024-10-09T07:39:02.491Z" }, + { url = "https://files.pythonhosted.org/packages/9a/65/bdb9bc496d7d190d725e96816e20e2ae3a6fa42a5cac99c3c3d6ff884118/charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7", size = 94836, upload-time = "2024-10-09T07:39:04.607Z" }, + { url = "https://files.pythonhosted.org/packages/3e/67/7b72b69d25b89c0b3cea583ee372c43aa24df15f0e0f8d3982c57804984b/charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9", size = 102187, upload-time = "2024-10-09T07:39:06.247Z" }, + { url = "https://files.pythonhosted.org/packages/f3/89/68a4c86f1a0002810a27f12e9a7b22feb198c59b2f05231349fbce5c06f4/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114", size = 194617, upload-time = "2024-10-09T07:39:07.317Z" }, + { url = "https://files.pythonhosted.org/packages/4f/cd/8947fe425e2ab0aa57aceb7807af13a0e4162cd21eee42ef5b053447edf5/charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed", size = 125310, upload-time = "2024-10-09T07:39:08.353Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f0/b5263e8668a4ee9becc2b451ed909e9c27058337fda5b8c49588183c267a/charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250", size = 119126, upload-time = "2024-10-09T07:39:09.327Z" }, + { url = "https://files.pythonhosted.org/packages/ff/6e/e445afe4f7fda27a533f3234b627b3e515a1b9429bc981c9a5e2aa5d97b6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920", size = 139342, upload-time = "2024-10-09T07:39:10.322Z" }, + { url = 
"https://files.pythonhosted.org/packages/a1/b2/4af9993b532d93270538ad4926c8e37dc29f2111c36f9c629840c57cd9b3/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64", size = 149383, upload-time = "2024-10-09T07:39:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/fb/6f/4e78c3b97686b871db9be6f31d64e9264e889f8c9d7ab33c771f847f79b7/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23", size = 142214, upload-time = "2024-10-09T07:39:13.059Z" }, + { url = "https://files.pythonhosted.org/packages/2b/c9/1c8fe3ce05d30c87eff498592c89015b19fade13df42850aafae09e94f35/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc", size = 144104, upload-time = "2024-10-09T07:39:14.815Z" }, + { url = "https://files.pythonhosted.org/packages/ee/68/efad5dcb306bf37db7db338338e7bb8ebd8cf38ee5bbd5ceaaaa46f257e6/charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d", size = 146255, upload-time = "2024-10-09T07:39:15.868Z" }, + { url = "https://files.pythonhosted.org/packages/0c/75/1ed813c3ffd200b1f3e71121c95da3f79e6d2a96120163443b3ad1057505/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88", size = 140251, upload-time = "2024-10-09T07:39:16.995Z" }, + { url = "https://files.pythonhosted.org/packages/7d/0d/6f32255c1979653b448d3c709583557a4d24ff97ac4f3a5be156b2e6a210/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90", size = 
148474, upload-time = "2024-10-09T07:39:18.021Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a0/c1b5298de4670d997101fef95b97ac440e8c8d8b4efa5a4d1ef44af82f0d/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b", size = 151849, upload-time = "2024-10-09T07:39:19.243Z" }, + { url = "https://files.pythonhosted.org/packages/04/4f/b3961ba0c664989ba63e30595a3ed0875d6790ff26671e2aae2fdc28a399/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d", size = 149781, upload-time = "2024-10-09T07:39:20.397Z" }, + { url = "https://files.pythonhosted.org/packages/d8/90/6af4cd042066a4adad58ae25648a12c09c879efa4849c705719ba1b23d8c/charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482", size = 144970, upload-time = "2024-10-09T07:39:21.452Z" }, + { url = "https://files.pythonhosted.org/packages/cc/67/e5e7e0cbfefc4ca79025238b43cdf8a2037854195b37d6417f3d0895c4c2/charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67", size = 94973, upload-time = "2024-10-09T07:39:22.509Z" }, + { url = "https://files.pythonhosted.org/packages/65/97/fc9bbc54ee13d33dc54a7fcf17b26368b18505500fc01e228c27b5222d80/charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b", size = 102308, upload-time = "2024-10-09T07:39:23.524Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9b/08c0432272d77b04803958a4598a51e2a4b51c06640af8b8f0f908c18bf2/charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079", size = 49446, upload-time = "2024-10-09T07:40:19.383Z" }, ] [[package]] @@ -91,20 +92,20 @@ name = "click" version 
= "8.1.7" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "colorama", marker = "platform_system == 'Windows'" }, + { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121 } +sdist = { url = "https://files.pythonhosted.org/packages/96/d3/f04c7bfcf5c1862a2a5b845c6b2b360488cf47af55dfa79c98f6a6bf98b5/click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de", size = 336121, upload-time = "2023-08-17T17:29:11.868Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941 }, + { url = "https://files.pythonhosted.org/packages/00/2e/d53fa4befbf2cfa713304affc7ca780ce4fc1fd8710527771b58311a3229/click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28", size = 97941, upload-time = "2023-08-17T17:29:10.08Z" }, ] [[package]] name = "colorama" version = "0.4.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697 } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335 }, + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, ] [[package]] @@ -114,21 +115,23 @@ source = { editable = "." } dependencies = [ { name = "mcp" }, { name = "requests" }, + { name = "watchdog" }, ] [package.metadata] requires-dist = [ { name = "mcp", specifier = ">=1.0.0" }, { name = "requests" }, + { name = "watchdog" }, ] [[package]] name = "h11" version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418 } +sdist = { url = "https://files.pythonhosted.org/packages/f5/38/3af3d3633a34a3316095b39c8e8fb4853a28a536e55d347bd8d8e9a14b03/h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d", size = 100418, upload-time = "2022-09-25T15:40:01.519Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259 }, + { url = "https://files.pythonhosted.org/packages/95/04/ff642e65ad6b90db43e668d70ffb6736436c7ce41fcc549f4e9472234127/h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761", size = 58259, upload-time = "2022-09-25T15:39:59.68Z" }, ] [[package]] @@ 
-139,9 +142,9 @@ dependencies = [ { name = "certifi" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/41/d7d0a89eb493922c37d343b607bc1b5da7f5be7e383740b4753ad8943e90/httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c", size = 85196, upload-time = "2024-11-15T12:30:47.531Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551 }, + { url = "https://files.pythonhosted.org/packages/87/f5/72347bc88306acb359581ac4d52f23c0ef445b57157adedb9aee0cd689d2/httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd", size = 78551, upload-time = "2024-11-15T12:30:45.782Z" }, ] [[package]] @@ -154,27 +157,27 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/df/676b7cf674dd1bdc71a64ad393c89879f75e4a0ab8395165b498262ae106/httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0", size = 141307 } +sdist = { url = "https://files.pythonhosted.org/packages/10/df/676b7cf674dd1bdc71a64ad393c89879f75e4a0ab8395165b498262ae106/httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0", size = 141307, upload-time = "2024-11-28T14:54:56.977Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/fb/a19866137577ba60c6d8b69498dc36be479b13ba454f691348ddf428f185/httpx-0.28.0-py3-none-any.whl", hash = 
"sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc", size = 73551 }, + { url = "https://files.pythonhosted.org/packages/8f/fb/a19866137577ba60c6d8b69498dc36be479b13ba454f691348ddf428f185/httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc", size = 73551, upload-time = "2024-11-28T14:54:55.141Z" }, ] [[package]] name = "httpx-sse" version = "0.4.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624 } +sdist = { url = "https://files.pythonhosted.org/packages/4c/60/8f4281fa9bbf3c8034fd54c0e7412e66edbab6bc74c4996bd616f8d0406e/httpx-sse-0.4.0.tar.gz", hash = "sha256:1e81a3a3070ce322add1d3529ed42eb5f70817f45ed6ec915ab753f961139721", size = 12624, upload-time = "2023-12-22T08:01:21.083Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819 }, + { url = "https://files.pythonhosted.org/packages/e1/9b/a181f281f65d776426002f330c31849b86b31fc9d848db62e16f03ff739f/httpx_sse-0.4.0-py3-none-any.whl", hash = "sha256:f329af6eae57eaa2bdfd962b42524764af68075ea87370a2de920af5341e318f", size = 7819, upload-time = "2023-12-22T08:01:19.89Z" }, ] [[package]] name = "idna" version = "3.10" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490 } +sdist = { url = 
"https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442 }, + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] [[package]] @@ -189,9 +192,9 @@ dependencies = [ { name = "sse-starlette" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/de/a9ec0a1b6439f90ea59f89004bb2e7ec6890dfaeef809751d9e6577dca7e/mcp-1.0.0.tar.gz", hash = "sha256:dba51ce0b5c6a80e25576f606760c49a91ee90210fed805b530ca165d3bbc9b7", size = 82891 } +sdist = { url = "https://files.pythonhosted.org/packages/97/de/a9ec0a1b6439f90ea59f89004bb2e7ec6890dfaeef809751d9e6577dca7e/mcp-1.0.0.tar.gz", hash = "sha256:dba51ce0b5c6a80e25576f606760c49a91ee90210fed805b530ca165d3bbc9b7", size = 82891, upload-time = "2024-11-25T14:27:35.616Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/89/900c0c8445ec001d3725e475fc553b0feb2e8a51be018f3bb7de51e683db/mcp-1.0.0-py3-none-any.whl", hash = "sha256:bbe70ffa3341cd4da78b5eb504958355c68381fb29971471cea1e642a2af5b8a", size = 36361 }, + { url = "https://files.pythonhosted.org/packages/56/89/900c0c8445ec001d3725e475fc553b0feb2e8a51be018f3bb7de51e683db/mcp-1.0.0-py3-none-any.whl", hash = "sha256:bbe70ffa3341cd4da78b5eb504958355c68381fb29971471cea1e642a2af5b8a", size = 36361, upload-time = "2024-11-25T14:27:34.367Z" }, ] [[package]] @@ 
-203,9 +206,9 @@ dependencies = [ { name = "pydantic-core" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/41/86/a03390cb12cf64e2a8df07c267f3eb8d5035e0f9a04bb20fb79403d2a00e/pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa", size = 785401 } +sdist = { url = "https://files.pythonhosted.org/packages/41/86/a03390cb12cf64e2a8df07c267f3eb8d5035e0f9a04bb20fb79403d2a00e/pydantic-2.10.2.tar.gz", hash = "sha256:2bc2d7f17232e0841cbba4641e65ba1eb6fafb3a08de3a091ff3ce14a197c4fa", size = 785401, upload-time = "2024-11-26T13:02:29.793Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d5/74/da832196702d0c56eb86b75bfa346db9238617e29b0b7ee3b8b4eccfe654/pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e", size = 456364 }, + { url = "https://files.pythonhosted.org/packages/d5/74/da832196702d0c56eb86b75bfa346db9238617e29b0b7ee3b8b4eccfe654/pydantic-2.10.2-py3-none-any.whl", hash = "sha256:cfb96e45951117c3024e6b67b25cdc33a3cb7b2fa62e239f7af1378358a1d99e", size = 456364, upload-time = "2024-11-26T13:02:27.147Z" }, ] [[package]] @@ -215,50 +218,50 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a6/9f/7de1f19b6aea45aeb441838782d68352e71bfa98ee6fa048d5041991b33e/pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235", size = 412785 } +sdist = { url = "https://files.pythonhosted.org/packages/a6/9f/7de1f19b6aea45aeb441838782d68352e71bfa98ee6fa048d5041991b33e/pydantic_core-2.27.1.tar.gz", hash = "sha256:62a763352879b84aa31058fc931884055fd75089cccbd9d58bb6afd01141b235", size = 412785, upload-time = "2024-11-22T00:24:49.865Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/27/39/46fe47f2ad4746b478ba89c561cafe4428e02b3573df882334bd2964f9cb/pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8", size = 1895553 }, - { url = "https://files.pythonhosted.org/packages/1c/00/0804e84a78b7fdb394fff4c4f429815a10e5e0993e6ae0e0b27dd20379ee/pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330", size = 1807220 }, - { url = "https://files.pythonhosted.org/packages/01/de/df51b3bac9820d38371f5a261020f505025df732ce566c2a2e7970b84c8c/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52", size = 1829727 }, - { url = "https://files.pythonhosted.org/packages/5f/d9/c01d19da8f9e9fbdb2bf99f8358d145a312590374d0dc9dd8dbe484a9cde/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4", size = 1854282 }, - { url = "https://files.pythonhosted.org/packages/5f/84/7db66eb12a0dc88c006abd6f3cbbf4232d26adfd827a28638c540d8f871d/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c", size = 2037437 }, - { url = "https://files.pythonhosted.org/packages/34/ac/a2537958db8299fbabed81167d58cc1506049dba4163433524e06a7d9f4c/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de", size = 2780899 }, - { url = "https://files.pythonhosted.org/packages/4a/c1/3e38cd777ef832c4fdce11d204592e135ddeedb6c6f525478a53d1c7d3e5/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025", size = 2135022 }, - { url = "https://files.pythonhosted.org/packages/7a/69/b9952829f80fd555fe04340539d90e000a146f2a003d3fcd1e7077c06c71/pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e", size = 1987969 }, - { url = "https://files.pythonhosted.org/packages/05/72/257b5824d7988af43460c4e22b63932ed651fe98804cc2793068de7ec554/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919", size = 1994625 }, - { url = "https://files.pythonhosted.org/packages/73/c3/78ed6b7f3278a36589bcdd01243189ade7fc9b26852844938b4d7693895b/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c", size = 2090089 }, - { url = "https://files.pythonhosted.org/packages/8d/c8/b4139b2f78579960353c4cd987e035108c93a78371bb19ba0dc1ac3b3220/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc", size = 2142496 }, - { url = "https://files.pythonhosted.org/packages/3e/f8/171a03e97eb36c0b51981efe0f78460554a1d8311773d3d30e20c005164e/pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9", size = 1811758 }, - { url = "https://files.pythonhosted.org/packages/6a/fe/4e0e63c418c1c76e33974a05266e5633e879d4061f9533b1706a86f77d5b/pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5", size = 1980864 }, - { url = "https://files.pythonhosted.org/packages/50/fc/93f7238a514c155a8ec02fc7ac6376177d449848115e4519b853820436c5/pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89", 
size = 1864327 }, - { url = "https://files.pythonhosted.org/packages/be/51/2e9b3788feb2aebff2aa9dfbf060ec739b38c05c46847601134cc1fed2ea/pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f", size = 1895239 }, - { url = "https://files.pythonhosted.org/packages/7b/9e/f8063952e4a7d0127f5d1181addef9377505dcce3be224263b25c4f0bfd9/pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02", size = 1805070 }, - { url = "https://files.pythonhosted.org/packages/2c/9d/e1d6c4561d262b52e41b17a7ef8301e2ba80b61e32e94520271029feb5d8/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c", size = 1828096 }, - { url = "https://files.pythonhosted.org/packages/be/65/80ff46de4266560baa4332ae3181fffc4488ea7d37282da1a62d10ab89a4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac", size = 1857708 }, - { url = "https://files.pythonhosted.org/packages/d5/ca/3370074ad758b04d9562b12ecdb088597f4d9d13893a48a583fb47682cdf/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb", size = 2037751 }, - { url = "https://files.pythonhosted.org/packages/b1/e2/4ab72d93367194317b99d051947c071aef6e3eb95f7553eaa4208ecf9ba4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529", size = 2733863 }, - { url = "https://files.pythonhosted.org/packages/8a/c6/8ae0831bf77f356bb73127ce5a95fe115b10f820ea480abbd72d3cc7ccf3/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35", size = 2161161 }, - { url = "https://files.pythonhosted.org/packages/f1/f4/b2fe73241da2429400fc27ddeaa43e35562f96cf5b67499b2de52b528cad/pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089", size = 1993294 }, - { url = "https://files.pythonhosted.org/packages/77/29/4bb008823a7f4cc05828198153f9753b3bd4c104d93b8e0b1bfe4e187540/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381", size = 2001468 }, - { url = "https://files.pythonhosted.org/packages/f2/a9/0eaceeba41b9fad851a4107e0cf999a34ae8f0d0d1f829e2574f3d8897b0/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb", size = 2091413 }, - { url = "https://files.pythonhosted.org/packages/d8/36/eb8697729725bc610fd73940f0d860d791dc2ad557faaefcbb3edbd2b349/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae", size = 2154735 }, - { url = "https://files.pythonhosted.org/packages/52/e5/4f0fbd5c5995cc70d3afed1b5c754055bb67908f55b5cb8000f7112749bf/pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c", size = 1833633 }, - { url = "https://files.pythonhosted.org/packages/ee/f2/c61486eee27cae5ac781305658779b4a6b45f9cc9d02c90cb21b940e82cc/pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16", size = 1986973 }, - { url = "https://files.pythonhosted.org/packages/df/a6/e3f12ff25f250b02f7c51be89a294689d175ac76e1096c32bf278f29ca1e/pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e", 
size = 1883215 }, - { url = "https://files.pythonhosted.org/packages/0f/d6/91cb99a3c59d7b072bded9959fbeab0a9613d5a4935773c0801f1764c156/pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073", size = 1895033 }, - { url = "https://files.pythonhosted.org/packages/07/42/d35033f81a28b27dedcade9e967e8a40981a765795c9ebae2045bcef05d3/pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08", size = 1807542 }, - { url = "https://files.pythonhosted.org/packages/41/c2/491b59e222ec7e72236e512108ecad532c7f4391a14e971c963f624f7569/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf", size = 1827854 }, - { url = "https://files.pythonhosted.org/packages/e3/f3/363652651779113189cefdbbb619b7b07b7a67ebb6840325117cc8cc3460/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737", size = 1857389 }, - { url = "https://files.pythonhosted.org/packages/5f/97/be804aed6b479af5a945daec7538d8bf358d668bdadde4c7888a2506bdfb/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2", size = 2037934 }, - { url = "https://files.pythonhosted.org/packages/42/01/295f0bd4abf58902917e342ddfe5f76cf66ffabfc57c2e23c7681a1a1197/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107", size = 2735176 }, - { url = "https://files.pythonhosted.org/packages/9d/a0/cd8e9c940ead89cc37812a1a9f310fef59ba2f0b22b4e417d84ab09fa970/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51", size = 2160720 }, - { url = "https://files.pythonhosted.org/packages/73/ae/9d0980e286627e0aeca4c352a60bd760331622c12d576e5ea4441ac7e15e/pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a", size = 1992972 }, - { url = "https://files.pythonhosted.org/packages/bf/ba/ae4480bc0292d54b85cfb954e9d6bd226982949f8316338677d56541b85f/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc", size = 2001477 }, - { url = "https://files.pythonhosted.org/packages/55/b7/e26adf48c2f943092ce54ae14c3c08d0d221ad34ce80b18a50de8ed2cba8/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960", size = 2091186 }, - { url = "https://files.pythonhosted.org/packages/ba/cc/8491fff5b608b3862eb36e7d29d36a1af1c945463ca4c5040bf46cc73f40/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23", size = 2154429 }, - { url = "https://files.pythonhosted.org/packages/78/d8/c080592d80edd3441ab7f88f865f51dae94a157fc64283c680e9f32cf6da/pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05", size = 1833713 }, - { url = "https://files.pythonhosted.org/packages/83/84/5ab82a9ee2538ac95a66e51f6838d6aba6e0a03a42aa185ad2fe404a4e8f/pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337", size = 1987897 }, - { url = "https://files.pythonhosted.org/packages/df/c3/b15fb833926d91d982fde29c0624c9f225da743c7af801dace0d4e187e71/pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5", 
size = 1882983 }, + { url = "https://files.pythonhosted.org/packages/27/39/46fe47f2ad4746b478ba89c561cafe4428e02b3573df882334bd2964f9cb/pydantic_core-2.27.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:ac3b20653bdbe160febbea8aa6c079d3df19310d50ac314911ed8cc4eb7f8cb8", size = 1895553, upload-time = "2024-11-22T00:21:48.859Z" }, + { url = "https://files.pythonhosted.org/packages/1c/00/0804e84a78b7fdb394fff4c4f429815a10e5e0993e6ae0e0b27dd20379ee/pydantic_core-2.27.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a5a8e19d7c707c4cadb8c18f5f60c843052ae83c20fa7d44f41594c644a1d330", size = 1807220, upload-time = "2024-11-22T00:21:50.354Z" }, + { url = "https://files.pythonhosted.org/packages/01/de/df51b3bac9820d38371f5a261020f505025df732ce566c2a2e7970b84c8c/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f7059ca8d64fea7f238994c97d91f75965216bcbe5f695bb44f354893f11d52", size = 1829727, upload-time = "2024-11-22T00:21:51.722Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d9/c01d19da8f9e9fbdb2bf99f8358d145a312590374d0dc9dd8dbe484a9cde/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bed0f8a0eeea9fb72937ba118f9db0cb7e90773462af7962d382445f3005e5a4", size = 1854282, upload-time = "2024-11-22T00:21:53.098Z" }, + { url = "https://files.pythonhosted.org/packages/5f/84/7db66eb12a0dc88c006abd6f3cbbf4232d26adfd827a28638c540d8f871d/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3cb37038123447cf0f3ea4c74751f6a9d7afef0eb71aa07bf5f652b5e6a132c", size = 2037437, upload-time = "2024-11-22T00:21:55.185Z" }, + { url = "https://files.pythonhosted.org/packages/34/ac/a2537958db8299fbabed81167d58cc1506049dba4163433524e06a7d9f4c/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:84286494f6c5d05243456e04223d5a9417d7f443c3b76065e75001beb26f88de", size = 2780899, upload-time = 
"2024-11-22T00:21:56.633Z" }, + { url = "https://files.pythonhosted.org/packages/4a/c1/3e38cd777ef832c4fdce11d204592e135ddeedb6c6f525478a53d1c7d3e5/pydantic_core-2.27.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acc07b2cfc5b835444b44a9956846b578d27beeacd4b52e45489e93276241025", size = 2135022, upload-time = "2024-11-22T00:21:59.154Z" }, + { url = "https://files.pythonhosted.org/packages/7a/69/b9952829f80fd555fe04340539d90e000a146f2a003d3fcd1e7077c06c71/pydantic_core-2.27.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:4fefee876e07a6e9aad7a8c8c9f85b0cdbe7df52b8a9552307b09050f7512c7e", size = 1987969, upload-time = "2024-11-22T00:22:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/72/257b5824d7988af43460c4e22b63932ed651fe98804cc2793068de7ec554/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:258c57abf1188926c774a4c94dd29237e77eda19462e5bb901d88adcab6af919", size = 1994625, upload-time = "2024-11-22T00:22:03.447Z" }, + { url = "https://files.pythonhosted.org/packages/73/c3/78ed6b7f3278a36589bcdd01243189ade7fc9b26852844938b4d7693895b/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:35c14ac45fcfdf7167ca76cc80b2001205a8d5d16d80524e13508371fb8cdd9c", size = 2090089, upload-time = "2024-11-22T00:22:04.941Z" }, + { url = "https://files.pythonhosted.org/packages/8d/c8/b4139b2f78579960353c4cd987e035108c93a78371bb19ba0dc1ac3b3220/pydantic_core-2.27.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d1b26e1dff225c31897696cab7d4f0a315d4c0d9e8666dbffdb28216f3b17fdc", size = 2142496, upload-time = "2024-11-22T00:22:06.57Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f8/171a03e97eb36c0b51981efe0f78460554a1d8311773d3d30e20c005164e/pydantic_core-2.27.1-cp311-none-win32.whl", hash = "sha256:2cdf7d86886bc6982354862204ae3b2f7f96f21a3eb0ba5ca0ac42c7b38598b9", size = 1811758, upload-time = "2024-11-22T00:22:08.445Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/fe/4e0e63c418c1c76e33974a05266e5633e879d4061f9533b1706a86f77d5b/pydantic_core-2.27.1-cp311-none-win_amd64.whl", hash = "sha256:3af385b0cee8df3746c3f406f38bcbfdc9041b5c2d5ce3e5fc6637256e60bbc5", size = 1980864, upload-time = "2024-11-22T00:22:10Z" }, + { url = "https://files.pythonhosted.org/packages/50/fc/93f7238a514c155a8ec02fc7ac6376177d449848115e4519b853820436c5/pydantic_core-2.27.1-cp311-none-win_arm64.whl", hash = "sha256:81f2ec23ddc1b476ff96563f2e8d723830b06dceae348ce02914a37cb4e74b89", size = 1864327, upload-time = "2024-11-22T00:22:11.478Z" }, + { url = "https://files.pythonhosted.org/packages/be/51/2e9b3788feb2aebff2aa9dfbf060ec739b38c05c46847601134cc1fed2ea/pydantic_core-2.27.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9cbd94fc661d2bab2bc702cddd2d3370bbdcc4cd0f8f57488a81bcce90c7a54f", size = 1895239, upload-time = "2024-11-22T00:22:13.775Z" }, + { url = "https://files.pythonhosted.org/packages/7b/9e/f8063952e4a7d0127f5d1181addef9377505dcce3be224263b25c4f0bfd9/pydantic_core-2.27.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5f8c4718cd44ec1580e180cb739713ecda2bdee1341084c1467802a417fe0f02", size = 1805070, upload-time = "2024-11-22T00:22:15.438Z" }, + { url = "https://files.pythonhosted.org/packages/2c/9d/e1d6c4561d262b52e41b17a7ef8301e2ba80b61e32e94520271029feb5d8/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:15aae984e46de8d376df515f00450d1522077254ef6b7ce189b38ecee7c9677c", size = 1828096, upload-time = "2024-11-22T00:22:17.892Z" }, + { url = "https://files.pythonhosted.org/packages/be/65/80ff46de4266560baa4332ae3181fffc4488ea7d37282da1a62d10ab89a4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1ba5e3963344ff25fc8c40da90f44b0afca8cfd89d12964feb79ac1411a260ac", size = 1857708, upload-time = "2024-11-22T00:22:19.412Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/ca/3370074ad758b04d9562b12ecdb088597f4d9d13893a48a583fb47682cdf/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:992cea5f4f3b29d6b4f7f1726ed8ee46c8331c6b4eed6db5b40134c6fe1768bb", size = 2037751, upload-time = "2024-11-22T00:22:20.979Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e2/4ab72d93367194317b99d051947c071aef6e3eb95f7553eaa4208ecf9ba4/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0325336f348dbee6550d129b1627cb8f5351a9dc91aad141ffb96d4937bd9529", size = 2733863, upload-time = "2024-11-22T00:22:22.951Z" }, + { url = "https://files.pythonhosted.org/packages/8a/c6/8ae0831bf77f356bb73127ce5a95fe115b10f820ea480abbd72d3cc7ccf3/pydantic_core-2.27.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7597c07fbd11515f654d6ece3d0e4e5093edc30a436c63142d9a4b8e22f19c35", size = 2161161, upload-time = "2024-11-22T00:22:24.785Z" }, + { url = "https://files.pythonhosted.org/packages/f1/f4/b2fe73241da2429400fc27ddeaa43e35562f96cf5b67499b2de52b528cad/pydantic_core-2.27.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3bbd5d8cc692616d5ef6fbbbd50dbec142c7e6ad9beb66b78a96e9c16729b089", size = 1993294, upload-time = "2024-11-22T00:22:27.076Z" }, + { url = "https://files.pythonhosted.org/packages/77/29/4bb008823a7f4cc05828198153f9753b3bd4c104d93b8e0b1bfe4e187540/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:dc61505e73298a84a2f317255fcc72b710b72980f3a1f670447a21efc88f8381", size = 2001468, upload-time = "2024-11-22T00:22:29.346Z" }, + { url = "https://files.pythonhosted.org/packages/f2/a9/0eaceeba41b9fad851a4107e0cf999a34ae8f0d0d1f829e2574f3d8897b0/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:e1f735dc43da318cad19b4173dd1ffce1d84aafd6c9b782b3abc04a0d5a6f5bb", size = 2091413, upload-time = "2024-11-22T00:22:30.984Z" }, + { url 
= "https://files.pythonhosted.org/packages/d8/36/eb8697729725bc610fd73940f0d860d791dc2ad557faaefcbb3edbd2b349/pydantic_core-2.27.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:f4e5658dbffe8843a0f12366a4c2d1c316dbe09bb4dfbdc9d2d9cd6031de8aae", size = 2154735, upload-time = "2024-11-22T00:22:32.616Z" }, + { url = "https://files.pythonhosted.org/packages/52/e5/4f0fbd5c5995cc70d3afed1b5c754055bb67908f55b5cb8000f7112749bf/pydantic_core-2.27.1-cp312-none-win32.whl", hash = "sha256:672ebbe820bb37988c4d136eca2652ee114992d5d41c7e4858cdd90ea94ffe5c", size = 1833633, upload-time = "2024-11-22T00:22:35.027Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f2/c61486eee27cae5ac781305658779b4a6b45f9cc9d02c90cb21b940e82cc/pydantic_core-2.27.1-cp312-none-win_amd64.whl", hash = "sha256:66ff044fd0bb1768688aecbe28b6190f6e799349221fb0de0e6f4048eca14c16", size = 1986973, upload-time = "2024-11-22T00:22:37.502Z" }, + { url = "https://files.pythonhosted.org/packages/df/a6/e3f12ff25f250b02f7c51be89a294689d175ac76e1096c32bf278f29ca1e/pydantic_core-2.27.1-cp312-none-win_arm64.whl", hash = "sha256:9a3b0793b1bbfd4146304e23d90045f2a9b5fd5823aa682665fbdaf2a6c28f3e", size = 1883215, upload-time = "2024-11-22T00:22:39.186Z" }, + { url = "https://files.pythonhosted.org/packages/0f/d6/91cb99a3c59d7b072bded9959fbeab0a9613d5a4935773c0801f1764c156/pydantic_core-2.27.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:f216dbce0e60e4d03e0c4353c7023b202d95cbaeff12e5fd2e82ea0a66905073", size = 1895033, upload-time = "2024-11-22T00:22:41.087Z" }, + { url = "https://files.pythonhosted.org/packages/07/42/d35033f81a28b27dedcade9e967e8a40981a765795c9ebae2045bcef05d3/pydantic_core-2.27.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a2e02889071850bbfd36b56fd6bc98945e23670773bc7a76657e90e6b6603c08", size = 1807542, upload-time = "2024-11-22T00:22:43.341Z" }, + { url = 
"https://files.pythonhosted.org/packages/41/c2/491b59e222ec7e72236e512108ecad532c7f4391a14e971c963f624f7569/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42b0e23f119b2b456d07ca91b307ae167cc3f6c846a7b169fca5326e32fdc6cf", size = 1827854, upload-time = "2024-11-22T00:22:44.96Z" }, + { url = "https://files.pythonhosted.org/packages/e3/f3/363652651779113189cefdbbb619b7b07b7a67ebb6840325117cc8cc3460/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:764be71193f87d460a03f1f7385a82e226639732214b402f9aa61f0d025f0737", size = 1857389, upload-time = "2024-11-22T00:22:47.305Z" }, + { url = "https://files.pythonhosted.org/packages/5f/97/be804aed6b479af5a945daec7538d8bf358d668bdadde4c7888a2506bdfb/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1c00666a3bd2f84920a4e94434f5974d7bbc57e461318d6bb34ce9cdbbc1f6b2", size = 2037934, upload-time = "2024-11-22T00:22:49.093Z" }, + { url = "https://files.pythonhosted.org/packages/42/01/295f0bd4abf58902917e342ddfe5f76cf66ffabfc57c2e23c7681a1a1197/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3ccaa88b24eebc0f849ce0a4d09e8a408ec5a94afff395eb69baf868f5183107", size = 2735176, upload-time = "2024-11-22T00:22:50.822Z" }, + { url = "https://files.pythonhosted.org/packages/9d/a0/cd8e9c940ead89cc37812a1a9f310fef59ba2f0b22b4e417d84ab09fa970/pydantic_core-2.27.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c65af9088ac534313e1963443d0ec360bb2b9cba6c2909478d22c2e363d98a51", size = 2160720, upload-time = "2024-11-22T00:22:52.638Z" }, + { url = "https://files.pythonhosted.org/packages/73/ae/9d0980e286627e0aeca4c352a60bd760331622c12d576e5ea4441ac7e15e/pydantic_core-2.27.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:206b5cf6f0c513baffaeae7bd817717140770c74528f3e4c3e1cec7871ddd61a", size = 1992972, 
upload-time = "2024-11-22T00:22:54.31Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ba/ae4480bc0292d54b85cfb954e9d6bd226982949f8316338677d56541b85f/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:062f60e512fc7fff8b8a9d680ff0ddaaef0193dba9fa83e679c0c5f5fbd018bc", size = 2001477, upload-time = "2024-11-22T00:22:56.451Z" }, + { url = "https://files.pythonhosted.org/packages/55/b7/e26adf48c2f943092ce54ae14c3c08d0d221ad34ce80b18a50de8ed2cba8/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:a0697803ed7d4af5e4c1adf1670af078f8fcab7a86350e969f454daf598c4960", size = 2091186, upload-time = "2024-11-22T00:22:58.226Z" }, + { url = "https://files.pythonhosted.org/packages/ba/cc/8491fff5b608b3862eb36e7d29d36a1af1c945463ca4c5040bf46cc73f40/pydantic_core-2.27.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:58ca98a950171f3151c603aeea9303ef6c235f692fe555e883591103da709b23", size = 2154429, upload-time = "2024-11-22T00:22:59.985Z" }, + { url = "https://files.pythonhosted.org/packages/78/d8/c080592d80edd3441ab7f88f865f51dae94a157fc64283c680e9f32cf6da/pydantic_core-2.27.1-cp313-none-win32.whl", hash = "sha256:8065914ff79f7eab1599bd80406681f0ad08f8e47c880f17b416c9f8f7a26d05", size = 1833713, upload-time = "2024-11-22T00:23:01.715Z" }, + { url = "https://files.pythonhosted.org/packages/83/84/5ab82a9ee2538ac95a66e51f6838d6aba6e0a03a42aa185ad2fe404a4e8f/pydantic_core-2.27.1-cp313-none-win_amd64.whl", hash = "sha256:ba630d5e3db74c79300d9a5bdaaf6200172b107f263c98a0539eeecb857b2337", size = 1987897, upload-time = "2024-11-22T00:23:03.497Z" }, + { url = "https://files.pythonhosted.org/packages/df/c3/b15fb833926d91d982fde29c0624c9f225da743c7af801dace0d4e187e71/pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = "sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5", size = 1882983, upload-time = "2024-11-22T00:23:05.983Z" }, ] [[package]] @@ -271,18 +274,18 @@ dependencies = [ { name = "idna" }, { 
name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218 } +sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928 }, + { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, ] [[package]] name = "sniffio" version = "1.3.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372 } +sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 
10235 }, + { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" }, ] [[package]] @@ -294,9 +297,9 @@ dependencies = [ { name = "starlette" }, { name = "uvicorn" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/72/fc/56ab9f116b2133521f532fce8d03194cf04dcac25f583cf3d839be4c0496/sse_starlette-2.1.3.tar.gz", hash = "sha256:9cd27eb35319e1414e3d2558ee7414487f9529ce3b3cf9b21434fd110e017169", size = 19678 } +sdist = { url = "https://files.pythonhosted.org/packages/72/fc/56ab9f116b2133521f532fce8d03194cf04dcac25f583cf3d839be4c0496/sse_starlette-2.1.3.tar.gz", hash = "sha256:9cd27eb35319e1414e3d2558ee7414487f9529ce3b3cf9b21434fd110e017169", size = 19678, upload-time = "2024-08-01T08:52:50.248Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/52/aa/36b271bc4fa1d2796311ee7c7283a3a1c348bad426d37293609ca4300eef/sse_starlette-2.1.3-py3-none-any.whl", hash = "sha256:8ec846438b4665b9e8c560fcdea6bc8081a3abf7942faa95e5a744999d219772", size = 9383 }, + { url = "https://files.pythonhosted.org/packages/52/aa/36b271bc4fa1d2796311ee7c7283a3a1c348bad426d37293609ca4300eef/sse_starlette-2.1.3-py3-none-any.whl", hash = "sha256:8ec846438b4665b9e8c560fcdea6bc8081a3abf7942faa95e5a744999d219772", size = 9383, upload-time = "2024-08-01T08:52:48.659Z" }, ] [[package]] @@ -306,27 +309,27 @@ source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159 } +sdist = { url = 
"https://files.pythonhosted.org/packages/1a/4c/9b5764bd22eec91c4039ef4c55334e9187085da2d8a2df7bd570869aae18/starlette-0.41.3.tar.gz", hash = "sha256:0e4ab3d16522a255be6b28260b938eae2482f98ce5cc934cb08dce8dc3ba5835", size = 2574159, upload-time = "2024-11-18T19:45:04.283Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225 }, + { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225, upload-time = "2024-11-18T19:45:02.027Z" }, ] [[package]] name = "typing-extensions" version = "4.12.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321 } +sdist = { url = "https://files.pythonhosted.org/packages/df/db/f35a00659bc03fec321ba8bce9420de607a1d37f8342eee1863174c69557/typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8", size = 85321, upload-time = "2024-06-07T18:52:15.995Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438 }, + { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = 
"sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" }, ] [[package]] name = "urllib3" version = "2.2.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677 } +sdist = { url = "https://files.pythonhosted.org/packages/ed/63/22ba4ebfe7430b76388e7cd448d5478814d3032121827c12a2cc287e2260/urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9", size = 300677, upload-time = "2024-09-12T10:52:18.401Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338 }, + { url = "https://files.pythonhosted.org/packages/ce/d9/5f4c13cecde62396b0d3fe530a50ccea91e7dfc1ccf0e09c228841bb5ba8/urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac", size = 126338, upload-time = "2024-09-12T10:52:16.589Z" }, ] [[package]] @@ -337,7 +340,34 @@ dependencies = [ { name = "click" }, { name = "h11" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/6a/3c/21dba3e7d76138725ef307e3d7ddd29b763119b3aa459d02cc05fefcff75/uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175", size = 77630 } +sdist = { url = "https://files.pythonhosted.org/packages/6a/3c/21dba3e7d76138725ef307e3d7ddd29b763119b3aa459d02cc05fefcff75/uvicorn-0.32.1.tar.gz", hash = "sha256:ee9519c246a72b1c084cea8d3b44ed6026e78a4a309cbedae9c37e4cb9fbb175", size = 77630, upload-time = "2024-11-20T19:41:13.341Z" } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/50/c1/2d27b0a15826c2b71dcf6e2f5402181ef85acf439617bb2f1453125ce1f3/uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e", size = 63828 }, + { url = "https://files.pythonhosted.org/packages/50/c1/2d27b0a15826c2b71dcf6e2f5402181ef85acf439617bb2f1453125ce1f3/uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e", size = 63828, upload-time = "2024-11-20T19:41:11.244Z" }, +] + +[[package]] +name = "watchdog" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, + { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, + { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, + { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, + { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, + { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, + { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, + { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, + { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, + { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, + { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, + { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, + { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, + { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, + { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] From d99f8c5b5370a67c9c81b689d3766103f7086624 Mon Sep 17 00:00:00 2001 From: Sara Date: Sat, 31 May 2025 11:42:15 -0400 Subject: [PATCH 03/17] adds resources --- src/devrev_mcp/server.py | 83 ++++++++++++++++++++++------------------ 1 file changed, 46 insertions(+), 37 deletions(-) diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 8baacfe..dca272c 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -43,34 +43,23 @@ async def handle_list_tools() -> list[types.Tool]: @server.list_resources() @debug_error_handler -async def handle_list_resources() -> list[types.Resource]: +async def handle_list_resources() -> list[types.Resource | types.ResourceTemplate]: """ - List available resources. - Each resource can be accessed via the read_resource handler. 
+ List available resources and resource templates. + Resource templates allow dynamic access to DevRev objects by ID. """ resources = [] - for resource_id in devrev_cache.keys(): - resource_data = devrev_cache[resource_id] - if ':comment/' in resource_id: - # Timeline comment resource - resources.append( - types.Resource( - uri=AnyUrl(f"devrev://{resource_id}"), - name=f"Comment {resource_id.split('/')[-1]}", - description=f"DevRev timeline comment {resource_id}", - mimeType="application/json" - ) - ) - else: - # Work item or other resource - resources.append( - types.Resource( - uri=AnyUrl(f"devrev://{resource_id}"), - name=f"DevRev {resource_id.split('/')[-2] if '/' in resource_id else 'Resource'} {resource_id.split('/')[-1] if '/' in resource_id else resource_id}", - description=f"DevRev resource {resource_id}", - mimeType="application/json" - ) - ) + + # Add resource template for dynamic DevRev object access + resources.append( + types.ResourceTemplate( + uriTemplate="devrev://{id}", + name="DevRev Object", + description="Access any DevRev object (tickets, comments, issues, etc.) by its full DevRev ID", + mimeType="application/json" + ) + ) + return resources @server.read_resource() @@ -78,25 +67,45 @@ async def handle_list_resources() -> list[types.Resource]: async def handle_read_resource(uri: AnyUrl) -> str: """ Read a specific resource by URI. + Supports the devrev://{id} template for dynamic access to DevRev objects. 
""" uri_str = str(uri) if uri_str.startswith("devrev://"): resource_id = uri_str.replace("devrev://", "") + + # First check if already cached if resource_id in devrev_cache: return devrev_cache[resource_id] - else: - # If not in cache, try to fetch it based on resource type - # For work items, we can fetch them directly - try: + + # If not cached, try to fetch based on DevRev ID structure + try: + # Determine resource type based on ID pattern + if ':ticket/' in resource_id or ':issue/' in resource_id: + # Work items (tickets/issues) + response = make_devrev_request("works.get", {"id": resource_id}) + elif ':comment/' in resource_id or ':change_event/' in resource_id: + # Timeline entries - these should already be cached from timeline tool + raise ValueError(f"Timeline entry {resource_id} not found in cache. Use get_timeline_entries tool first.") + elif ':part/' in resource_id: + # Parts + response = make_devrev_request("parts.get", {"id": resource_id}) + elif ':dev_user/' in resource_id: + # Dev users + response = make_devrev_request("dev-users.get", {"id": resource_id}) + else: + # Generic work item fallback response = make_devrev_request("works.get", {"id": resource_id}) - if response.status_code == 200: - resource_data = response.json() - devrev_cache[resource_id] = json.dumps(resource_data) - return json.dumps(resource_data) - else: - raise ValueError(f"Resource {resource_id} not found or inaccessible") - except Exception as e: - raise ValueError(f"Resource {resource_id} not found in cache and could not be fetched: {str(e)}") + + if response.status_code == 200: + resource_data = response.json() + # Cache for future access + devrev_cache[resource_id] = json.dumps(resource_data) + return json.dumps(resource_data) + else: + raise ValueError(f"Resource {resource_id} not found or inaccessible (HTTP {response.status_code})") + + except Exception as e: + raise ValueError(f"Resource {resource_id} not found in cache and could not be fetched: {str(e)}") else: raise 
ValueError(f"Unknown resource URI: {uri}") From d840f8bb571a7c0fbe31305b7cf229573548189c Mon Sep 17 00:00:00 2001 From: Sara Date: Sat, 31 May 2025 15:48:01 -0400 Subject: [PATCH 04/17] switching over to fastmcp --- pyproject.toml | 11 +- src/devrev_mcp/__init__.py | 17 +- src/devrev_mcp/server.py | 414 +++++++++++++------ src/devrev_mcp/tools/__init__.py | 23 -- src/devrev_mcp/tools/base.py | 63 --- src/devrev_mcp/tools/create_work.py | 96 ----- src/devrev_mcp/tools/get_timeline_entries.py | 137 ------ src/devrev_mcp/tools/get_work.py | 72 ---- src/devrev_mcp/tools/search.py | 64 --- uv.lock | 215 +++++++++- 10 files changed, 516 insertions(+), 596 deletions(-) delete mode 100644 src/devrev_mcp/tools/__init__.py delete mode 100644 src/devrev_mcp/tools/base.py delete mode 100644 src/devrev_mcp/tools/create_work.py delete mode 100644 src/devrev_mcp/tools/get_timeline_entries.py delete mode 100644 src/devrev_mcp/tools/get_work.py delete mode 100644 src/devrev_mcp/tools/search.py diff --git a/pyproject.toml b/pyproject.toml index f9862a7..8120924 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,14 +4,19 @@ version = "0.1.1" description = "A MCP server project" readme = "README.md" requires-python = ">=3.11" -dependencies = [ "mcp>=1.0.0", "requests", "watchdog"] +dependencies = [ + "fastmcp>=2.0.0", + "requests", + "watchdog" +] + [[project.authors]] name = "Sunil Pandey" email = "sunil.pandey@devrev.ai" [build-system] -requires = [ "hatchling",] +requires = ["hatchling"] build-backend = "hatchling.build" [project.scripts] -devrev-mcp = "devrev_mcp:main" +devrev-mcp = "devrev_mcp:main_cli" diff --git a/src/devrev_mcp/__init__.py b/src/devrev_mcp/__init__.py index 38116a1..66decd6 100644 --- a/src/devrev_mcp/__init__.py +++ b/src/devrev_mcp/__init__.py @@ -2,15 +2,16 @@ Copyright (c) 2025 DevRev, Inc. SPDX-License-Identifier: MIT -DevRev MCP server package initialization. +DevRev MCP Server package. 
+This package provides a FastMCP-based server for interacting with DevRev APIs. """ -from . import server -import asyncio +from .server import main, mcp -def main(): - """Main entry point for the package.""" - asyncio.run(server.main()) +__version__ = "0.1.1" +__all__ = ["main", "mcp"] -# Optionally expose other important items at package level -__all__ = ['main', 'server'] +# Export the main function for the CLI entry point +def main_cli(): + """CLI entry point for the DevRev MCP server.""" + main() diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index dca272c..8902fbe 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -2,146 +2,316 @@ Copyright (c) 2025 DevRev, Inc. SPDX-License-Identifier: MIT -This module implements the MCP server for DevRev integration. +This module implements the FastMCP server for DevRev integration. """ import json import os -from pathlib import Path +from typing import Dict, Any -from mcp.server.models import InitializationOptions -import mcp.types as types -from mcp.server import NotificationOptions, Server -from pydantic import AnyUrl -import mcp.server.stdio +from fastmcp import FastMCP, Context +from mcp import types from .utils import make_devrev_request -from .tools import TOOLS, TOOL_MAP -from .debug import debug_error_handler -server = Server("devrev_mcp") +# Check debug mode and store state +DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" +DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" + +# Create the FastMCP server +mcp = FastMCP( + name="devrev_mcp", + version="0.1.1", + description="DevRev MCP Server - Provides tools for interacting with DevRev API" +) # Store DevRev resources (works, comments, etc.) 
for resource access devrev_cache = {} -# Check debug mode and store state -DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" -DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" +# @mcp.tool() +# async def search(query: str, namespace: str, ctx: Context) -> str: +# """ +# Search DevRev using the provided query. + +# Args: +# query: The search query string +# namespace: The namespace to search in (article, issue, ticket, part, dev_user) + +# Returns: +# JSON string containing search results +# """ +# if namespace not in ["article", "issue", "ticket", "part", "dev_user"]: +# raise ValueError(f"Invalid namespace '{namespace}'. Must be one of: article, issue, ticket, part, dev_user") + +# try: +# await ctx.info(f"Searching DevRev for '{query}' in namespace '{namespace}'") + +# response = make_devrev_request( +# "search.hybrid", +# {"query": query, "namespace": namespace} +# ) + +# if response.status_code != 200: +# error_text = response.text +# await ctx.error(f"Search failed with status {response.status_code}: {error_text}") +# raise ValueError(f"Search failed with status {response.status_code}: {error_text}") + +# search_results = response.json() +# await ctx.info(f"Search completed successfully with {len(search_results.get('results', []))} results") + +# return json.dumps(search_results, indent=2) + +# except Exception as e: +# await ctx.error(f"Search operation failed: {str(e)}") +# raise -# Initialize tools with cache access -for tool in TOOLS: - if hasattr(tool, 'set_cache'): - tool.set_cache(devrev_cache) +# @mcp.tool() +# async def create_object( +# type: str, +# title: str, +# applies_to_part: str, +# body: str = "", +# owned_by: list[str] = None, +# ctx: Context = None +# ) -> str: +# """ +# Create a new issue or ticket in DevRev. 
+ +# Args: +# type: The type of object to create ("issue" or "ticket") +# title: The title/summary of the object +# applies_to_part: The part ID this object applies to +# body: The body/description of the object (optional) +# owned_by: List of user IDs who should own this object (optional) + +# Returns: +# JSON string containing the created object information +# """ +# if type not in ["issue", "ticket"]: +# raise ValueError(f"Invalid type '{type}'. Must be 'issue' or 'ticket'") + +# try: +# await ctx.info(f"Creating new {type}: {title}") + +# payload = { +# "type": type, +# "title": title, +# "applies_to_part": applies_to_part +# } + +# if body: +# payload["body"] = body +# if owned_by: +# payload["owned_by"] = owned_by + +# response = make_devrev_request("works.create", payload) + +# if response.status_code != 200: +# error_text = response.text +# await ctx.error(f"Failed to create {type}: HTTP {response.status_code} - {error_text}") +# raise ValueError(f"Failed to create {type} (HTTP {response.status_code}): {error_text}") + +# result_data = response.json() +# await ctx.info(f"Successfully created {type} with ID: {result_data.get('work', {}).get('id', 'unknown')}") + +# return json.dumps(result_data, indent=2) + +# except Exception as e: +# await ctx.error(f"Failed to create {type}: {str(e)}") +# raise -@server.list_tools() -@debug_error_handler -async def handle_list_tools() -> list[types.Tool]: - """ - List available tools. - Each tool specifies its arguments using JSON Schema validation. - """ - return [tool.to_mcp_tool() for tool in TOOLS] +# @mcp.tool() +# async def update_object( +# id: str, +# type: str, +# title: str = None, +# body: str = None, +# ctx: Context = None +# ) -> str: +# """ +# Update an existing issue or ticket in DevRev. 
+ +# Args: +# id: The ID of the object to update +# type: The type of object ("issue" or "ticket") +# title: New title for the object (optional) +# body: New body/description for the object (optional) + +# Returns: +# JSON string containing the updated object information +# """ +# if type not in ["issue", "ticket"]: +# raise ValueError(f"Invalid type '{type}'. Must be 'issue' or 'ticket'") + +# if not title and not body: +# raise ValueError("At least one of 'title' or 'body' must be provided for update") + +# try: +# await ctx.info(f"Updating {type} {id}") + +# payload = { +# "id": id, +# "type": type +# } + +# if title: +# payload["title"] = title +# if body: +# payload["body"] = body + +# response = make_devrev_request("works.update", payload) + +# if response.status_code != 200: +# error_text = response.text +# await ctx.error(f"Failed to update {type}: HTTP {response.status_code} - {error_text}") +# raise ValueError(f"Failed to update {type} (HTTP {response.status_code}): {error_text}") + +# result_data = response.json() + +# # Update cache if we have this object cached +# if id in devrev_cache: +# del devrev_cache[id] +# await ctx.info(f"Cleared cache for updated object: {id}") + +# await ctx.info(f"Successfully updated {type}: {id}") +# return json.dumps(result_data, indent=2) + +# except Exception as e: +# await ctx.error(f"Failed to update {type}: {str(e)}") +# raise -@server.list_resources() -@debug_error_handler -async def handle_list_resources() -> list[types.Resource | types.ResourceTemplate]: +@mcp.tool() +async def get_object(id: str, ctx: Context) -> str: """ - List available resources and resource templates. - Resource templates allow dynamic access to DevRev objects by ID. + Get all information about a DevRev issue and ticket using its ID. 
+ + Args: + id: The DevRev object ID + + Returns: + JSON string containing the object information """ - resources = [] - - # Add resource template for dynamic DevRev object access - resources.append( - types.ResourceTemplate( - uriTemplate="devrev://{id}", - name="DevRev Object", - description="Access any DevRev object (tickets, comments, issues, etc.) by its full DevRev ID", - mimeType="application/json" - ) - ) - - return resources - -@server.read_resource() -@debug_error_handler -async def handle_read_resource(uri: AnyUrl) -> str: + try: + await ctx.info(f"Fetching object {id} from DevRev") + + response = make_devrev_request("works.get", {"id": id}) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to get object {id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to get object {id} (HTTP {response.status_code}): {error_text}") + + result_data = response.json() + + # Cache the result + devrev_cache[id] = json.dumps(result_data, indent=2) + + await ctx.info(f"Successfully retrieved object: {id}") + return devrev_cache[id] + + except Exception as e: + await ctx.error(f"Failed to get object {id}: {str(e)}") + raise + +# Add dynamic resource access for DevRev objects +@mcp.resource( + uri="devrev://{id}", + description="Access any DevRev object (tickets, comments, issues, etc.) by its full DevRev ID.", + tags=["devrev_resource"] +) +async def get_devrev_resource(id: str, ctx: Context) -> str: """ - Read a specific resource by URI. - Supports the devrev://{id} template for dynamic access to DevRev objects. + Access any DevRev object (tickets, comments, issues, etc.) by its full DevRev ID. 
+ + Args: + id: The DevRev object ID + + Returns: + JSON string containing the object data """ - uri_str = str(uri) - if uri_str.startswith("devrev://"): - resource_id = uri_str.replace("devrev://", "") - - # First check if already cached - if resource_id in devrev_cache: - return devrev_cache[resource_id] - - # If not cached, try to fetch based on DevRev ID structure - try: - # Determine resource type based on ID pattern - if ':ticket/' in resource_id or ':issue/' in resource_id: - # Work items (tickets/issues) - response = make_devrev_request("works.get", {"id": resource_id}) - elif ':comment/' in resource_id or ':change_event/' in resource_id: - # Timeline entries - these should already be cached from timeline tool - raise ValueError(f"Timeline entry {resource_id} not found in cache. Use get_timeline_entries tool first.") - elif ':part/' in resource_id: - # Parts - response = make_devrev_request("parts.get", {"id": resource_id}) - elif ':dev_user/' in resource_id: - # Dev users - response = make_devrev_request("dev-users.get", {"id": resource_id}) - else: - # Generic work item fallback - response = make_devrev_request("works.get", {"id": resource_id}) - - if response.status_code == 200: - resource_data = response.json() - # Cache for future access - devrev_cache[resource_id] = json.dumps(resource_data) - return json.dumps(resource_data) + try: + # Check cache first + if id in devrev_cache: + await ctx.info(f"Retrieved resource {id} from cache") + return devrev_cache[id] + + # If not cached, fetch using get_object tool logic + await ctx.info(f"Fetching resource {id} from DevRev API") + # Handle special cases for tickets and comments + if ":ticket/" in id: + if ":comment/" in id: + # For comments, use timeline-entries.get endpoint + await ctx.info(f"Fetching comment {id}") + response = make_devrev_request( + "timeline-entries.get", + {"id": id} + ) else: - raise ValueError(f"Resource {resource_id} not found or inaccessible (HTTP {response.status_code})") - - 
except Exception as e: - raise ValueError(f"Resource {resource_id} not found in cache and could not be fetched: {str(e)}") - else: - raise ValueError(f"Unknown resource URI: {uri}") - -@server.call_tool() -async def handle_call_tool( - name: str, arguments: dict | None -) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + # For tickets, first get the ticket details + ticket_id = f"TKT-{id.split(':ticket/')[1]}" + await ctx.info(f"Fetching ticket {ticket_id}") + response = make_devrev_request( + "works.get", + {"id": ticket_id} + ) + + # Then get all comments via timeline entries + timeline_response = make_devrev_request( + "timeline-entries.list", + {"object": ticket_id} + ) + + if timeline_response.status_code == 200: + # Merge timeline entries into ticket response + result = response.json() + result["timeline_entries"] = timeline_response.json().get("timeline_entries", []) + devrev_cache[id] = json.dumps(result, indent=2) + return devrev_cache[id] + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to fetch {id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch {id} (HTTP {response.status_code}): {error_text}") + + result = response.json() + devrev_cache[id] = json.dumps(result, indent=2) + return devrev_cache[id] + + return await get_object(id, ctx) + + except Exception as e: + await ctx.error(f"Failed to get resource {id}: {str(e)}") + raise ValueError(f"Resource {id} not found in cache and could not be fetched: {str(e)}") + +@mcp.tool( + name="get_timeline_entries", + description="Get all timeline entries for a DevRev ticket using its ID. :ticket/12345>", + tags=["timeline_entries"] +) +async def get_timeline_entries(id: str, ctx: Context) -> str: """ - Handle tool execution requests. - Tools can modify server state and notify clients of changes. + Get all timeline entries for a DevRev ticket using its ID. 
The API response provided by the + + Args: + id: The DevRev ticket ID - don:core:dvrv-us-1:devo/:ticket/12345 """ - # Route to appropriate tool handler - if name in TOOL_MAP: - tool = TOOL_MAP[name] - return await tool.execute(arguments) - else: - raise ValueError(f"Unknown tool: {name}") - -async def main(): - # Check if debug mode is enabled and print debug message - if DEBUG_ENABLED: - print(DEBUG_MESSAGE) - else: - print(DEBUG_MESSAGE) - - # Run the server using stdin/stdout streams - async with mcp.server.stdio.stdio_server() as (read_stream, write_stream): - await server.run( - read_stream, - write_stream, - InitializationOptions( - server_name="devrev_mcp", - server_version="0.1.1", - capabilities=server.get_capabilities( - notification_options=NotificationOptions(), - experimental_capabilities={}, - ), - ), - ) + try: + await ctx.info(f"Fetching timeline entries for ticket {id}") + + content_list = await ctx.read_resource(id) + if not content_list: + return "No timeline entries found" + + return content_list + except Exception as e: + return f"Failed to get timeline entries for ticket {id}: {str(e)}" + +def main(): + """Main entry point for the DevRev MCP server.""" + # Print debug message + print(DEBUG_MESSAGE) + + # Run the server + mcp.run() + +if __name__ == "__main__": + main() diff --git a/src/devrev_mcp/tools/__init__.py b/src/devrev_mcp/tools/__init__.py deleted file mode 100644 index bbcb50d..0000000 --- a/src/devrev_mcp/tools/__init__.py +++ /dev/null @@ -1,23 +0,0 @@ -""" -Tools package for DevRev MCP server. -This module automatically discovers and registers all available tools. 
-""" - -from .base import BaseTool -from .search import SearchTool -from .get_work import GetWorkTool -from .create_work import CreateWorkTool -from .get_timeline_entries import GetTimelineEntresTool - -# Registry of all available tools -TOOLS = [ - SearchTool(), - GetWorkTool(), - CreateWorkTool(), - GetTimelineEntresTool(), -] - -# Create a mapping for easy tool lookup by name -TOOL_MAP = {tool.name: tool for tool in TOOLS} - -__all__ = ['BaseTool', 'TOOLS', 'TOOL_MAP'] \ No newline at end of file diff --git a/src/devrev_mcp/tools/base.py b/src/devrev_mcp/tools/base.py deleted file mode 100644 index b4ea3aa..0000000 --- a/src/devrev_mcp/tools/base.py +++ /dev/null @@ -1,63 +0,0 @@ -""" -Base tool class for DevRev MCP server tools. -""" - -from abc import ABC, abstractmethod -from typing import Dict, Any, List - -import mcp.types as types -from ..debug import debug_error_handler - - -class BaseTool(ABC): - """ - Abstract base class for all DevRev MCP tools. - - Each tool must implement: - - name: The tool's unique identifier - - description: Human-readable description of what the tool does - - input_schema: JSON Schema for validating tool arguments - - execute: The actual tool logic - """ - - @property - @abstractmethod - def name(self) -> str: - """Return the tool's unique name.""" - pass - - @property - @abstractmethod - def description(self) -> str: - """Return a description of what this tool does.""" - pass - - @property - @abstractmethod - def input_schema(self) -> Dict[str, Any]: - """Return the JSON Schema for validating tool arguments.""" - pass - - @debug_error_handler - async def execute(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: - """ - Execute the tool with the given arguments. - This method is automatically wrapped with error handling. 
- """ - return await self._execute_impl(arguments) - - @abstractmethod - async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: - """ - Internal implementation of the tool logic. - Subclasses should implement this method instead of execute(). - """ - pass - - def to_mcp_tool(self) -> types.Tool: - """Convert this tool to an MCP Tool object for registration.""" - return types.Tool( - name=self.name, - description=self.description, - inputSchema=self.input_schema - ) \ No newline at end of file diff --git a/src/devrev_mcp/tools/create_work.py b/src/devrev_mcp/tools/create_work.py deleted file mode 100644 index 3c83c4b..0000000 --- a/src/devrev_mcp/tools/create_work.py +++ /dev/null @@ -1,96 +0,0 @@ -""" -Create work tool for DevRev MCP server. -""" - -import json -from typing import Dict, Any, List -import mcp.types as types -from .base import BaseTool -from ..utils import make_devrev_request - - -class CreateWorkTool(BaseTool): - """Tool for creating a new issue or ticket in DevRev.""" - - def __init__(self): - # Reference to the cache - will be set by the server - self.devrev_cache = None - - def set_cache(self, cache: Dict[str, str]): - """Set the cache reference from the server.""" - self.devrev_cache = cache - - @property - def name(self) -> str: - return "create_work" - - @property - def description(self) -> str: - return "Create a new isssue or ticket in DevRev" - - @property - def input_schema(self) -> Dict[str, Any]: - return { - "type": "object", - "properties": { - "type": {"type": "string", "enum": ["issue", "ticket"]}, - "title": {"type": "string"}, - "body": {"type": "string"}, - "applies_to_part": {"type": "string"}, - "owned_by": {"type": "array", "items": {"type": "string"}} - }, - "required": ["type", "title", "applies_to_part"], - } - - async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | 
types.EmbeddedResource]: - if not arguments: - raise ValueError("Missing arguments") - - # Mandatory fields - object_type = arguments.get("type") - if not object_type: - raise ValueError("Missing type parameter") - - title = arguments.get("title") - if not title: - raise ValueError("Missing title parameter") - - applies_to_part = arguments.get("applies_to_part") - if not applies_to_part: - raise ValueError("Missing applies_to_part parameter") - - # Optional fields - body = arguments.get("body", "") - owned_by = arguments.get("owned_by", []) - - response = make_devrev_request( - "works.create", - { - "type": object_type, - "title": title, - "body": body, - "applies_to_part": applies_to_part, - "owned_by": owned_by - } - ) - if response.status_code != 201: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Create work failed with status {response.status_code}: {error_text}" - ) - ] - - created_work = response.json() - # Cache the created work data for resource access - if self.devrev_cache is not None and 'work' in created_work and 'id' in created_work['work']: - work_id = created_work['work']['id'] - self.devrev_cache[work_id] = json.dumps(created_work['work']) - - return [ - types.TextContent( - type="text", - text=f"Work created successfully: {created_work}" - ) - ] \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py deleted file mode 100644 index eaa18f7..0000000 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ /dev/null @@ -1,137 +0,0 @@ -""" -Get timeline entries tool for DevRev MCP server. 
-""" - -import json -from typing import Dict, Any, List -import mcp.types as types -from .base import BaseTool -from ..utils import make_devrev_request - - -class GetTimelineEntresTool(BaseTool): - """Tool for getting timeline entries for a DevRev object (ticket, issue, etc.).""" - - def __init__(self): - # Reference to the cache - will be set by the server - self.devrev_cache = None - - def set_cache(self, cache: Dict[str, str]): - """Set the cache reference from the server.""" - self.devrev_cache = cache - - @property - def name(self) -> str: - return "get_timeline_entries" - - @property - def description(self) -> str: - return "Get timeline entries for a DevRev object (ticket, issue, etc.)" - - @property - def input_schema(self) -> Dict[str, Any]: - return { - "type": "object", - "properties": { - "object_id": {"type": "string"}, - }, - "required": ["object_id"], - } - - async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: - if not arguments: - raise ValueError("Missing arguments") - - # Debug: check arguments type - if not isinstance(arguments, dict): - return [ - types.TextContent( - type="text", - text=f"Error: arguments is not a dict but {type(arguments)}: {arguments}" - ) - ] - - object_id = arguments.get("object_id") - if not object_id: - raise ValueError("Missing object_id parameter") - - try: - response = make_devrev_request( - "timeline-entries.list", - {"object": object_id} - ) - except Exception as e: - return [ - types.TextContent( - type="text", - text=f"Error making timeline request: {e}" - ) - ] - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Get timeline entries failed with status {response.status_code}: {error_text}" - ) - ] - - timeline_data = response.json() - - # Cache individual timeline entries as resources and build summary - entry_summary = [] - entry_count = 0 - if 'timeline_entries' 
in timeline_data: - for i, entry in enumerate(timeline_data['timeline_entries']): - # Debug: check entry type - if not isinstance(entry, dict): - return [ - types.TextContent( - type="text", - text=f"Error: Entry {i} is not a dict but {type(entry)}: {entry}" - ) - ] - if 'id' in entry and self.devrev_cache is not None: - entry_id = entry['id'] - self.devrev_cache[entry_id] = json.dumps(entry) - entry_count += 1 - - # Add summary info for this entry - entry_info = { - 'id': entry_id, - 'type': entry.get('type', 'unknown'), - 'created_date': entry.get('created_date'), - 'visibility': entry.get('visibility', {}).get('label', 'unknown') if isinstance(entry.get('visibility'), dict) else entry.get('visibility', 'unknown') - } - - # Add type-specific summary info - if entry.get('type') == 'timeline_comment': - body_preview = entry.get('body', '')[:100] + ('...' if len(entry.get('body', '')) > 100 else '') - entry_info['body_preview'] = body_preview - entry_info['created_by'] = entry.get('created_by', {}).get('display_name', 'unknown') - - entry_summary.append(entry_info) - - summary_text = f"""Timeline entries for '{object_id}': -Total entries: {entry_count} -Entries cached as resources (access via devrev://): - -""" - - for i, entry in enumerate(entry_summary[:10]): # Show first 10 entries in summary - summary_text += f"{i+1}. {entry['id']} ({entry['type']}) - {entry.get('created_date', 'no date')}\n" - if 'body_preview' in entry: - summary_text += f" Preview: {entry['body_preview']}\n" - if 'created_by' in entry: - summary_text += f" By: {entry['created_by']}\n" - summary_text += "\n" - - if entry_count > 10: - summary_text += f"... 
and {entry_count - 10} more entries (all available as resources)\n" - - return [ - types.TextContent( - type="text", - text=summary_text - ) - ] \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_work.py b/src/devrev_mcp/tools/get_work.py deleted file mode 100644 index 6b51085..0000000 --- a/src/devrev_mcp/tools/get_work.py +++ /dev/null @@ -1,72 +0,0 @@ -""" -Get work tool for DevRev MCP server. -""" - -import json -from typing import Dict, Any, List -import mcp.types as types -from .base import BaseTool -from ..utils import make_devrev_request - - -class GetWorkTool(BaseTool): - """Tool for getting information about a DevRev issue and ticket using its ID.""" - - def __init__(self): - # Reference to the cache - will be set by the server - self.devrev_cache = None - - def set_cache(self, cache: Dict[str, str]): - """Set the cache reference from the server.""" - self.devrev_cache = cache - - @property - def name(self) -> str: - return "get_work" - - @property - def description(self) -> str: - return "Get all information about a DevRev issue and ticket using its ID" - - @property - def input_schema(self) -> Dict[str, Any]: - return { - "type": "object", - "properties": { - "id": {"type": "string"}, - }, - "required": ["id"], - } - - async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: - if not arguments: - raise ValueError("Missing arguments") - - id = arguments.get("id") - if not id: - raise ValueError("Missing id parameter") - - response = make_devrev_request( - "works.get", - {"id": id} - ) - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Get work failed with status {response.status_code}: {error_text}" - ) - ] - - object_info = response.json() - # Cache the work data for resource access - if self.devrev_cache is not None: - self.devrev_cache[id] = json.dumps(object_info) - - return [ - 
types.TextContent( - type="text", - text=f"Work information for '{id}':\n{object_info}" - ) - ] \ No newline at end of file diff --git a/src/devrev_mcp/tools/search.py b/src/devrev_mcp/tools/search.py deleted file mode 100644 index e3465a5..0000000 --- a/src/devrev_mcp/tools/search.py +++ /dev/null @@ -1,64 +0,0 @@ -""" -Search tool for DevRev MCP server. -""" - -from typing import Dict, Any, List -import mcp.types as types -from .base import BaseTool -from ..utils import make_devrev_request - - -class SearchTool(BaseTool): - """Tool for searching DevRev using the provided query.""" - - @property - def name(self) -> str: - return "search" - - @property - def description(self) -> str: - return "Search DevRev using the provided query" - - @property - def input_schema(self) -> Dict[str, Any]: - return { - "type": "object", - "properties": { - "query": {"type": "string"}, - "namespace": {"type": "string", "enum": ["article", "issue", "ticket", "part", "dev_user"]}, - }, - "required": ["query", "namespace"], - } - - async def _execute_impl(self, arguments: Dict[str, Any] | None) -> List[types.TextContent | types.ImageContent | types.EmbeddedResource]: - if not arguments: - raise ValueError("Missing arguments") - - query = arguments.get("query") - if not query: - raise ValueError("Missing query parameter") - - namespace = arguments.get("namespace") - if not namespace: - raise ValueError("Missing namespace parameter") - - response = make_devrev_request( - "search.hybrid", - {"query": query, "namespace": namespace} - ) - if response.status_code != 200: - error_text = response.text - return [ - types.TextContent( - type="text", - text=f"Search failed with status {response.status_code}: {error_text}" - ) - ] - - search_results = response.json() - return [ - types.TextContent( - type="text", - text=f"Search results for '{query}':\n{search_results}" - ) - ] \ No newline at end of file diff --git a/uv.lock b/uv.lock index 1793963..75833f0 100644 --- a/uv.lock +++ b/uv.lock @@ 
-113,18 +113,49 @@ name = "devrev-mcp" version = "0.1.1" source = { editable = "." } dependencies = [ - { name = "mcp" }, + { name = "fastmcp" }, { name = "requests" }, { name = "watchdog" }, ] [package.metadata] requires-dist = [ - { name = "mcp", specifier = ">=1.0.0" }, + { name = "fastmcp", specifier = ">=2.0.0" }, { name = "requests" }, { name = "watchdog" }, ] +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "fastmcp" +version = "2.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "exceptiongroup" }, + { name = "httpx" }, + { name = "mcp" }, + { name = "openapi-pydantic" }, + { name = "python-dotenv" }, + { name = "rich" }, + { name = "typer" }, + { name = "websockets" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/20/cc/d2c0e63d2b34681bef4e077611dae662ea722add13a83dc4ae08b6e0fd23/fastmcp-2.5.2.tar.gz", hash = "sha256:761c92fb54f561136f631d7d98b4920152978f6f0a66a4cef689a7983fd05c8b", size = 1039189, upload-time = "2025-05-29T18:11:33.088Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3e/ac/caa94ff747e2136829ac2fea33b9583e086ca5431451751bcb2f773e087f/fastmcp-2.5.2-py3-none-any.whl", hash = 
"sha256:4ea46ef35c1308b369eff7c8a10e4c9639bed046fd646449c1227ac7c3856d83", size = 107502, upload-time = "2025-05-29T18:11:31.577Z" }, +] + [[package]] name = "h11" version = "0.14.0" @@ -149,7 +180,7 @@ wheels = [ [[package]] name = "httpx" -version = "0.28.0" +version = "0.28.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, @@ -157,9 +188,9 @@ dependencies = [ { name = "httpcore" }, { name = "idna" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/10/df/676b7cf674dd1bdc71a64ad393c89879f75e4a0ab8395165b498262ae106/httpx-0.28.0.tar.gz", hash = "sha256:0858d3bab51ba7e386637f22a61d8ccddaeec5f3fe4209da3a6168dbb91573e0", size = 141307, upload-time = "2024-11-28T14:54:56.977Z" } +sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/fb/a19866137577ba60c6d8b69498dc36be479b13ba454f691348ddf428f185/httpx-0.28.0-py3-none-any.whl", hash = "sha256:dc0b419a0cfeb6e8b34e85167c0da2671206f5095f1baa9663d23bcfd6b535fc", size = 73551, upload-time = "2024-11-28T14:54:55.141Z" }, + { url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" }, ] [[package]] @@ -180,21 +211,57 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, ] +[[package]] +name = "markdown-it-py" +version = 
"3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + [[package]] name = "mcp" -version = "1.0.0" +version = "1.9.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "httpx" }, { name = "httpx-sse" }, { name = "pydantic" }, + { name = "pydantic-settings" }, + { name = "python-multipart" }, { name = "sse-starlette" }, { name = "starlette" }, + { name = "uvicorn", marker = "sys_platform != 'emscripten'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/97/de/a9ec0a1b6439f90ea59f89004bb2e7ec6890dfaeef809751d9e6577dca7e/mcp-1.0.0.tar.gz", hash = "sha256:dba51ce0b5c6a80e25576f606760c49a91ee90210fed805b530ca165d3bbc9b7", size = 82891, upload-time = "2024-11-25T14:27:35.616Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/03/77c49cce3ace96e6787af624611b627b2828f0dca0f8df6f330a10eea51e/mcp-1.9.2.tar.gz", hash = "sha256:3c7651c053d635fd235990a12e84509fe32780cd359a5bbef352e20d4d963c05", size = 333066, upload-time = "2025-05-29T14:42:17.76Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/89/900c0c8445ec001d3725e475fc553b0feb2e8a51be018f3bb7de51e683db/mcp-1.0.0-py3-none-any.whl", hash = "sha256:bbe70ffa3341cd4da78b5eb504958355c68381fb29971471cea1e642a2af5b8a", size = 36361, upload-time = "2024-11-25T14:27:34.367Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/a6/8f5ee9da9f67c0fd8933f63d6105f02eabdac8a8c0926728368ffbb6744d/mcp-1.9.2-py3-none-any.whl", hash = "sha256:bc29f7fd67d157fef378f89a4210384f5fecf1168d0feb12d22929818723f978", size = 131083, upload-time = "2025-05-29T14:42:16.211Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "openapi-pydantic" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/02/2e/58d83848dd1a79cb92ed8e63f6ba901ca282c5f09d04af9423ec26c56fd7/openapi_pydantic-0.5.1.tar.gz", hash = "sha256:ff6835af6bde7a459fb93eb93bb92b8749b754fc6e51b2f1590a19dc3005ee0d", size = 60892, upload-time = "2025-01-08T19:29:27.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/cf/03675d8bd8ecbf4445504d8071adab19f5f993676795708e36402ab38263/openapi_pydantic-0.5.1-py3-none-any.whl", hash = "sha256:a3a09ef4586f5bd760a8df7f43028b60cafb6d9f61de2acba9574766255ab146", size = 96381, upload-time = "2025-01-08T19:29:25.275Z" }, ] [[package]] @@ -264,6 +331,47 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/df/c3/b15fb833926d91d982fde29c0624c9f225da743c7af801dace0d4e187e71/pydantic_core-2.27.1-cp313-none-win_arm64.whl", hash = 
"sha256:45cf8588c066860b623cd11c4ba687f8d7175d5f7ef65f7129df8a394c502de5", size = 1882983, upload-time = "2024-11-22T00:23:05.983Z" }, ] +[[package]] +name = "pydantic-settings" +version = "2.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "python-dotenv" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/67/1d/42628a2c33e93f8e9acbde0d5d735fa0850f3e6a2f8cb1eb6c40b9a732ac/pydantic_settings-2.9.1.tar.gz", hash = "sha256:c509bf79d27563add44e8446233359004ed85066cd096d8b510f715e6ef5d268", size = 163234, upload-time = "2025-04-18T16:44:48.265Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/5f/d6d641b490fd3ec2c4c13b4244d68deea3a1b970a97be64f34fb5504ff72/pydantic_settings-2.9.1-py3-none-any.whl", hash = "sha256:59b4f431b1defb26fe620c71a7d3968a710d719f5f4cdbbdb7926edeb770f6ef", size = 44356, upload-time = "2025-04-18T16:44:46.617Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7c/2d/c3338d48ea6cc0feb8446d8e6937e1408088a72a39937982cc6111d17f84/pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f", size = 4968581, upload-time = "2025-01-06T17:26:30.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8a/0b/9fcc47d19c48b59121088dd6da2488a49d5f72dacf8262e2790a1d2c7d15/pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c", size = 1225293, upload-time = "2025-01-06T17:26:25.553Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/88/2c/7bb1416c5620485aa793f2de31d3df393d3686aa8a8506d11e10e13c5baf/python_dotenv-1.1.0.tar.gz", hash = 
"sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5", size = 39920, upload-time = "2025-03-25T10:14:56.835Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/18/98a99ad95133c6a6e2005fe89faedf294a748bd5dc803008059409ac9b1e/python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d", size = 20256, upload-time = "2025-03-25T10:14:55.034Z" }, +] + +[[package]] +name = "python-multipart" +version = "0.0.20" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/87/f44d7c9f274c7ee665a29b885ec97089ec5dc034c7f3fafa03da9e39a09e/python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13", size = 37158, upload-time = "2024-12-16T19:45:46.972Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/58/38b5afbc1a800eeea951b9285d3912613f2603bdf897a4ab0f4bd7f405fc/python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104", size = 24546, upload-time = "2024-12-16T19:45:44.423Z" }, +] + [[package]] name = "requests" version = "2.32.3" @@ -279,6 +387,28 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" }, ] +[[package]] +name = "rich" +version = "14.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a1/53/830aa4c3066a8ab0ae9a9955976fb770fe9c6102117c8ec4ab3ea62d89e8/rich-14.0.0.tar.gz", hash = "sha256:82f1bc23a6a21ebca4ae0c45af9bdbc492ed20231dcb63f297d6d1021a9d5725", size = 224078, upload-time = 
"2025-03-30T14:15:14.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/9b/63f4c7ebc259242c89b3acafdb37b41d1185c07ff0011164674e9076b491/rich-14.0.0-py3-none-any.whl", hash = "sha256:1c9491e1951aac09caffd42f448ee3d04e58923ffe14993f6e83068dc395d7e0", size = 243229, upload-time = "2025-03-30T14:15:12.283Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + [[package]] name = "sniffio" version = "1.3.1" @@ -314,6 +444,21 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/96/00/2b325970b3060c7cecebab6d295afe763365822b1306a12eeab198f74323/starlette-0.41.3-py3-none-any.whl", hash = "sha256:44cedb2b7c77a9de33a8b74b2b90e9f50d11fcf25d8270ea525ad71a25374ff7", size = 73225, upload-time = "2024-11-18T19:45:02.027Z" }, ] +[[package]] +name = "typer" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/8c/7d682431efca5fd290017663ea4588bf6f2c6aad085c7f108c5dbc316e70/typer-0.16.0.tar.gz", hash = "sha256:af377ffaee1dbe37ae9440cb4e8f11686ea5ce4e9bae01b84ae7c63b87f1dd3b", size = 102625, upload-time = "2025-05-26T14:30:31.824Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/76/42/3efaf858001d2c2913de7f354563e3a3a2f0decae3efe98427125a8f441e/typer-0.16.0-py3-none-any.whl", hash = "sha256:1f79bed11d4d02d4310e3c1b7ba594183bcedb0ac73b27a9e5f28f6fb5b98855", size = 46317, upload-time = "2025-05-26T14:30:30.523Z" }, +] + [[package]] name = "typing-extensions" version = "4.12.2" @@ -323,6 +468,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/26/9f/ad63fc0248c5379346306f8668cda6e2e2e9c95e01216d2b8ffd9ff037d0/typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d", size = 37438, upload-time = "2024-06-07T18:52:13.582Z" }, ] +[[package]] +name = "typing-inspection" +version = "0.4.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" }, +] + [[package]] name = "urllib3" version = "2.2.3" @@ -371,3 +528,45 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = 
"sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, ] + +[[package]] +name = "websockets" +version = "15.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/32/18fcd5919c293a398db67443acd33fde142f283853076049824fc58e6f75/websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431", size = 175423, upload-time = "2025-03-05T20:01:56.276Z" }, + { url = "https://files.pythonhosted.org/packages/76/70/ba1ad96b07869275ef42e2ce21f07a5b0148936688c2baf7e4a1f60d5058/websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57", size = 173082, upload-time = "2025-03-05T20:01:57.563Z" }, + { url = "https://files.pythonhosted.org/packages/86/f2/10b55821dd40eb696ce4704a87d57774696f9451108cff0d2824c97e0f97/websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905", size = 173330, upload-time = "2025-03-05T20:01:59.063Z" }, + { url = "https://files.pythonhosted.org/packages/a5/90/1c37ae8b8a113d3daf1065222b6af61cc44102da95388ac0018fcb7d93d9/websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562", size = 182878, upload-time = "2025-03-05T20:02:00.305Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/8d/96e8e288b2a41dffafb78e8904ea7367ee4f891dafc2ab8d87e2124cb3d3/websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792", size = 181883, upload-time = "2025-03-05T20:02:03.148Z" }, + { url = "https://files.pythonhosted.org/packages/93/1f/5d6dbf551766308f6f50f8baf8e9860be6182911e8106da7a7f73785f4c4/websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413", size = 182252, upload-time = "2025-03-05T20:02:05.29Z" }, + { url = "https://files.pythonhosted.org/packages/d4/78/2d4fed9123e6620cbf1706c0de8a1632e1a28e7774d94346d7de1bba2ca3/websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8", size = 182521, upload-time = "2025-03-05T20:02:07.458Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/66d4c1b444dd1a9823c4a81f50231b921bab54eee2f69e70319b4e21f1ca/websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3", size = 181958, upload-time = "2025-03-05T20:02:09.842Z" }, + { url = "https://files.pythonhosted.org/packages/08/ff/e9eed2ee5fed6f76fdd6032ca5cd38c57ca9661430bb3d5fb2872dc8703c/websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf", size = 181918, upload-time = "2025-03-05T20:02:11.968Z" }, + { url = "https://files.pythonhosted.org/packages/d8/75/994634a49b7e12532be6a42103597b71098fd25900f7437d6055ed39930a/websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85", size = 176388, upload-time = "2025-03-05T20:02:13.32Z" }, + { url = 
"https://files.pythonhosted.org/packages/98/93/e36c73f78400a65f5e236cd376713c34182e6663f6889cd45a4a04d8f203/websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065", size = 176828, upload-time = "2025-03-05T20:02:14.585Z" }, + { url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" }, + { url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" }, + { url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" }, + { url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" }, + { url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" }, + { url = 
"https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" }, + { url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" }, + { url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" }, + { url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" }, + { url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" }, + { url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" }, + { url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" }, + { url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" }, + { url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = 
"2025-03-05T20:02:43.304Z" }, + { url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" }, + { url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" }, + { url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" }, +] From 92f0025e24eec8e90c3f78cbbb212054dba8f245 Mon Sep 17 00:00:00 2001 From: Sara Date: Sun, 1 Jun 2025 14:30:58 -0400 
Subject: [PATCH 05/17] has it working --- CLAUDE.md | 1 + src/devrev_mcp/resources/__init__.py | 5 + src/devrev_mcp/resources/artifact.py | 65 ++ src/devrev_mcp/resources/ticket.py | 117 +++ src/devrev_mcp/resources/timeline_entry.py | 76 ++ src/devrev_mcp/server.py | 745 +++++++++++++------ src/devrev_mcp/tools/__init__.py | 5 + src/devrev_mcp/tools/create_object.py | 67 ++ src/devrev_mcp/tools/get_object.py | 46 ++ src/devrev_mcp/tools/get_ticket.py | 130 ++++ src/devrev_mcp/tools/get_timeline_entries.py | 264 +++++++ src/devrev_mcp/tools/search.py | 135 ++++ src/devrev_mcp/tools/update_object.py | 74 ++ uv.lock | 2 +- 14 files changed, 1507 insertions(+), 225 deletions(-) create mode 100644 CLAUDE.md create mode 100644 src/devrev_mcp/resources/__init__.py create mode 100644 src/devrev_mcp/resources/artifact.py create mode 100644 src/devrev_mcp/resources/ticket.py create mode 100644 src/devrev_mcp/resources/timeline_entry.py create mode 100644 src/devrev_mcp/tools/__init__.py create mode 100644 src/devrev_mcp/tools/create_object.py create mode 100644 src/devrev_mcp/tools/get_object.py create mode 100644 src/devrev_mcp/tools/get_ticket.py create mode 100644 src/devrev_mcp/tools/get_timeline_entries.py create mode 100644 src/devrev_mcp/tools/search.py create mode 100644 src/devrev_mcp/tools/update_object.py diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..fb3d387 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1 @@ +- Reference https://gofastmcp.com/llms.txt \ No newline at end of file diff --git a/src/devrev_mcp/resources/__init__.py b/src/devrev_mcp/resources/__init__.py new file mode 100644 index 0000000..3a31bfd --- /dev/null +++ b/src/devrev_mcp/resources/__init__.py @@ -0,0 +1,5 @@ +""" +DevRev MCP Resources + +This module contains specialized resource handlers for different DevRev object types. 
+""" \ No newline at end of file diff --git a/src/devrev_mcp/resources/artifact.py b/src/devrev_mcp/resources/artifact.py new file mode 100644 index 0000000..2c06fd8 --- /dev/null +++ b/src/devrev_mcp/resources/artifact.py @@ -0,0 +1,65 @@ +""" +DevRev Artifact Resource Handler + +Provides specialized resource access for DevRev artifacts with metadata and download URLs. +""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..debug import debug_error_handler + + +@debug_error_handler +async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: + """ + Access DevRev artifact metadata. + + Args: + artifact_id: The DevRev artifact ID + ctx: FastMCP context + devrev_cache: Cache dictionary for storing results + + Returns: + JSON string containing the artifact metadata + """ + try: + cache_key = f"artifact:{artifact_id}" + + # Check cache first + if cache_key in devrev_cache: + await ctx.info(f"Retrieved artifact {artifact_id} from cache") + return devrev_cache[cache_key] + + await ctx.info(f"Fetching artifact {artifact_id} from DevRev API") + + # For artifacts, use artifacts.get endpoint + response = make_devrev_request( + "artifacts.get", + {"id": artifact_id} + ) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to fetch artifact {artifact_id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch artifact {artifact_id} (HTTP {response.status_code}): {error_text}") + + result = response.json() + + # Add navigation links to timeline entry (artifacts belong to timeline entries) + # Note: We'd need to determine the timeline entry ID from the artifact context + # For now, adding a placeholder structure that could be populated based on API response + result["links"] = { + "timeline_entry": "devrev://timeline-entries/{timeline_entry_id}", # Would need actual ID + "note": "Artifact belongs to a specific timeline entry, which belongs to a ticket" 
"""
DevRev Ticket Resource Handler

Provides specialized resource access for DevRev tickets with enriched timeline and artifact data.
"""

import json
from fastmcp import Context
from ..utils import make_devrev_request
from ..debug import debug_error_handler


# NOTE(review): unlike artifact.py / timeline_entry.py, this handler does not
# apply @debug_error_handler; it catches everything itself and returns an
# error payload instead of raising — confirm this asymmetry is intentional.
async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str:
    """
    Access DevRev ticket details with enriched timeline entries and artifact data.

    Args:
        ticket_id: The DevRev ticket ID; accepts "12345", "TKT-12345" or "tkt-12345"
        ctx: FastMCP context used for logging
        devrev_cache: Cache dictionary for storing results

    Returns:
        JSON string with the ticket data plus "timeline_entries", "artifacts"
        and "links" keys; on failure, a JSON object with an "error" key
        (this resource deliberately does not raise).
    """
    try:
        await ctx.info(f"[DEBUG] ticket() called with ticket_id: {ticket_id}")

        # Normalize every accepted spelling to TKT-<number> for API calls.
        if ticket_id.upper().startswith("TKT-"):
            numeric_id = ticket_id[4:]  # Remove TKT- or tkt-
        else:
            numeric_id = ticket_id
        normalized_id = f"TKT-{numeric_id}"

        # Fix: key the cache on the normalized numeric ID. Previously the raw
        # caller-supplied ID was used, so "12345" and "TKT-12345" produced two
        # divergent cache entries for the same ticket.
        cache_key = f"ticket:{numeric_id}"

        await ctx.info(f"[DEBUG] normalized_id: {normalized_id}, cache_key: {cache_key}")

        # Check cache first
        if cache_key in devrev_cache:
            await ctx.info(f"Retrieved ticket {normalized_id} from cache")
            return devrev_cache[cache_key]

        await ctx.info(f"Fetching ticket {normalized_id} from DevRev API")

        # Get ticket details
        response = make_devrev_request("works.get", {"id": normalized_id})

        if response.status_code != 200:
            error_text = response.text
            await ctx.error(f"Failed to fetch ticket {normalized_id}: HTTP {response.status_code} - {error_text}")
            raise ValueError(f"Failed to fetch ticket {normalized_id} (HTTP {response.status_code}): {error_text}")

        result = response.json()
        await ctx.info(f"[DEBUG] API response structure: {list(result.keys()) if isinstance(result, dict) else type(result)}")

        # Extract the work object from the API response envelope.
        if isinstance(result, dict) and "work" in result:
            result = result["work"]

        # Enrich with timeline entries and their artifacts. Failures here are
        # non-fatal: the ticket is still returned with empty lists.
        try:
            timeline_response = make_devrev_request(
                "timeline-entries.list",
                {"object": normalized_id}
            )

            if timeline_response.status_code == 200:
                timeline_data = timeline_response.json()
                timeline_entries = timeline_data.get("timeline_entries", [])
                result["timeline_entries"] = timeline_entries
                await ctx.info(f"Added {len(timeline_entries)} timeline entries to ticket {normalized_id}")

                # Gather artifact metadata referenced by the timeline entries.
                # NOTE(review): entries' "artifacts" are treated as plain IDs
                # here, while server.py's timeline handler treats them as
                # objects with an "id" field — confirm against the API.
                artifacts = []
                for entry in timeline_entries:
                    if "artifacts" in entry:
                        for artifact_id in entry["artifacts"]:
                            try:
                                artifact_response = make_devrev_request(
                                    "artifacts.get",
                                    {"id": artifact_id}
                                )
                                if artifact_response.status_code == 200:
                                    artifacts.append(artifact_response.json())
                            except Exception as e:
                                await ctx.warning(f"Could not fetch artifact {artifact_id}: {str(e)}")

                result["artifacts"] = artifacts
                await ctx.info(f"Added {len(artifacts)} artifacts to ticket {normalized_id}")
            else:
                await ctx.warning(f"Could not fetch timeline entries for ticket {normalized_id}")
                result["timeline_entries"] = []
                result["artifacts"] = []
        except Exception as e:
            await ctx.warning(f"Error fetching timeline entries for ticket {normalized_id}: {str(e)}")
            result["timeline_entries"] = []
            result["artifacts"] = []

        # Add navigation links (built from the caller-supplied ID so the URIs
        # round-trip through the resource templates).
        result["links"] = {
            "timeline": f"devrev://tickets/{ticket_id}/timeline",
            "artifacts": f"devrev://tickets/{ticket_id}/artifacts"
        }

        # Cache the enriched result
        devrev_cache[cache_key] = json.dumps(result, indent=2)
        await ctx.info(f"Successfully retrieved and cached ticket: {normalized_id}")

        return devrev_cache[cache_key]

    except Exception as e:
        await ctx.error(f"Failed to get ticket resource {ticket_id}: {str(e)}")
        # Return an error payload instead of raising so the resource stays readable.
        return json.dumps({"error": f"Ticket resource {ticket_id} not found: {str(e)}"}, indent=2)
+""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..debug import debug_error_handler + + +@debug_error_handler +async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> str: + """ + Access specific timeline entry details. + + Args: + timeline_id: The DevRev timeline entry ID (full don: format) + ctx: FastMCP context + devrev_cache: Cache dictionary for storing results + + Returns: + JSON string containing the timeline entry data + """ + try: + cache_key = f"timeline:{timeline_id}" + + # Check cache first + if cache_key in devrev_cache: + await ctx.info(f"Retrieved timeline entry {timeline_id} from cache") + return devrev_cache[cache_key] + + await ctx.info(f"Fetching timeline entry {timeline_id} from DevRev API") + + # For timeline entries, use timeline-entries.get endpoint + response = make_devrev_request( + "timeline-entries.get", + {"id": timeline_id} + ) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to fetch timeline entry {timeline_id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch timeline entry {timeline_id} (HTTP {response.status_code}): {error_text}") + + result = response.json() + + # Add navigation links + # Extract ticket ID from the timeline entry if available + ticket_id = None + if "object" in result: + object_id = result["object"] + if "TKT-" in object_id: + ticket_id = object_id.replace("TKT-", "") + + links = {} + if ticket_id: + links["ticket"] = f"devrev://tickets/{ticket_id}" + links["ticket_timeline"] = f"devrev://tickets/{ticket_id}/timeline" + + # Add links to artifacts if any are attached + if "artifacts" in result and result["artifacts"]: + links["artifacts"] = [f"devrev://artifacts/{artifact_id}" for artifact_id in result["artifacts"]] + + result["links"] = links + + # Cache the result + devrev_cache[cache_key] = json.dumps(result, indent=2) + await ctx.info(f"Successfully retrieved and 
cached timeline entry: {timeline_id}") + + return devrev_cache[cache_key] + + except Exception as e: + await ctx.error(f"Failed to get timeline resource {timeline_id}: {str(e)}") + raise ValueError(f"Timeline resource {timeline_id} not found: {str(e)}") \ No newline at end of file diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index fb7ab9b..8c520c6 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -5,13 +5,22 @@ This module implements the FastMCP server for DevRev integration. """ -import json import os from typing import Dict, Any from fastmcp import FastMCP, Context from mcp import types -from .utils import make_devrev_request + +# Import modular resources and tools +from .resources.ticket import ticket as ticket_resource +from .resources.timeline_entry import timeline_entry as timeline_entry_resource +from .resources.artifact import artifact as artifact_resource +from .tools.get_object import get_object as get_object_tool +from .tools.get_timeline_entries import get_timeline_entries as get_timeline_entries_tool +from .tools.get_ticket import get_ticket as get_ticket_tool +from .tools.search import search as search_tool +from .tools.create_object import create_object as create_object_tool +from .tools.update_object import update_object as update_object_tool # Check debug mode and store state DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" @@ -27,200 +36,509 @@ # Store DevRev resources (works, comments, etc.) for resource access devrev_cache = {} -# @mcp.tool() -# async def search(query: str, namespace: str, ctx: Context) -> str: -# """ -# Search DevRev using the provided query. - -# Args: -# query: The search query string -# namespace: The namespace to search in (article, issue, ticket, part, dev_user) - -# Returns: -# JSON string containing search results -# """ -# if namespace not in ["article", "issue", "ticket", "part", "dev_user"]: -# raise ValueError(f"Invalid namespace '{namespace}'. 
@mcp.tool(
    name="search",
    description="Search DevRev objects using hybrid search. Supports natural language queries across tickets, issues, articles, parts, and users. Returns enriched results with metadata, ownership, status, and organizational context for efficient triage and analysis.",
    tags=["search", "devrev", "tickets", "issues", "articles", "hybrid-search"]
)
async def search(query: str, namespace: str, ctx: Context) -> str:
    """
    Run a DevRev hybrid search and return the parsed results.

    Args:
        query: The search query string
        namespace: The namespace to search in (article, issue, ticket, part, dev_user)

    Returns:
        JSON string containing parsed search results with key information
    """
    # Thin wrapper: validation and parsing live in tools.search.
    return await search_tool(query, namespace, ctx)
@mcp.tool(
    name="create_object",
    description="Create new DevRev tickets or issues with full metadata support. Supports both customer-facing tickets and internal issues with proper assignment, categorization, and detailed descriptions for workflow automation.",
    tags=["create", "devrev", "tickets", "issues", "workflow", "automation"]
)
async def create_object(
    type: str,
    title: str,
    applies_to_part: str,
    body: str = "",
    owned_by: list[str] = None,
    ctx: Context = None
) -> str:
    """
    Create a new issue or ticket in DevRev.

    Args:
        type: The type of object to create ("issue" or "ticket")
        title: The title/summary of the object
        applies_to_part: The part ID this object applies to
        body: The body/description of the object (optional)
        owned_by: List of user IDs who should own this object (optional)

    Returns:
        JSON string containing the created object information
    """
    # Delegate to the modular implementation in tools.create_object.
    return await create_object_tool(type, title, applies_to_part, body, owned_by, ctx)


@mcp.tool(
    name="update_object",
    description="Update existing DevRev tickets or issues with new information, descriptions, or titles. Maintains object history and audit trails while allowing incremental updates as investigations progress.",
    tags=["update", "devrev", "tickets", "issues", "maintenance", "audit"]
)
async def update_object(
    id: str,
    type: str,
    title: str = None,
    body: str = None,
    ctx: Context = None
) -> str:
    """
    Update an existing issue or ticket in DevRev.

    Args:
        id: The ID of the object to update
        type: The type of object ("issue" or "ticket")
        title: New title for the object (optional)
        body: New body/description for the object (optional)

    Returns:
        JSON string containing the updated object information
    """
    # Delegate to tools.update_object; the module-level cache is passed so the
    # tool can invalidate stale entries.
    return await update_object_tool(id, type, title, body, ctx, devrev_cache)
@mcp.tool(
    name="get_object",
    description="Retrieve comprehensive information about any DevRev object including tickets, issues, parts, and users. Returns complete metadata, relationships, assignment details, and history for thorough analysis and investigation.",
    tags=["retrieve", "devrev", "objects", "metadata", "investigation", "analysis"]
)
async def get_object(id: str, ctx: Context) -> str:
    """
    Get all information about a DevRev issue or ticket using its ID.

    Args:
        id: The DevRev object ID

    Returns:
        JSON string containing the object information
    """
    # Delegate to tools.get_object, sharing the module-level cache.
    return await get_object_tool(id, ctx, devrev_cache)

# Specialized resource handlers for different DevRev object types

@mcp.resource(
    uri="devrev://tickets/{ticket_id}",
    description="Access comprehensive DevRev ticket information with navigation links to related resources. Includes customer details, status progression, assignment history, and navigation to timeline and artifacts.",
    tags=["ticket", "devrev", "customer-support", "navigation"]
)
async def ticket(ticket_id: str, ctx: Context) -> str:
    """
    Access DevRev ticket details with navigation links.

    Args:
        ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345)

    Returns:
        JSON string containing the ticket data with navigation links
    """
    payload_str = await ticket_resource(ticket_id, ctx, devrev_cache)

    # Debug: log what the modular resource handed back.
    await ctx.info(f"ticket_resource returned result type: {type(payload_str)}, length: {len(payload_str) if payload_str else 0}")
    if payload_str:
        await ctx.info(f"Result preview: {repr(payload_str[:100])}")

    # Guard against an empty payload before attempting to parse it.
    if not payload_str:
        await ctx.error(f"ticket_resource returned empty result for ticket_id: {ticket_id}")
        raise ValueError(f"Empty result from ticket_resource for ticket {ticket_id}")

    # Re-parse so navigation links can be (re)attached uniformly.
    import json
    payload = json.loads(payload_str)
    payload["links"] = {
        "timeline": f"devrev://tickets/{ticket_id}/timeline",
        "artifacts": f"devrev://tickets/{ticket_id}/artifacts"
    }

    # Return a JSON string as expected by the MCP framework.
    return json.dumps(payload, indent=2)
@mcp.resource(
    uri="devrev://tickets/{ticket_id}/timeline",
    description="Access enriched timeline for a ticket with customer context, conversation flow, and artifacts. Returns token-efficient structured format focusing on support workflow.",
    tags=["timeline", "enriched", "devrev", "conversation", "artifacts"]
)
async def ticket_timeline(ticket_id: str, ctx: Context) -> str:
    """
    Access enriched timeline for a ticket with structured conversation format.

    Builds a condensed view of the ticket's timeline: a "summary" header, a
    numbered "conversation_thread", a "key_events" list, and the set of
    artifacts referenced by the conversation.

    Args:
        ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345)

    Returns:
        JSON string containing enriched timeline with customer context and conversation flow

    Raises:
        ValueError: If the ticket or its timeline cannot be fetched (any
            underlying exception is wrapped into a ValueError).
    """
    from .utils import make_devrev_request
    import json

    try:
        # Normalize ticket ID to handle various formats - extract just the number then format properly
        if ticket_id.upper().startswith("TKT-"):
            # Extract numeric part and reformat
            numeric_id = ticket_id[4:]  # Remove TKT- or tkt-
            normalized_id = f"TKT-{numeric_id}"
        else:
            normalized_id = f"TKT-{ticket_id}"

        # Get ticket details for customer and workspace info
        ticket_response = make_devrev_request("works.get", {"id": normalized_id})
        if ticket_response.status_code != 200:
            raise ValueError(f"Failed to fetch ticket {normalized_id}")

        ticket_data = ticket_response.json()
        work = ticket_data.get("work", {})

        # Get timeline entries
        timeline_response = make_devrev_request(
            "timeline-entries.list",
            {"object": normalized_id}
        )

        if timeline_response.status_code != 200:
            raise ValueError(f"Failed to fetch timeline for {normalized_id}")

        timeline_data = timeline_response.json()
        all_entries = timeline_data.get("timeline_entries", [])

        await ctx.info(f"DEBUG: Found {len(all_entries)} timeline entries for {normalized_id}")

        # Extract customer information from the work's creator; the email is
        # later used to classify each timeline entry's speaker.
        customer_info = {}
        created_by = work.get("created_by", {})
        if created_by:
            customer_info = {
                "name": created_by.get("display_name", "Unknown"),
                "email": created_by.get("email", ""),
                "type": "customer" if created_by.get("type") == "user" else "system"
            }

        # Build enriched schema
        result = {
            "summary": {
                "ticket_id": normalized_id,
                "customer": customer_info.get("email", customer_info.get("name", "Unknown")),
                "workspace": work.get("owned_by", [{}])[0].get("display_name", "Unknown Workspace") if work.get("owned_by") else "Unknown Workspace",
                "subject": work.get("title", "No title"),
                "current_stage": work.get("stage", {}).get("name", "unknown"),
                "created_date": work.get("created_date"),
                "total_artifacts": 0
            },
            "conversation_thread": [],
            "key_events": [],
            "all_artifacts": []
        }

        # Process timeline entries into conversation and events
        conversation_seq = 1
        artifacts_found = {}  # artifact_id -> artifact_info dict

        for entry in all_entries:
            entry_type = entry.get("type", "")
            timestamp = entry.get("created_date", "")

            # Handle conversation entries (comments)
            if entry_type == "timeline_comment":
                body = entry.get("body", "")
                author = entry.get("created_by", {})

                # Determine speaker type: match against the creator's email,
                # otherwise fall back to a display-name heuristic for system users.
                speaker_type = "support"
                if author.get("email") == customer_info.get("email"):
                    speaker_type = "customer"
                elif "system" in author.get("display_name", "").lower():
                    speaker_type = "system"

                conversation_entry = {
                    "seq": conversation_seq,
                    "timestamp": timestamp,
                    "event_type": entry_type,
                    "speaker": {
                        "name": author.get("display_name", author.get("email", "Unknown")),
                        "type": speaker_type
                    },
                    "message": body,
                    "artifacts": []
                }

                # Add artifacts if present.
                # NOTE(review): here each element of entry["artifacts"] is
                # treated as a dict with "id"/"display_id"/"file" keys, while
                # resources/ticket.py iterates the same field as plain IDs —
                # confirm the actual API shape.
                if entry.get("artifacts"):
                    for artifact in entry["artifacts"]:
                        artifact_id = artifact.get("id")
                        artifact_info = {
                            "id": artifact_id,
                            "display_id": artifact.get("display_id"),
                            "type": artifact.get("file", {}).get("type", "unknown"),
                            "attached_to_message": conversation_seq,
                            "resource_uri": f"devrev://artifacts/{artifact_id}"
                        }
                        conversation_entry["artifacts"].append(artifact_info)
                        artifacts_found[artifact_id] = artifact_info

                # Add timeline entry navigation link (last path segment of the don: ID)
                entry_id = entry.get("id", "").split("/")[-1] if entry.get("id") else ""
                if entry_id:
                    conversation_entry["timeline_entry_uri"] = f"devrev://tickets/{ticket_id}/timeline/{entry_id}"

                result["conversation_thread"].append(conversation_entry)
                conversation_seq += 1

                # Update last message timestamps (keys only appear once a
                # matching message has been seen)
                if speaker_type == "customer":
                    result["summary"]["last_customer_message"] = timestamp
                elif speaker_type == "support":
                    result["summary"]["last_support_response"] = timestamp

            # Handle key events
            elif entry_type in ["work_created", "stage_updated", "part_suggested", "work_updated"]:
                event_info = {
                    "type": entry_type.replace("work_", "").replace("_", " "),
                    "event_type": entry_type,
                    "timestamp": timestamp
                }

                # Add context for stage updates
                if entry_type == "stage_updated" and entry.get("stage_updated"):
                    stage_info = entry["stage_updated"]
                    event_info["from_stage"] = stage_info.get("old_stage", {}).get("name")
                    event_info["to_stage"] = stage_info.get("new_stage", {}).get("name")

                # Add author information if available
                author = entry.get("created_by", {})
                if author:
                    event_info["actor"] = {
                        "name": author.get("display_name", author.get("email", "System")),
                        "type": "customer" if author.get("email") == customer_info.get("email") else "support"
                    }

                result["key_events"].append(event_info)

            # Handle all other event types to preserve information
            else:
                # Skip entries without meaningful content
                if not entry_type or entry_type in ["", "unknown"]:
                    continue

                # Determine if this is likely a conversation-like entry
                body = entry.get("body", "").strip()
                author = entry.get("created_by", {})

                if body:  # Has content, treat as conversation
                    speaker_type = "support"
                    if author.get("email") == customer_info.get("email"):
                        speaker_type = "customer"
                    elif "system" in author.get("display_name", "").lower():
                        speaker_type = "system"

                    conversation_entry = {
                        "seq": conversation_seq,
                        "timestamp": timestamp,
                        "event_type": entry_type,
                        "speaker": {
                            "name": author.get("display_name", author.get("email", "Unknown")),
                            "type": speaker_type
                        },
                        "message": body,
                        "artifacts": []
                    }

                    # Add timeline entry navigation link
                    entry_id = entry.get("id", "").split("/")[-1] if entry.get("id") else ""
                    if entry_id:
                        conversation_entry["timeline_entry_uri"] = f"devrev://tickets/{ticket_id}/timeline/{entry_id}"

                    result["conversation_thread"].append(conversation_entry)
                    conversation_seq += 1

                    # Update last message timestamps
                    if speaker_type == "customer":
                        result["summary"]["last_customer_message"] = timestamp
                    elif speaker_type == "support":
                        result["summary"]["last_support_response"] = timestamp

                else:  # No content, treat as event
                    event_info = {
                        "type": entry_type.replace("_", " "),
                        "event_type": entry_type,
                        "timestamp": timestamp
                    }

                    # Add author information if available
                    if author:
                        event_info["actor"] = {
                            "name": author.get("display_name", author.get("email", "System")),
                            "type": "customer" if author.get("email") == customer_info.get("email") else "support"
                        }

                    result["key_events"].append(event_info)

        # Set artifact count and list (deduplicated via the artifacts_found dict)
        result["all_artifacts"] = list(artifacts_found.values())
        result["summary"]["total_artifacts"] = len(artifacts_found)

        # Add navigation links
        result["links"] = {
            "ticket": f"devrev://tickets/{ticket_id}"
        }

        if result["all_artifacts"]:
            result["links"]["artifacts"] = f"devrev://tickets/{ticket_id}/artifacts"

        return json.dumps(result, indent=2)

    except Exception as e:
        await ctx.error(f"Failed to get timeline for ticket {ticket_id}: {str(e)}")
        raise ValueError(f"Timeline for ticket {ticket_id} not found: {str(e)}")
@mcp.resource(
    uri="devrev://tickets/{ticket_id}/timeline/{entry_id}",
    description="Access individual timeline entry with detailed conversation data and navigation links.",
    tags=["timeline", "entry", "devrev", "conversation"]
)
async def timeline_entry(ticket_id: str, entry_id: str, ctx: Context) -> str:
    """
    Access specific timeline entry details.

    Args:
        ticket_id: The DevRev ticket ID
        entry_id: The timeline entry ID

    Returns:
        JSON string containing the timeline entry data with links
    """
    # Simplified (non-don:) entry IDs cannot be fetched directly; fall back to
    # the whole ticket timeline instead.
    if not entry_id.startswith("don:core:"):
        # This is a simplified ID, we'll need to fetch it via the ticket timeline
        return await ticket_timeline(ticket_id, ctx)

    raw_entry = await timeline_entry_resource(entry_id, ctx, devrev_cache)

    # Re-parse and attach navigation links back to the owning ticket.
    import json
    payload = json.loads(raw_entry)
    payload["links"] = {
        "ticket": f"devrev://tickets/{ticket_id}",
        "timeline": f"devrev://tickets/{ticket_id}/timeline"
    }

    return json.dumps(payload, indent=2)

@mcp.resource(
    uri="devrev://tickets/{ticket_id}/artifacts",
    description="Access all artifacts associated with a specific ticket. Returns list of files, screenshots, and documents attached to the ticket.",
    tags=["artifacts", "collection", "devrev", "ticket-artifacts"]
)
async def ticket_artifacts(ticket_id: str, ctx: Context) -> str:
    """
    Access all artifacts for a ticket.

    Args:
        ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345)

    Returns:
        JSON string containing artifacts with navigation links
    """
    # The enriched ticket resource already gathers artifact metadata; reuse it.
    raw_ticket = await ticket_resource(ticket_id, ctx, devrev_cache)

    import json
    ticket_payload = json.loads(raw_ticket)
    artifacts = ticket_payload.get("artifacts", [])

    # Attach navigation links to each artifact (URI uses the last path
    # segment of the don: ID).
    for item in artifacts:
        short_id = item.get("id", "").split("/")[-1] if item.get("id") else ""
        if short_id:
            item["links"] = {
                "self": f"devrev://artifacts/{short_id}",
                "ticket": f"devrev://tickets/{ticket_id}"
            }

    collection = {
        "artifacts": artifacts,
        "links": {
            "ticket": f"devrev://tickets/{ticket_id}"
        }
    }

    return json.dumps(collection, indent=2)
@mcp.resource(
    uri="devrev://artifacts/{artifact_id}",
    description="Access DevRev artifact metadata with temporary download URLs and reverse links to associated tickets.",
    tags=["artifact", "devrev", "files", "reverse-links"]
)
async def artifact(artifact_id: str, ctx: Context) -> str:
    """
    Access DevRev artifact metadata with reverse links.

    Args:
        artifact_id: The DevRev artifact ID

    Returns:
        JSON string containing the artifact metadata with reverse links
    """
    raw_artifact = await artifact_resource(artifact_id, ctx, devrev_cache)

    # Attach a reverse-lookup link; the actual lookup is served by the
    # devrev://artifacts/{artifact_id}/tickets resource.
    import json
    payload = json.loads(raw_artifact)
    payload["links"] = {
        "tickets": f"devrev://artifacts/{artifact_id}/tickets"
    }

    return json.dumps(payload, indent=2)
@mcp.resource(
    uri="devrev://artifacts/{artifact_id}/tickets",
    description="Access all tickets that reference this artifact. Provides reverse lookup from artifacts to tickets.",
    tags=["artifact", "reverse-links", "devrev", "tickets"]
)
async def artifact_tickets(artifact_id: str, ctx: Context) -> str:
    """
    Access tickets that reference this artifact.

    Args:
        artifact_id: The DevRev artifact ID

    Returns:
        JSON string containing linked tickets
    """
    # This would require a search or reverse lookup in DevRev API
    # For now, return a placeholder structure
    import json

    result = {
        "linked_tickets": [],  # Would be populated with actual ticket URIs
        "message": "Reverse artifact lookup not yet implemented",
        "links": {
            "artifact": f"devrev://artifacts/{artifact_id}"
        }
    }

    return json.dumps(result, indent=2)

# Add dynamic resource access for DevRev objects
@mcp.resource(
    uri="devrev://{id}",
    description="Universal DevRev object accessor supporting any object type including tickets, issues, comments, parts, and users. Automatically routes to specialized handlers based on object type for optimal data enrichment and presentation.",
    tags=["devrev", "universal", "router", "objects", "tickets", "issues", "comments"]
)
async def get_devrev_resource(id: str, ctx: Context) -> str:
    """
    Access any DevRev object (tickets, comments, issues, etc.) by its full DevRev ID.
    Routes to specialized handlers based on object type.

    Args:
        id: The DevRev object ID

    Returns:
        JSON string containing the object data

    Raises:
        ValueError: If no handler can resolve the object.
    """
    try:
        await ctx.info(f"Routing resource request for {id} to specialized handler")

        # Route to specialized handlers based on ID pattern
        if ":ticket/" in id:
            ticket_part = id.split(":ticket/")[1]
            if ":comment/" in id:
                # Timeline entry (comment). Fix: timeline_entry() takes
                # (ticket_id, entry_id, ctx); the previous code passed the
                # don: ID as the only argument, which raised a TypeError that
                # was then swallowed into "Resource not found".
                owning_ticket = ticket_part.split(":")[0]
                return await timeline_entry(owning_ticket, id, ctx)
            else:
                # Ticket. Fix: the ticket handler expects "12345"/"TKT-12345";
                # passing the full don: ID normalized it into an invalid
                # "TKT-don:core:..." API identifier.
                return await ticket(ticket_part, ctx)
        elif ":artifact/" in id:
            # This is an artifact
            artifact_id = id.split(":artifact/")[1]
            return await artifact(artifact_id, ctx)
        else:
            # Fall back to generic object handler for other types
            await ctx.info(f"Using generic object handler for {id}")
            return await get_object(id, ctx)

    except Exception as e:
        await ctx.error(f"Failed to get resource {id}: {str(e)}")
        raise ValueError(f"Resource {id} not found: {str(e)}")

@mcp.tool(
    name="get_timeline_entries",
    description="Retrieve chronological timeline of all activity on a DevRev ticket including comments, status changes, assignments, and system events. Essential for understanding ticket progression, customer interactions, and audit trails. Accepts flexible ID formats (TKT-12345, 12345, or full don: format) and provides multiple output formats for different use cases.",
    tags=["timeline", "devrev", "tickets", "history", "conversations", "audit"]
)
async def get_timeline_entries(id: str, format: str = "summary", ctx: Context = None) -> str:
    """
    Get all timeline entries for a DevRev ticket using its ID with flexible formatting.

    Args:
        id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format
        format: Output format - "summary" (key info), "detailed" (conversation focus), or "full" (complete data)
    """
    return await get_timeline_entries_tool(id, ctx, format)
Args: - id: The DevRev ticket ID - don:core:dvrv-us-1:devo/:ticket/12345 + id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format + format: Output format - "summary" (key info), "detailed" (conversation focus), or "full" (complete data) """ - try: - await ctx.info(f"Fetching timeline entries for ticket {id}") - - content_list = await ctx.read_resource(id) - if not content_list: - return "No timeline entries found" - - return content_list - except Exception as e: - return f"Failed to get timeline entries for ticket {id}: {str(e)}" + return await get_timeline_entries_tool(id, ctx, format) + +@mcp.tool( + name="get_ticket", + description="Get a DevRev ticket with all associated timeline entries and artifacts. Provides enriched ticket data with complete conversation history and attached files for comprehensive support analysis.", + tags=["ticket", "devrev", "enriched", "timeline", "artifacts", "support"] +) +async def get_ticket(id: str, ctx: Context) -> str: + """ + Get a DevRev ticket with all associated timeline entries and artifacts. + + Args: + id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format + + Returns: + JSON string containing the ticket data with timeline entries and artifacts + """ + return await get_ticket_tool(id, ctx) def main(): """Main entry point for the DevRev MCP server.""" diff --git a/src/devrev_mcp/tools/__init__.py b/src/devrev_mcp/tools/__init__.py new file mode 100644 index 0000000..abbdd46 --- /dev/null +++ b/src/devrev_mcp/tools/__init__.py @@ -0,0 +1,5 @@ +""" +DevRev MCP Tools + +This module contains specialized tools for interacting with the DevRev API. +""" \ No newline at end of file diff --git a/src/devrev_mcp/tools/create_object.py b/src/devrev_mcp/tools/create_object.py new file mode 100644 index 0000000..d30021d --- /dev/null +++ b/src/devrev_mcp/tools/create_object.py @@ -0,0 +1,67 @@ +""" +DevRev Create Object Tool + +Creates new issues or tickets in DevRev. 
+""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..debug import debug_error_handler + + +@debug_error_handler +async def create_object( + type: str, + title: str, + applies_to_part: str, + body: str = "", + owned_by: list[str] = None, + ctx: Context = None +) -> str: + """ + Create a new issue or ticket in DevRev. + + Args: + type: The type of object to create ("issue" or "ticket") + title: The title/summary of the object + applies_to_part: The part ID this object applies to + body: The body/description of the object (optional) + owned_by: List of user IDs who should own this object (optional) + ctx: FastMCP context + + Returns: + JSON string containing the created object information + """ + if type not in ["issue", "ticket"]: + raise ValueError(f"Invalid type '{type}'. Must be 'issue' or 'ticket'") + + try: + await ctx.info(f"Creating new {type}: {title}") + + payload = { + "type": type, + "title": title, + "applies_to_part": applies_to_part + } + + if body: + payload["body"] = body + if owned_by: + payload["owned_by"] = owned_by + + response = make_devrev_request("works.create", payload) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to create {type}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to create {type} (HTTP {response.status_code}): {error_text}") + + result_data = response.json() + await ctx.info(f"Successfully created {type} with ID: {result_data.get('work', {}).get('id', 'unknown')}") + + return json.dumps(result_data, indent=2) + + except Exception as e: + await ctx.error(f"Failed to create {type}: {str(e)}") + raise \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_object.py b/src/devrev_mcp/tools/get_object.py new file mode 100644 index 0000000..d1a6001 --- /dev/null +++ b/src/devrev_mcp/tools/get_object.py @@ -0,0 +1,46 @@ +""" +DevRev Get Object Tool + +Provides a tool for fetching DevRev objects by ID. 
+""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..debug import debug_error_handler + + +@debug_error_handler +async def get_object(id: str, ctx: Context, devrev_cache: dict) -> str: + """ + Get all information about a DevRev issue and ticket using its ID. + + Args: + id: The DevRev object ID + ctx: FastMCP context + devrev_cache: Cache dictionary for storing results + + Returns: + JSON string containing the object information + """ + try: + await ctx.info(f"Fetching object {id} from DevRev") + + response = make_devrev_request("works.get", {"id": id}) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to get object {id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to get object {id} (HTTP {response.status_code}): {error_text}") + + result_data = response.json() + + # Cache the result + devrev_cache[id] = json.dumps(result_data, indent=2) + + await ctx.info(f"Successfully retrieved object: {id}") + return devrev_cache[id] + + except Exception as e: + await ctx.error(f"Failed to get object {id}: {str(e)}") + raise \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_ticket.py b/src/devrev_mcp/tools/get_ticket.py new file mode 100644 index 0000000..8db44dc --- /dev/null +++ b/src/devrev_mcp/tools/get_ticket.py @@ -0,0 +1,130 @@ +""" +DevRev Get Ticket Tool + +Provides a tool for fetching DevRev tickets with enriched timeline entries and artifact data. +""" + +import json +from fastmcp import Context +from ..debug import debug_error_handler + + +@debug_error_handler +async def get_ticket( + id: str, + ctx: Context +) -> str: + """ + Get a DevRev ticket with all associated timeline entries and artifacts. 
+ + Args: + id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format + ctx: FastMCP context + + Returns: + JSON string containing the ticket data with timeline entries and artifacts + """ + try: + # Normalize the ticket ID to just the number + if id.upper().startswith("TKT-"): + ticket_id = id[4:] # Remove TKT- prefix + elif id.startswith("don:core:"): + # Extract ID from don:core format + ticket_id = id.split(":")[-1] + else: + ticket_id = id + + await ctx.info(f"Fetching ticket {ticket_id} with timeline entries and artifacts") + + # Get the main ticket data + ticket_uri = f"devrev://tickets/{ticket_id}" + try: + resource_contents = await ctx.read_resource(ticket_uri) + + # MCP framework returns list[ReadResourceContents] with .content attribute + if resource_contents and len(resource_contents) > 0: + # Handle multiple contents by trying each until we find valid JSON + if len(resource_contents) > 1: + await ctx.warning(f"Multiple resource contents returned ({len(resource_contents)}), trying each for valid JSON") + + ticket_data = None + for i, content_item in enumerate(resource_contents): + try: + ticket_data = json.loads(content_item.content) + if i > 0: + await ctx.info(f"Successfully parsed JSON from content item {i}") + break + except json.JSONDecodeError as e: + await ctx.warning(f"Content item {i} is not valid JSON: {e}") + continue + + if ticket_data is None: + raise ValueError(f"No valid JSON found in any of the {len(resource_contents)} resource contents") + else: + raise ValueError(f"No resource contents returned for {ticket_uri}") + except Exception as ticket_error: + await ctx.error(f"Error reading ticket resource {ticket_uri}: {str(ticket_error)}") + raise ticket_error + + if not ticket_data: + return f"No ticket found with ID {ticket_id}" + + # Handle case where ticket_data is unexpectedly a list + if isinstance(ticket_data, list): + await ctx.warning(f"ticket_data is unexpectedly a list, using first item") + if len(ticket_data) > 
0: + ticket_data = ticket_data[0] + else: + return f"No ticket data found for ID {ticket_id}" + + # Ensure ticket_data is a dict + if not isinstance(ticket_data, dict): + await ctx.error(f"ticket_data is not a dict, type: {type(ticket_data)}, value: {repr(ticket_data)}") + return f"Invalid ticket data format for ID {ticket_id} (type: {type(ticket_data)})" + + # Get timeline entries + timeline_uri = f"devrev://tickets/{ticket_id}/timeline" + try: + timeline_contents = await ctx.read_resource(timeline_uri) + if timeline_contents and len(timeline_contents) > 0: + # Try each content item for valid JSON + timeline_data = None + for i, content_item in enumerate(timeline_contents): + try: + timeline_data = json.loads(content_item.content) + break + except json.JSONDecodeError: + continue + + ticket_data["timeline_entries"] = timeline_data if timeline_data else [] + else: + ticket_data["timeline_entries"] = [] + except Exception as timeline_error: + await ctx.warning(f"Error reading timeline entries: {str(timeline_error)}") + ticket_data["timeline_entries"] = [] + + # Get artifacts if any are referenced + artifacts = [] + if "artifact_uris" in ticket_data: + for uri in ticket_data["artifact_uris"]: + try: + artifact_contents = await ctx.read_resource(uri) + if artifact_contents and len(artifact_contents) > 0: + # Try each content item for valid JSON + for content_item in artifact_contents: + try: + artifact_data = json.loads(content_item.content) + artifacts.append(artifact_data) + break + except json.JSONDecodeError: + continue + except Exception as artifact_error: + await ctx.warning(f"Error reading artifact {uri}: {str(artifact_error)}") + + ticket_data["artifacts"] = artifacts + + return json.dumps(ticket_data, indent=2) + + except Exception as e: + await ctx.error(f"Failed to get ticket {id}: {str(e)}") + raise diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py new file mode 100644 index 0000000..8436442 --- 
/dev/null +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -0,0 +1,264 @@ +""" +DevRev Get Timeline Entries Tool + +Provides a tool for fetching timeline entries for DevRev tickets with flexible formatting. +""" + +import json +from fastmcp import Context +from ..debug import debug_error_handler + + +@debug_error_handler +async def get_timeline_entries( + id: str, + ctx: Context, + format: str = "summary" +) -> str: + """ + Get timeline entries for a DevRev ticket with flexible formatting options. + + Args: + id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format + ctx: FastMCP context + format: Output format - "summary" (key info), "detailed" (conversation focus), or "full" (complete data) + + Returns: + Formatted timeline entries based on the requested format + """ + try: + # Normalize the ticket ID to just the number + ticket_id = _normalize_ticket_id(id) + await ctx.info(f"Fetching timeline entries for ticket {ticket_id} in {format} format") + + # Use the resource URI to get the enriched timeline + resource_uri = f"devrev://tickets/{ticket_id}/timeline" + try: + content = await ctx.read_resource(resource_uri) + except Exception as resource_error: + await ctx.error(f"Error reading resource {resource_uri}: {str(resource_error)}") + raise resource_error + + if not content: + return f"No timeline entries found for ticket {ticket_id}" + + # Handle the resource response - FastMCP can return different structures + # Extract the actual timeline data from the response + if isinstance(content, list) and len(content) > 0: + # It's a list, likely containing a ReadResourceContents object + first_item = content[0] + if hasattr(first_item, 'content'): + # ReadResourceContents object + try: + timeline_data = json.loads(first_item.content) + except (json.JSONDecodeError, AttributeError): + if format == "full": + return str(first_item.content) if hasattr(first_item, 'content') else str(first_item) + else: + return f"Error: Could not parse timeline data 
for ticket {ticket_id}" + else: + # Direct data in the list + timeline_data = first_item + elif hasattr(content, 'content'): + # It's a ReadResourceContents object, get the content + try: + timeline_data = json.loads(content.content) + except (json.JSONDecodeError, AttributeError): + if format == "full": + return str(content.content) if hasattr(content, 'content') else str(content) + else: + return f"Error: Could not parse timeline data for ticket {ticket_id}" + elif isinstance(content, str): + try: + timeline_data = json.loads(content) + except json.JSONDecodeError: + # If it's already a string, return as-is for full format + if format == "full": + return content + else: + return f"Error: Could not parse timeline data for ticket {ticket_id}" + else: + # Content is already parsed (dict, list, etc.) + timeline_data = content + + # Debug: Check what we actually received + await ctx.info(f"DEBUG: timeline_data type: {type(timeline_data)}") + if isinstance(timeline_data, dict): + await ctx.info(f"DEBUG: timeline_data keys: {list(timeline_data.keys())}") + elif isinstance(timeline_data, list): + await ctx.info(f"DEBUG: timeline_data length: {len(timeline_data)}") + if timeline_data: + await ctx.info(f"DEBUG: first item type: {type(timeline_data[0])}") + + # Format based on requested type + if format == "summary": + return _format_summary(timeline_data, ticket_id) + elif format == "detailed": + return _format_detailed(timeline_data, ticket_id) + else: # format == "full" + try: + return json.dumps(timeline_data, indent=2, default=str) + except (TypeError, ValueError) as e: + await ctx.error(f"Could not serialize timeline data to JSON: {str(e)}") + return str(timeline_data) + + except Exception as e: + await ctx.error(f"Failed to get timeline entries for {id}: {str(e)}") + return f"Failed to get timeline entries for ticket {id}: {str(e)}" + + +def _normalize_ticket_id(id: str) -> str: + """ + Normalize various ticket ID formats to just the numeric ID. 
+ + Accepts: + - TKT-12345 -> 12345 + - tkt-12345 -> 12345 + - don:core:dvrv-us-1:devo/118WAPdKBc:ticket/12345 -> 12345 + - 12345 -> 12345 + """ + if id.startswith("don:core:") and ":ticket/" in id: + # Extract from full DevRev ID + return id.split(":ticket/")[1] + elif id.upper().startswith("TKT-"): + # Extract from TKT- format (case insensitive) + return id[4:] # Remove first 4 characters (TKT- or tkt-) + else: + # Assume it's already just the ticket number + return id + + +def _format_summary(timeline_data, ticket_id: str) -> str: + """ + Format timeline data as a concise summary focusing on key metrics and latest activity. + """ + # Handle both dict and list formats + if isinstance(timeline_data, list): + # If it's a list, treat it as the conversation thread + conversation = timeline_data + summary = {} + else: + # If it's a dict, extract the expected fields + summary = timeline_data.get("summary", {}) + conversation = timeline_data.get("conversation_thread", []) + + # Build summary text + lines = [ + f"**TKT-{ticket_id} Timeline Summary:**", + "", + f"**Subject:** {summary.get('subject', 'Unknown')}", + f"**Status:** {summary.get('current_stage', 'Unknown')}", + f"**Customer:** {summary.get('customer', 'Unknown')}", + f"**Created:** {summary.get('created_date', 'Unknown')}", + ] + + # Add message counts + customer_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "customer"] + support_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "support"] + + lines.extend([ + "", + f"**Activity:** {len(customer_messages)} customer messages, {len(support_messages)} support responses", + ]) + + # Add last activity timestamps + if summary.get("last_customer_message"): + lines.append(f"**Last customer message:** {summary['last_customer_message']}") + if summary.get("last_support_response"): + lines.append(f"**Last support response:** {summary['last_support_response']}") + + # Add latest messages preview + if 
conversation: + lines.extend([ + "", + "**Recent Activity:**" + ]) + + # Show last 3 messages + recent_messages = conversation[-3:] if len(conversation) > 3 else conversation + for msg in recent_messages: + speaker = msg.get("speaker", {}) + timestamp = msg.get("timestamp", "")[:10] # Just date part + message_preview = msg.get("message", "")[:100] + ("..." if len(msg.get("message", "")) > 100 else "") + lines.append(f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): {message_preview}") + + # Add artifacts info + if isinstance(timeline_data, dict): + artifacts = timeline_data.get("all_artifacts", []) + if artifacts: + lines.extend([ + "", + f"**Attachments:** {len(artifacts)} file(s) attached" + ]) + + return "\n".join(lines) + + +def _format_detailed(timeline_data, ticket_id: str) -> str: + """ + Format timeline data with focus on conversation flow and key events. + """ + # Handle both dict and list formats + if isinstance(timeline_data, list): + # If it's a list, treat it as the conversation thread + conversation = timeline_data + summary = {} + else: + # If it's a dict, extract the expected fields + summary = timeline_data.get("summary", {}) + conversation = timeline_data.get("conversation_thread", []) + + lines = [ + f"**TKT-{ticket_id} Detailed Timeline:**", + "", + f"**Subject:** {summary.get('subject', 'Unknown')}", + f"**Status:** {summary.get('current_stage', 'Unknown')}", + f"**Customer:** {summary.get('customer', 'Unknown')}", + "", + "**Conversation Thread:**" + ] + + # Add each conversation entry + for msg in conversation: + speaker = msg.get("speaker", {}) + timestamp = msg.get("timestamp", "") + message = msg.get("message", "") + artifacts = msg.get("artifacts", []) + + # Format timestamp to be more readable + display_time = timestamp[:19].replace("T", " ") if timestamp else "Unknown time" + + lines.extend([ + "", + f"**{msg.get('seq', '?')}. 
{speaker.get('name', 'Unknown')} ({speaker.get('type', 'unknown')}) - {display_time}**" + ]) + + # Add message content with proper formatting + if message: + # Handle multi-line messages by indenting them + message_lines = message.split("\n") + for line in message_lines: + if line.strip(): + lines.append(f" {line}") + else: + lines.append("") + + # Add artifacts info + if artifacts: + lines.append(f" *Attachments: {len(artifacts)} file(s)*") + + # Add key events summary + if isinstance(timeline_data, dict): + key_events = timeline_data.get("key_events", []) + if key_events: + lines.extend([ + "", + "**Key Events:**" + ]) + for event in key_events[-5:]: # Show last 5 events + event_time = event.get("timestamp", "")[:19].replace("T", " ") + event_type = event.get("type", "unknown") + actor = event.get("actor", {}).get("name", "System") + lines.append(f"- {event_time}: {event_type} by {actor}") + + return "\n".join(lines) \ No newline at end of file diff --git a/src/devrev_mcp/tools/search.py b/src/devrev_mcp/tools/search.py new file mode 100644 index 0000000..1fffa53 --- /dev/null +++ b/src/devrev_mcp/tools/search.py @@ -0,0 +1,135 @@ +""" +Copyright (c) 2025 DevRev, Inc. +SPDX-License-Identifier: MIT + +This module provides search functionality for DevRev objects. +""" + +import json +from typing import Dict, Any, List +from fastmcp import Context + +from ..utils import make_devrev_request +from ..debug import debug_error_handler + + +@debug_error_handler +async def search(query: str, namespace: str, ctx: Context) -> str: + """ + Search DevRev using the provided query and return parsed, useful information. 
+ + Args: + query: The search query string + namespace: The namespace to search in (article, issue, ticket, part, dev_user) + ctx: FastMCP context for logging + + Returns: + JSON string containing parsed search results with key information + """ + if namespace not in ["article", "issue", "ticket", "part", "dev_user"]: + raise ValueError(f"Invalid namespace '{namespace}'. Must be one of: article, issue, ticket, part, dev_user") + + try: + await ctx.info(f"Searching DevRev for '{query}' in namespace '{namespace}'") + + response = make_devrev_request( + "search.hybrid", + {"query": query, "namespace": namespace} + ) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Search failed with status {response.status_code}: {error_text}") + raise ValueError(f"Search failed with status {response.status_code}: {error_text}") + + search_results = response.json() + parsed_results = _parse_search_results(search_results, namespace) + + await ctx.info(f"Search completed successfully with {len(parsed_results.get('results', []))} results") + + return json.dumps(parsed_results, indent=2) + + except Exception as e: + await ctx.error(f"Search operation failed: {str(e)}") + raise + + +def _parse_search_results(raw_results: Dict[str, Any], namespace: str) -> Dict[str, Any]: + """ + Parse raw search results to extract useful information. 
+ + Args: + raw_results: Raw search results from DevRev API + namespace: The namespace that was searched + + Returns: + Parsed results with key information extracted + """ + parsed = { + "query_info": { + "namespace": namespace, + "total_results": len(raw_results.get("results", [])) + }, + "results": [] + } + + for result in raw_results.get("results", []): + if result.get("type") == "work" and "work" in result: + work = result["work"] + parsed_result = _parse_work_result(work) + parsed["results"].append(parsed_result) + elif result.get("type") == "article" and "article" in result: + article = result["article"] + parsed_result = _parse_article_result(article) + parsed["results"].append(parsed_result) + else: + # For other types, include basic info + parsed_result = { + "type": result.get("type", "unknown"), + "id": result.get("id"), + "snippet": result.get("snippet", "") + } + parsed["results"].append(parsed_result) + + return parsed + + +def _parse_work_result(work: Dict[str, Any]) -> Dict[str, Any]: + """Parse work (ticket/issue) search result.""" + return { + "type": "work", + "work_type": work.get("type"), + "id": work.get("id"), + "display_id": work.get("display_id"), + "title": work.get("title"), + "severity": work.get("severity"), + "stage": work.get("stage", {}).get("name"), + "owned_by": [ + { + "name": owner.get("display_name"), + "email": owner.get("email"), + "id": owner.get("id") + } + for owner in work.get("owned_by", []) + ], + "rev_org": { + "name": work.get("rev_org", {}).get("display_name"), + "id": work.get("rev_org", {}).get("id") + } + } + + +def _parse_article_result(article: Dict[str, Any]) -> Dict[str, Any]: + """Parse article search result.""" + return { + "type": "article", + "id": article.get("id"), + "display_id": article.get("display_id"), + "title": article.get("title"), + "status": article.get("status"), + "authored_by": { + "name": article.get("authored_by", {}).get("display_name"), + "email": article.get("authored_by", 
{}).get("email"), + "id": article.get("authored_by", {}).get("id") + } + } \ No newline at end of file diff --git a/src/devrev_mcp/tools/update_object.py b/src/devrev_mcp/tools/update_object.py new file mode 100644 index 0000000..08f2182 --- /dev/null +++ b/src/devrev_mcp/tools/update_object.py @@ -0,0 +1,74 @@ +""" +DevRev Update Object Tool + +Updates existing issues or tickets in DevRev. +""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..debug import debug_error_handler + + +@debug_error_handler +async def update_object( + id: str, + type: str, + title: str = None, + body: str = None, + ctx: Context = None, + devrev_cache: dict = None +) -> str: + """ + Update an existing issue or ticket in DevRev. + + Args: + id: The ID of the object to update + type: The type of object ("issue" or "ticket") + title: New title for the object (optional) + body: New body/description for the object (optional) + ctx: FastMCP context + devrev_cache: Cache dictionary for invalidating cached results + + Returns: + JSON string containing the updated object information + """ + if type not in ["issue", "ticket"]: + raise ValueError(f"Invalid type '{type}'. 
Must be 'issue' or 'ticket'") + + if not title and not body: + raise ValueError("At least one of 'title' or 'body' must be provided for update") + + try: + await ctx.info(f"Updating {type} {id}") + + payload = { + "id": id, + "type": type + } + + if title: + payload["title"] = title + if body: + payload["body"] = body + + response = make_devrev_request("works.update", payload) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to update {type}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to update {type} (HTTP {response.status_code}): {error_text}") + + result_data = response.json() + + # Update cache if we have this object cached + if devrev_cache and id in devrev_cache: + del devrev_cache[id] + await ctx.info(f"Cleared cache for updated object: {id}") + + await ctx.info(f"Successfully updated {type}: {id}") + return json.dumps(result_data, indent=2) + + except Exception as e: + await ctx.error(f"Failed to update {type}: {str(e)}") + raise \ No newline at end of file diff --git a/uv.lock b/uv.lock index 75833f0..e9c6cd5 100644 --- a/uv.lock +++ b/uv.lock @@ -110,7 +110,7 @@ wheels = [ [[package]] name = "devrev-mcp" -version = "0.1.1" +version = "0.1.2" source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, From 724ff223a2ad6019789163ebe56c35b344de545e Mon Sep 17 00:00:00 2001 From: Sara Date: Sun, 1 Jun 2025 14:37:33 -0400 Subject: [PATCH 06/17] removing unnecessary debug info will add better debug info using fastmcp capabilities --- mcp_wrapper.py | 5 -- src/devrev_mcp/debug.py | 70 -------------------- src/devrev_mcp/resources/artifact.py | 2 - src/devrev_mcp/resources/ticket.py | 1 - src/devrev_mcp/resources/timeline_entry.py | 2 - src/devrev_mcp/server.py | 7 -- src/devrev_mcp/tools/create_object.py | 2 - src/devrev_mcp/tools/get_object.py | 2 - src/devrev_mcp/tools/get_ticket.py | 3 - src/devrev_mcp/tools/get_timeline_entries.py | 2 - src/devrev_mcp/tools/search.py | 2 - src/devrev_mcp/tools/update_object.py | 2 - 12 files changed, 100 deletions(-) delete mode 100644 src/devrev_mcp/debug.py diff --git a/mcp_wrapper.py b/mcp_wrapper.py index cef3619..bd93b2f 100755 --- a/mcp_wrapper.py +++ b/mcp_wrapper.py @@ -6,7 +6,6 @@ when source files change, while maintaining a stable connection to the MCP client. """ -import os import sys import time import signal @@ -214,11 +213,7 @@ def main(): watch_dirs = [server_dir / "src"] watch_files = [server_dir / "pyproject.toml"] - # Enable debug mode - os.environ["DRMCP_DEBUG"] = "1" - print("๐Ÿ”„ Starting MCP Server Wrapper with Python watchdog", file=sys.stderr) - print("๐Ÿ› Debug mode: ENABLED", file=sys.stderr) print("", file=sys.stderr) # Create and run manager diff --git a/src/devrev_mcp/debug.py b/src/devrev_mcp/debug.py deleted file mode 100644 index 2d51afc..0000000 --- a/src/devrev_mcp/debug.py +++ /dev/null @@ -1,70 +0,0 @@ -""" -Debug utilities for DevRev MCP server. 
-""" - -import traceback -import os -from functools import wraps -from typing import Dict, Any, List - -import mcp.types as types - -# Check debug mode and store state -DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" -DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" - -def debug_error_handler(func): - """ - Decorator that catches exceptions in MCP functions and returns detailed debug information - as the response when DRMCP_DEBUG=1. - """ - debug_enabled = DEBUG_ENABLED - - @wraps(func) - async def wrapper(*args, **kwargs): - try: - result = await func(*args, **kwargs) - # Add debug message to all responses when debug is enabled - if debug_enabled and result: - # For tool responses (list of content) - if isinstance(result, list) and len(result) > 0 and hasattr(result[0], 'text'): - debug_content = types.TextContent( - type="text", - text=f"{DEBUG_MESSAGE}\n\n" - ) - result[0].text = debug_content.text + result[0].text - # For other responses (strings, etc.) - just add debug message - elif isinstance(result, str) and debug_enabled: - result = f"{DEBUG_MESSAGE}\n\n{result}" - return result - except Exception as e: - if debug_enabled: - # Debug mode: return detailed error information - error_message = f"""ERROR (Debug Mode): {type(e).__name__}: {str(e)} - -Full traceback: -{traceback.format_exc()} - -This is a debug error response. Let's troubleshoot this together. - -{DEBUG_MESSAGE}""" - else: - # Production mode: return generic error message - error_message = f"An error occurred while executing the function. Please try again or contact support." 
- - # Return appropriate error format based on expected return type - if hasattr(func, '__annotations__') and func.__annotations__.get('return'): - return_type = func.__annotations__['return'] - if 'List' in str(return_type) and 'TextContent' in str(return_type): - # Tool function - return list of TextContent - return [ - types.TextContent( - type="text", - text=error_message - ) - ] - - # Default: return as string (for resource handlers, etc.) - return error_message - - return wrapper \ No newline at end of file diff --git a/src/devrev_mcp/resources/artifact.py b/src/devrev_mcp/resources/artifact.py index 2c06fd8..1bc69c2 100644 --- a/src/devrev_mcp/resources/artifact.py +++ b/src/devrev_mcp/resources/artifact.py @@ -7,10 +7,8 @@ import json from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler -@debug_error_handler async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access DevRev artifact metadata. diff --git a/src/devrev_mcp/resources/ticket.py b/src/devrev_mcp/resources/ticket.py index 09ba032..a01e2b8 100644 --- a/src/devrev_mcp/resources/ticket.py +++ b/src/devrev_mcp/resources/ticket.py @@ -7,7 +7,6 @@ import json from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: diff --git a/src/devrev_mcp/resources/timeline_entry.py b/src/devrev_mcp/resources/timeline_entry.py index b912ac9..6f0b7a2 100644 --- a/src/devrev_mcp/resources/timeline_entry.py +++ b/src/devrev_mcp/resources/timeline_entry.py @@ -7,10 +7,8 @@ import json from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler -@debug_error_handler async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access specific timeline entry details. 
diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 8c520c6..b916421 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -5,7 +5,6 @@ This module implements the FastMCP server for DevRev integration. """ -import os from typing import Dict, Any from fastmcp import FastMCP, Context @@ -22,9 +21,6 @@ from .tools.create_object import create_object as create_object_tool from .tools.update_object import update_object as update_object_tool -# Check debug mode and store state -DEBUG_ENABLED = os.environ.get("DRMCP_DEBUG") == "1" -DEBUG_MESSAGE = "๐Ÿ› DEBUG MODE ENABLED - sara wuz here" if DEBUG_ENABLED else "๐Ÿ› DEBUG MODE DISABLED - sara wuz here" # Create the FastMCP server mcp = FastMCP( @@ -604,9 +600,6 @@ async def get_ticket(id: str, ctx: Context) -> str: def main(): """Main entry point for the DevRev MCP server.""" - # Print debug message - print(DEBUG_MESSAGE) - # Run the server mcp.run() diff --git a/src/devrev_mcp/tools/create_object.py b/src/devrev_mcp/tools/create_object.py index d30021d..f6a4b45 100644 --- a/src/devrev_mcp/tools/create_object.py +++ b/src/devrev_mcp/tools/create_object.py @@ -7,10 +7,8 @@ import json from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler -@debug_error_handler async def create_object( type: str, title: str, diff --git a/src/devrev_mcp/tools/get_object.py b/src/devrev_mcp/tools/get_object.py index d1a6001..d18f401 100644 --- a/src/devrev_mcp/tools/get_object.py +++ b/src/devrev_mcp/tools/get_object.py @@ -7,10 +7,8 @@ import json from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler -@debug_error_handler async def get_object(id: str, ctx: Context, devrev_cache: dict) -> str: """ Get all information about a DevRev issue and ticket using its ID. 
diff --git a/src/devrev_mcp/tools/get_ticket.py b/src/devrev_mcp/tools/get_ticket.py index 8db44dc..6aac1b2 100644 --- a/src/devrev_mcp/tools/get_ticket.py +++ b/src/devrev_mcp/tools/get_ticket.py @@ -6,10 +6,8 @@ import json from fastmcp import Context -from ..debug import debug_error_handler -@debug_error_handler async def get_ticket( id: str, ctx: Context @@ -41,7 +39,6 @@ async def get_ticket( try: resource_contents = await ctx.read_resource(ticket_uri) - # MCP framework returns list[ReadResourceContents] with .content attribute if resource_contents and len(resource_contents) > 0: # Handle multiple contents by trying each until we find valid JSON if len(resource_contents) > 1: diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py index 8436442..9c9f26a 100644 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -6,10 +6,8 @@ import json from fastmcp import Context -from ..debug import debug_error_handler -@debug_error_handler async def get_timeline_entries( id: str, ctx: Context, diff --git a/src/devrev_mcp/tools/search.py b/src/devrev_mcp/tools/search.py index 1fffa53..c417d45 100644 --- a/src/devrev_mcp/tools/search.py +++ b/src/devrev_mcp/tools/search.py @@ -10,10 +10,8 @@ from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler -@debug_error_handler async def search(query: str, namespace: str, ctx: Context) -> str: """ Search DevRev using the provided query and return parsed, useful information. 
diff --git a/src/devrev_mcp/tools/update_object.py b/src/devrev_mcp/tools/update_object.py index 08f2182..a4a0ee2 100644 --- a/src/devrev_mcp/tools/update_object.py +++ b/src/devrev_mcp/tools/update_object.py @@ -7,10 +7,8 @@ import json from fastmcp import Context from ..utils import make_devrev_request -from ..debug import debug_error_handler -@debug_error_handler async def update_object( id: str, type: str, From 6c3e4130962d41df7c24ef4a59f50ed2c19978c7 Mon Sep 17 00:00:00 2001 From: Sara Date: Sun, 1 Jun 2025 14:38:27 -0400 Subject: [PATCH 07/17] removes unnecessary watchdog pretty sure fastmcp has support for this --- mcp_wrapper.py | 225 ------------------------------------------------- 1 file changed, 225 deletions(-) delete mode 100755 mcp_wrapper.py diff --git a/mcp_wrapper.py b/mcp_wrapper.py deleted file mode 100755 index bd93b2f..0000000 --- a/mcp_wrapper.py +++ /dev/null @@ -1,225 +0,0 @@ -#!/usr/bin/env python3 -""" -MCP Server Wrapper with Watchdog File Monitoring - -This wrapper manages the actual MCP server as a subprocess and restarts it -when source files change, while maintaining a stable connection to the MCP client. 
-""" - -import sys -import time -import signal -import subprocess -import threading -from pathlib import Path -from watchdog.observers import Observer -from watchdog.events import FileSystemEventHandler - - -class MCPServerManager: - def __init__(self, server_command, watch_dirs, watch_files): - self.server_command = server_command - self.watch_dirs = watch_dirs - self.watch_files = watch_files - self.server_process = None - self.observer = None - self.restart_requested = False - self.last_restart = 0 - self.restart_delay = 1.0 # Minimum seconds between restarts - - def start_server(self): - """Start the MCP server subprocess.""" - if self.server_process and self.server_process.poll() is None: - return # Already running - - print(f"๐Ÿš€ Starting MCP server: {' '.join(self.server_command)}", file=sys.stderr) - - try: - self.server_process = subprocess.Popen( - self.server_command, - stdin=subprocess.PIPE, - stdout=subprocess.PIPE, - stderr=subprocess.PIPE, - text=True, - bufsize=0 # Unbuffered for real-time communication - ) - print(f"๐Ÿ“ Server started with PID: {self.server_process.pid}", file=sys.stderr) - except Exception as e: - print(f"โŒ Failed to start server: {e}", file=sys.stderr) - sys.exit(1) - - def stop_server(self): - """Stop the MCP server subprocess.""" - if self.server_process and self.server_process.poll() is None: - print(f"๐Ÿ›‘ Stopping server (PID: {self.server_process.pid})", file=sys.stderr) - self.server_process.terminate() - try: - self.server_process.wait(timeout=5) - except subprocess.TimeoutExpired: - print("โš ๏ธ Server didn't stop gracefully, killing...", file=sys.stderr) - self.server_process.kill() - self.server_process.wait() - self.server_process = None - - def restart_server(self): - """Restart the MCP server subprocess.""" - current_time = time.time() - if current_time - self.last_restart < self.restart_delay: - return # Too soon to restart - - self.last_restart = current_time - print("๐Ÿ”„ Restarting MCP server...", 
file=sys.stderr) - self.stop_server() - time.sleep(0.5) # Brief pause - self.start_server() - print("โœ… Server restarted", file=sys.stderr) - - def setup_file_watcher(self): - """Set up file watching with watchdog.""" - class RestartHandler(FileSystemEventHandler): - def __init__(self, manager): - self.manager = manager - - def on_modified(self, event): - if event.is_directory: - return - - # Only watch Python files and pyproject.toml - if not (event.src_path.endswith('.py') or event.src_path.endswith('pyproject.toml')): - return - - print(f"๐Ÿ”„ File changed: {event.src_path}", file=sys.stderr) - self.manager.restart_server() - - self.observer = Observer() - handler = RestartHandler(self) - - # Watch directories - for watch_dir in self.watch_dirs: - if watch_dir.exists(): - self.observer.schedule(handler, str(watch_dir), recursive=True) - print(f"๐Ÿ“ Watching directory: {watch_dir}", file=sys.stderr) - - # Watch specific files by watching their parent directories - for watch_file in self.watch_files: - if watch_file.exists(): - self.observer.schedule(handler, str(watch_file.parent), recursive=False) - print(f"๐Ÿ“„ Watching file: {watch_file}", file=sys.stderr) - - self.observer.start() - print("๐Ÿ‘€ File watcher started", file=sys.stderr) - - def forward_io(self): - """Forward stdin/stdout between client and server subprocess.""" - def forward_stdin(): - """Forward stdin from client to server.""" - try: - while self.server_process and self.server_process.poll() is None: - line = sys.stdin.readline() - if not line: - break - if self.server_process and self.server_process.stdin: - self.server_process.stdin.write(line) - self.server_process.stdin.flush() - except Exception as e: - print(f"stdin forwarding error: {e}", file=sys.stderr) - - def forward_stdout(): - """Forward stdout from server to client.""" - try: - while self.server_process and self.server_process.poll() is None: - if self.server_process and self.server_process.stdout: - line = 
self.server_process.stdout.readline() - if not line: - break - sys.stdout.write(line) - sys.stdout.flush() - except Exception as e: - print(f"stdout forwarding error: {e}", file=sys.stderr) - - def forward_stderr(): - """Forward stderr from server to our stderr.""" - try: - while self.server_process and self.server_process.poll() is None: - if self.server_process and self.server_process.stderr: - line = self.server_process.stderr.readline() - if not line: - break - print(f"[SERVER] {line.rstrip()}", file=sys.stderr) - except Exception as e: - print(f"stderr forwarding error: {e}", file=sys.stderr) - - # Start forwarding threads - stdin_thread = threading.Thread(target=forward_stdin, daemon=True) - stdout_thread = threading.Thread(target=forward_stdout, daemon=True) - stderr_thread = threading.Thread(target=forward_stderr, daemon=True) - - stdin_thread.start() - stdout_thread.start() - stderr_thread.start() - - return stdin_thread, stdout_thread, stderr_thread - - def run(self): - """Main run loop.""" - # Set up signal handlers - def signal_handler(signum, frame): - print("๐Ÿงน Shutting down...", file=sys.stderr) - self.stop_server() - if self.observer: - self.observer.stop() - self.observer.join() - sys.exit(0) - - signal.signal(signal.SIGINT, signal_handler) - signal.signal(signal.SIGTERM, signal_handler) - - try: - # Start file watcher - self.setup_file_watcher() - - # Start server - self.start_server() - - # Set up IO forwarding - threads = self.forward_io() - - print("โœจ MCP wrapper ready! 
Server will auto-reload when files change.", file=sys.stderr) - - # Wait for server process - while True: - if self.server_process: - exit_code = self.server_process.poll() - if exit_code is not None: - print(f"โš ๏ธ Server exited with code {exit_code}", file=sys.stderr) - # Don't auto-restart if it was an intentional shutdown - if exit_code != 0: - time.sleep(1) - self.start_server() - time.sleep(0.1) - - except KeyboardInterrupt: - signal_handler(signal.SIGINT, None) - - -def main(): - """Main entry point.""" - # Configuration - server_dir = Path(__file__).parent - server_command = [ - "/Users/sara/.local/bin/uv", "run", "devrev-mcp" - ] - - watch_dirs = [server_dir / "src"] - watch_files = [server_dir / "pyproject.toml"] - - print("๐Ÿ”„ Starting MCP Server Wrapper with Python watchdog", file=sys.stderr) - print("", file=sys.stderr) - - # Create and run manager - manager = MCPServerManager(server_command, watch_dirs, watch_files) - manager.run() - - -if __name__ == "__main__": - main() \ No newline at end of file From 12a8df04f1086e3ffc8a68f9982392f250ea8fe3 Mon Sep 17 00:00:00 2001 From: Sara Date: Sun, 1 Jun 2025 15:09:00 -0400 Subject: [PATCH 08/17] cleaning things up --- pyproject.toml | 3 +-- run-mcp.sh | 15 --------------- src/devrev_mcp/resources/ticket.py | 5 ----- uv.lock | 29 ----------------------------- 4 files changed, 1 insertion(+), 51 deletions(-) delete mode 100755 run-mcp.sh diff --git a/pyproject.toml b/pyproject.toml index dd4e89f..3cfdfe3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -6,8 +6,7 @@ readme = "README.md" requires-python = ">=3.11" dependencies = [ "fastmcp>=2.0.0", - "requests", - "watchdog" + "requests" ] [[project.authors]] diff --git a/run-mcp.sh b/run-mcp.sh deleted file mode 100755 index e9e31b2..0000000 --- a/run-mcp.sh +++ /dev/null @@ -1,15 +0,0 @@ -#!/bin/bash - -# MCP Server with Python Watchdog File Watching -# Usage: ./run-mcp-watchdog.sh [mcp-args...] 
- -set -e - -SERVER_DIR="/Users/sara/work/fossa/devrev/mcp-server" -cd "$SERVER_DIR" - -echo "๐Ÿ”„ Starting MCP Server with Python watchdog (no fswatch needed)..." -echo "" - -# Run the Python wrapper that handles file watching and server management -exec /Users/sara/.local/bin/uv run python mcp_wrapper.py "$@" \ No newline at end of file diff --git a/src/devrev_mcp/resources/ticket.py b/src/devrev_mcp/resources/ticket.py index a01e2b8..6b1630b 100644 --- a/src/devrev_mcp/resources/ticket.py +++ b/src/devrev_mcp/resources/ticket.py @@ -22,8 +22,6 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: JSON string containing the ticket data with timeline entries and artifacts """ try: - await ctx.info(f"[DEBUG] ticket() called with ticket_id: {ticket_id}") - # Convert simple ID to TKT- format for API calls if ticket_id.upper().startswith("TKT-"): # Extract numeric part and reformat @@ -33,8 +31,6 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: normalized_id = f"TKT-{ticket_id}" cache_key = f"ticket:{ticket_id}" - await ctx.info(f"[DEBUG] normalized_id: {normalized_id}, cache_key: {cache_key}") - # Check cache first if cache_key in devrev_cache: await ctx.info(f"Retrieved ticket {normalized_id} from cache") @@ -51,7 +47,6 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: raise ValueError(f"Failed to fetch ticket {normalized_id} (HTTP {response.status_code}): {error_text}") result = response.json() - await ctx.info(f"[DEBUG] API response structure: {list(result.keys()) if isinstance(result, dict) else type(result)}") # Extract the work object from the API response if isinstance(result, dict) and "work" in result: diff --git a/uv.lock b/uv.lock index e9c6cd5..f2dc754 100644 --- a/uv.lock +++ b/uv.lock @@ -115,14 +115,12 @@ source = { editable = "." 
} dependencies = [ { name = "fastmcp" }, { name = "requests" }, - { name = "watchdog" }, ] [package.metadata] requires-dist = [ { name = "fastmcp", specifier = ">=2.0.0" }, { name = "requests" }, - { name = "watchdog" }, ] [[package]] @@ -502,33 +500,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/c1/2d27b0a15826c2b71dcf6e2f5402181ef85acf439617bb2f1453125ce1f3/uvicorn-0.32.1-py3-none-any.whl", hash = "sha256:82ad92fd58da0d12af7482ecdb5f2470a04c9c9a53ced65b9bbb4a205377602e", size = 63828, upload-time = "2024-11-20T19:41:11.244Z" }, ] -[[package]] -name = "watchdog" -version = "6.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/db/7d/7f3d619e951c88ed75c6037b246ddcf2d322812ee8ea189be89511721d54/watchdog-6.0.0.tar.gz", hash = "sha256:9ddf7c82fda3ae8e24decda1338ede66e1c99883db93711d8fb941eaa2d8c282", size = 131220, upload-time = "2024-11-01T14:07:13.037Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e0/24/d9be5cd6642a6aa68352ded4b4b10fb0d7889cb7f45814fb92cecd35f101/watchdog-6.0.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6eb11feb5a0d452ee41f824e271ca311a09e250441c262ca2fd7ebcf2461a06c", size = 96393, upload-time = "2024-11-01T14:06:31.756Z" }, - { url = "https://files.pythonhosted.org/packages/63/7a/6013b0d8dbc56adca7fdd4f0beed381c59f6752341b12fa0886fa7afc78b/watchdog-6.0.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ef810fbf7b781a5a593894e4f439773830bdecb885e6880d957d5b9382a960d2", size = 88392, upload-time = "2024-11-01T14:06:32.99Z" }, - { url = "https://files.pythonhosted.org/packages/d1/40/b75381494851556de56281e053700e46bff5b37bf4c7267e858640af5a7f/watchdog-6.0.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:afd0fe1b2270917c5e23c2a65ce50c2a4abb63daafb0d419fde368e272a76b7c", size = 89019, upload-time = "2024-11-01T14:06:34.963Z" }, - { url = 
"https://files.pythonhosted.org/packages/39/ea/3930d07dafc9e286ed356a679aa02d777c06e9bfd1164fa7c19c288a5483/watchdog-6.0.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:bdd4e6f14b8b18c334febb9c4425a878a2ac20efd1e0b231978e7b150f92a948", size = 96471, upload-time = "2024-11-01T14:06:37.745Z" }, - { url = "https://files.pythonhosted.org/packages/12/87/48361531f70b1f87928b045df868a9fd4e253d9ae087fa4cf3f7113be363/watchdog-6.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c7c15dda13c4eb00d6fb6fc508b3c0ed88b9d5d374056b239c4ad1611125c860", size = 88449, upload-time = "2024-11-01T14:06:39.748Z" }, - { url = "https://files.pythonhosted.org/packages/5b/7e/8f322f5e600812e6f9a31b75d242631068ca8f4ef0582dd3ae6e72daecc8/watchdog-6.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6f10cb2d5902447c7d0da897e2c6768bca89174d0c6e1e30abec5421af97a5b0", size = 89054, upload-time = "2024-11-01T14:06:41.009Z" }, - { url = "https://files.pythonhosted.org/packages/68/98/b0345cabdce2041a01293ba483333582891a3bd5769b08eceb0d406056ef/watchdog-6.0.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:490ab2ef84f11129844c23fb14ecf30ef3d8a6abafd3754a6f75ca1e6654136c", size = 96480, upload-time = "2024-11-01T14:06:42.952Z" }, - { url = "https://files.pythonhosted.org/packages/85/83/cdf13902c626b28eedef7ec4f10745c52aad8a8fe7eb04ed7b1f111ca20e/watchdog-6.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:76aae96b00ae814b181bb25b1b98076d5fc84e8a53cd8885a318b42b6d3a5134", size = 88451, upload-time = "2024-11-01T14:06:45.084Z" }, - { url = "https://files.pythonhosted.org/packages/fe/c4/225c87bae08c8b9ec99030cd48ae9c4eca050a59bf5c2255853e18c87b50/watchdog-6.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a175f755fc2279e0b7312c0035d52e27211a5bc39719dd529625b1930917345b", size = 89057, upload-time = "2024-11-01T14:06:47.324Z" }, - { url = 
"https://files.pythonhosted.org/packages/a9/c7/ca4bf3e518cb57a686b2feb4f55a1892fd9a3dd13f470fca14e00f80ea36/watchdog-6.0.0-py3-none-manylinux2014_aarch64.whl", hash = "sha256:7607498efa04a3542ae3e05e64da8202e58159aa1fa4acddf7678d34a35d4f13", size = 79079, upload-time = "2024-11-01T14:06:59.472Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/d46dc9332f9a647593c947b4b88e2381c8dfc0942d15b8edc0310fa4abb1/watchdog-6.0.0-py3-none-manylinux2014_armv7l.whl", hash = "sha256:9041567ee8953024c83343288ccc458fd0a2d811d6a0fd68c4c22609e3490379", size = 79078, upload-time = "2024-11-01T14:07:01.431Z" }, - { url = "https://files.pythonhosted.org/packages/d4/57/04edbf5e169cd318d5f07b4766fee38e825d64b6913ca157ca32d1a42267/watchdog-6.0.0-py3-none-manylinux2014_i686.whl", hash = "sha256:82dc3e3143c7e38ec49d61af98d6558288c415eac98486a5c581726e0737c00e", size = 79076, upload-time = "2024-11-01T14:07:02.568Z" }, - { url = "https://files.pythonhosted.org/packages/ab/cc/da8422b300e13cb187d2203f20b9253e91058aaf7db65b74142013478e66/watchdog-6.0.0-py3-none-manylinux2014_ppc64.whl", hash = "sha256:212ac9b8bf1161dc91bd09c048048a95ca3a4c4f5e5d4a7d1b1a7d5752a7f96f", size = 79077, upload-time = "2024-11-01T14:07:03.893Z" }, - { url = "https://files.pythonhosted.org/packages/2c/3b/b8964e04ae1a025c44ba8e4291f86e97fac443bca31de8bd98d3263d2fcf/watchdog-6.0.0-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:e3df4cbb9a450c6d49318f6d14f4bbc80d763fa587ba46ec86f99f9e6876bb26", size = 79078, upload-time = "2024-11-01T14:07:05.189Z" }, - { url = "https://files.pythonhosted.org/packages/62/ae/a696eb424bedff7407801c257d4b1afda455fe40821a2be430e173660e81/watchdog-6.0.0-py3-none-manylinux2014_s390x.whl", hash = "sha256:2cce7cfc2008eb51feb6aab51251fd79b85d9894e98ba847408f662b3395ca3c", size = 79077, upload-time = "2024-11-01T14:07:06.376Z" }, - { url = 
"https://files.pythonhosted.org/packages/b5/e8/dbf020b4d98251a9860752a094d09a65e1b436ad181faf929983f697048f/watchdog-6.0.0-py3-none-manylinux2014_x86_64.whl", hash = "sha256:20ffe5b202af80ab4266dcd3e91aae72bf2da48c0d33bdb15c66658e685e94e2", size = 79078, upload-time = "2024-11-01T14:07:07.547Z" }, - { url = "https://files.pythonhosted.org/packages/07/f6/d0e5b343768e8bcb4cda79f0f2f55051bf26177ecd5651f84c07567461cf/watchdog-6.0.0-py3-none-win32.whl", hash = "sha256:07df1fdd701c5d4c8e55ef6cf55b8f0120fe1aef7ef39a1c6fc6bc2e606d517a", size = 79065, upload-time = "2024-11-01T14:07:09.525Z" }, - { url = "https://files.pythonhosted.org/packages/db/d9/c495884c6e548fce18a8f40568ff120bc3a4b7b99813081c8ac0c936fa64/watchdog-6.0.0-py3-none-win_amd64.whl", hash = "sha256:cbafb470cf848d93b5d013e2ecb245d4aa1c8fd0504e863ccefa32445359d680", size = 79070, upload-time = "2024-11-01T14:07:10.686Z" }, - { url = "https://files.pythonhosted.org/packages/33/e8/e40370e6d74ddba47f002a32919d91310d6074130fe4e17dabcafc15cbf1/watchdog-6.0.0-py3-none-win_ia64.whl", hash = "sha256:a1914259fa9e1454315171103c6a30961236f508b9b623eae470268bbcc6a22f", size = 79067, upload-time = "2024-11-01T14:07:11.845Z" }, -] - [[package]] name = "websockets" version = "15.0.1" From e7c98f1bacf68929ad6170ddf4432d640e5298b4 Mon Sep 17 00:00:00 2001 From: Sara Date: Sun, 1 Jun 2025 16:08:07 -0400 Subject: [PATCH 09/17] adds download_artifact tool --- CLAUDE.md | 3 +- src/devrev_mcp/resources/artifact.py | 28 ++++ src/devrev_mcp/server.py | 64 +++++++-- src/devrev_mcp/tools/download_artifact.py | 155 ++++++++++++++++++++++ 4 files changed, 239 insertions(+), 11 deletions(-) create mode 100644 src/devrev_mcp/tools/download_artifact.py diff --git a/CLAUDE.md b/CLAUDE.md index fb3d387..d287fa6 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1 +1,2 @@ -- Reference https://gofastmcp.com/llms.txt \ No newline at end of file +- Reference https://gofastmcp.com/llms.txt +- Reference https://developer.devrev.ai/llms.txt \ No newline at 
end of file diff --git a/src/devrev_mcp/resources/artifact.py b/src/devrev_mcp/resources/artifact.py index 1bc69c2..7257c85 100644 --- a/src/devrev_mcp/resources/artifact.py +++ b/src/devrev_mcp/resources/artifact.py @@ -44,6 +44,34 @@ async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: result = response.json() + # Try to get download URL if available through artifacts.locate + artifact_info = result.get("artifact", {}) + if artifact_info and not any(key in artifact_info.get("file", {}) for key in ["download_url", "url"]): + try: + await ctx.info(f"Attempting to get download URL for artifact {artifact_id}") + locate_response = make_devrev_request( + "artifacts.locate", + {"id": artifact_id} + ) + + if locate_response.status_code == 200: + locate_data = locate_response.json() + locate_artifact = locate_data.get("artifact", {}) + if locate_artifact: + # Merge locate data into the main artifact data + if "download_url" in locate_artifact: + artifact_info["download_url"] = locate_artifact["download_url"] + if "file" in locate_artifact and "download_url" in locate_artifact["file"]: + if "file" not in artifact_info: + artifact_info["file"] = {} + artifact_info["file"]["download_url"] = locate_artifact["file"]["download_url"] + await ctx.info(f"Successfully added download URL for artifact {artifact_id}") + else: + await ctx.info(f"artifacts.locate not available for {artifact_id}: HTTP {locate_response.status_code}") + except Exception as locate_error: + await ctx.info(f"Could not locate download URL for artifact {artifact_id}: {str(locate_error)}") + # Continue without download URL + # Add navigation links to timeline entry (artifacts belong to timeline entries) # Note: We'd need to determine the timeline entry ID from the artifact context # For now, adding a placeholder structure that could be populated based on API response diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index b916421..c76551e 100644 --- 
a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -20,6 +20,7 @@ from .tools.search import search as search_tool from .tools.create_object import create_object as create_object_tool from .tools.update_object import update_object as update_object_tool +from .tools.download_artifact import download_artifact as download_artifact_tool # Create the FastMCP server @@ -196,17 +197,42 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: ticket_data = ticket_response.json() work = ticket_data.get("work", {}) - # Get timeline entries - timeline_response = make_devrev_request( - "timeline-entries.list", - {"object": normalized_id} - ) + # Get timeline entries with pagination + all_entries = [] + cursor = None + page_count = 0 + max_pages = 50 # Safety limit to prevent infinite loops - if timeline_response.status_code != 200: - raise ValueError(f"Failed to fetch timeline for {normalized_id}") - - timeline_data = timeline_response.json() - all_entries = timeline_data.get("timeline_entries", []) + while page_count < max_pages: + request_payload = { + "object": normalized_id, + "limit": 50 # Use DevRev's default limit + } + if cursor: + request_payload["cursor"] = cursor + request_payload["mode"] = "after" # Get entries after this cursor + + timeline_response = make_devrev_request( + "timeline-entries.list", + request_payload + ) + + if timeline_response.status_code != 200: + raise ValueError(f"Failed to fetch timeline for {normalized_id}") + + timeline_data = timeline_response.json() + page_entries = timeline_data.get("timeline_entries", []) + all_entries.extend(page_entries) + + # Check for next page using DevRev's cursor system + cursor = timeline_data.get("next_cursor") + page_count += 1 + + await ctx.info(f"DEBUG: Fetched page {page_count} with {len(page_entries)} entries, total so far: {len(all_entries)}") + + # Break if no more pages or no entries in this page + if not cursor or len(page_entries) == 0: + break await ctx.info(f"DEBUG: Found 
{len(all_entries)} timeline entries for {normalized_id}") @@ -598,6 +624,24 @@ async def get_ticket(id: str, ctx: Context) -> str: """ return await get_ticket_tool(id, ctx) +@mcp.tool( + name="download_artifact", + description="Download a DevRev artifact to a specified directory. Retrieves the artifact file and saves it locally with proper metadata.", + tags=["download", "artifact", "devrev", "files", "local-storage"] +) +async def download_artifact(artifact_id: str, download_directory: str, ctx: Context) -> str: + """ + Download a DevRev artifact to a specified directory. + + Args: + artifact_id: The DevRev artifact ID to download + download_directory: The local directory path where the artifact should be saved + + Returns: + JSON string containing download result and file information + """ + return await download_artifact_tool(artifact_id, download_directory, ctx) + def main(): """Main entry point for the DevRev MCP server.""" # Run the server diff --git a/src/devrev_mcp/tools/download_artifact.py b/src/devrev_mcp/tools/download_artifact.py new file mode 100644 index 0000000..d425bce --- /dev/null +++ b/src/devrev_mcp/tools/download_artifact.py @@ -0,0 +1,155 @@ +""" +DevRev Download Artifact Tool + +Provides functionality to download DevRev artifacts to a specified directory. +""" + +import json +import os +import requests +from pathlib import Path +from fastmcp import Context +from ..utils import make_devrev_request + + +async def download_artifact(artifact_id: str, download_directory: str, ctx: Context) -> str: + """ + Download a DevRev artifact to a specified directory. 
+ + Args: + artifact_id: The DevRev artifact ID + download_directory: The directory path where the artifact should be downloaded + ctx: FastMCP context + + Returns: + JSON string containing download result information + """ + try: + await ctx.info(f"Starting download of artifact {artifact_id} to {download_directory}") + + # Ensure download directory exists + os.makedirs(download_directory, exist_ok=True) + + # First, get artifact information using artifacts.get + artifact_response = make_devrev_request( + "artifacts.get", + {"id": artifact_id} + ) + + if artifact_response.status_code != 200: + error_text = artifact_response.text + await ctx.error(f"Failed to fetch artifact {artifact_id}: HTTP {artifact_response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch artifact {artifact_id} (HTTP {artifact_response.status_code}): {error_text}") + + artifact_data = artifact_response.json() + artifact_info = artifact_data.get("artifact", {}) + + await ctx.info(f"Retrieved artifact metadata: {artifact_info.get('display_id', artifact_id)}") + + # Check if download URL is available in the artifact data + download_url = None + file_info = artifact_info.get("file", {}) + + # Look for download URL in various possible locations + if "download_url" in file_info: + download_url = file_info["download_url"] + elif "url" in file_info: + download_url = file_info["url"] + elif "download_url" in artifact_info: + download_url = artifact_info["download_url"] + elif "url" in artifact_info: + download_url = artifact_info["url"] + + # If no direct download URL, try to locate the artifact with additional endpoint + if not download_url: + await ctx.info("No direct download URL found, attempting to locate artifact...") + # Try a different approach - some APIs have a separate locate endpoint + locate_response = make_devrev_request( + "artifacts.locate", + {"id": artifact_id} + ) + + if locate_response.status_code == 200: + locate_data = locate_response.json() + # Check for URL 
directly in the response (primary location) + download_url = locate_data.get("url") + # Also check under artifact key as fallback + if not download_url: + artifact_locate_info = locate_data.get("artifact", {}) + download_url = artifact_locate_info.get("download_url") or artifact_locate_info.get("url") + else: + await ctx.info(f"artifacts.locate not available or failed: HTTP {locate_response.status_code}") + + if not download_url: + error_msg = "No download URL found for artifact. The artifact may not be downloadable or the API doesn't provide direct download URLs." + await ctx.error(error_msg) + return json.dumps({ + "success": False, + "error": error_msg, + "artifact_id": artifact_id, + "artifact_info": artifact_info + }, indent=2) + + # Extract filename from artifact info or URL, prioritizing the actual filename + filename = file_info.get("name") or file_info.get("filename") + if not filename: + # Extract from URL as fallback + from urllib.parse import urlparse + parsed_url = urlparse(download_url) + filename = os.path.basename(parsed_url.path) + if not filename: + # Use display_id as fallback + filename = artifact_info.get("display_id") + if not filename: + filename = f"artifact_{artifact_id}" + + # Download the file + download_path = Path(download_directory) / filename + + await ctx.info(f"Downloading artifact from {download_url} to {download_path}") + + # Download with streaming to handle large files + with requests.get(download_url, stream=True, timeout=60) as response: + response.raise_for_status() + + with open(download_path, 'wb') as f: + for chunk in response.iter_content(chunk_size=8192): + f.write(chunk) + + file_size = os.path.getsize(download_path) + + result = { + "success": True, + "artifact_id": artifact_id, + "filename": filename, + "download_path": str(download_path), + "file_size": file_size, + "download_directory": download_directory, + "artifact_info": { + "display_id": artifact_info.get("display_id"), + "type": file_info.get("type"), + 
"mime_type": file_info.get("mime_type"), + "created_date": artifact_info.get("created_date") + } + } + + await ctx.info(f"Successfully downloaded artifact {artifact_id} ({file_size} bytes) to {download_path}") + return json.dumps(result, indent=2) + + except requests.RequestException as e: + error_msg = f"Failed to download artifact {artifact_id}: {str(e)}" + await ctx.error(error_msg) + return json.dumps({ + "success": False, + "error": error_msg, + "artifact_id": artifact_id + }, indent=2) + + except Exception as e: + error_msg = f"Failed to download artifact {artifact_id}: {str(e)}" + await ctx.error(error_msg) + return json.dumps({ + "success": False, + "error": error_msg, + "artifact_id": artifact_id + }, indent=2) \ No newline at end of file From e878e2cabec3bc464907a8ea17e684d443e42d36 Mon Sep 17 00:00:00 2001 From: Sara Date: Mon, 2 Jun 2025 12:54:21 -0400 Subject: [PATCH 10/17] adds visibility understanding --- README.md | 131 +++++++++- src/devrev_mcp/resources/README.md | 251 +++++++++++++++++++ src/devrev_mcp/server.py | 87 +++---- src/devrev_mcp/tools/get_timeline_entries.py | 96 ++++++- src/devrev_mcp/types.py | 157 ++++++++++++ 5 files changed, 662 insertions(+), 60 deletions(-) create mode 100644 src/devrev_mcp/resources/README.md create mode 100644 src/devrev_mcp/types.py diff --git a/README.md b/README.md index 2e1d400..27a7b1d 100644 --- a/README.md +++ b/README.md @@ -6,10 +6,114 @@ A Model Context Protocol server for DevRev. It is used to search and retrieve in ## Tools -- `search`: Search for information using the DevRev search API with the provided query and namespace. -- `get_object`: Get all information about a DevRev issue or ticket using its ID. -- `create_object`: Create a new issue or ticket in DevRev with specified properties. -- `update_object`: Update an existing issue or ticket in DevRev by modifying its title or body content. 
+### Core Tools + +#### `search` +Search for information using the DevRev search API with hybrid search capabilities. + +**Description**: Search DevRev objects using hybrid search. Supports natural language queries across tickets, issues, articles, parts, and users. Returns enriched results with metadata, ownership, status, and organizational context for efficient triage and analysis. + +**Parameters**: +- `query` (string, required): The search query string +- `namespace` (string, required): The namespace to search in. Must be one of: + - `article` - Knowledge base articles and documentation + - `issue` - Internal development issues and bugs + - `ticket` - Customer support tickets + - `part` - Product parts and components + - `dev_user` - DevRev users and team members + +**Example Usage**: +``` +Search for "login error" in tickets namespace to find customer support tickets +Search for "authentication bug" in issues namespace to find development issues +``` + +#### `get_object` +Retrieve comprehensive information about any DevRev object. + +**Description**: Retrieve comprehensive information about any DevRev object including tickets, issues, parts, and users. Returns complete metadata, relationships, assignment details, and history for thorough analysis and investigation. + +**Parameters**: +- `id` (string, required): The DevRev object ID (e.g., "TKT-12345", "ISS-67890") + +**Example Usage**: +``` +Get details for ticket TKT-12345 +Get information about issue ISS-67890 +``` + +#### `create_object` +Create new DevRev tickets or issues with full metadata support. + +**Description**: Create new DevRev tickets or issues with full metadata support. Supports both customer-facing tickets and internal issues with proper assignment, categorization, and detailed descriptions for workflow automation. 
+ +**Parameters**: +- `type` (string, required): The type of object to create ("issue" or "ticket") +- `title` (string, required): The title/summary of the object +- `applies_to_part` (string, required): The part ID this object applies to +- `body` (string, optional): The body/description of the object +- `owned_by` (array of strings, optional): List of user IDs who should own this object + +**Example Usage**: +``` +Create a customer ticket for a login issue +Create an internal issue for a bug fix +``` + +#### `update_object` +Update existing DevRev tickets or issues with new information. + +**Description**: Update existing DevRev tickets or issues with new information, descriptions, or titles. Maintains object history and audit trails while allowing incremental updates as investigations progress. + +**Parameters**: +- `id` (string, required): The ID of the object to update +- `type` (string, required): The type of object ("issue" or "ticket") +- `title` (string, optional): New title for the object +- `body` (string, optional): New body/description for the object + +### Advanced Tools + +#### `get_timeline_entries` +Retrieve chronological timeline of all activity on a DevRev ticket. + +**Description**: Retrieve chronological timeline of all activity on a DevRev ticket including comments, status changes, assignments, and system events. Essential for understanding ticket progression, customer interactions, and audit trails. Accepts flexible ID formats (TKT-12345, 12345, or full don: format) and provides multiple output formats for different use cases. + +**Parameters**: +- `id` (string, required): The ticket ID in various formats (TKT-12345, 12345, etc.) +- `format` (string, optional): Output format ("summary" by default) + +#### `get_ticket` +Get a DevRev ticket with all associated timeline entries and artifacts. + +**Description**: Get a DevRev ticket with all associated timeline entries and artifacts. 
Provides enriched ticket data with complete conversation history and attached files for comprehensive support analysis. + +**Parameters**: +- `id` (string, required): The ticket ID + +#### `download_artifact` +Download a DevRev artifact to a specified directory. + +**Description**: Download a DevRev artifact to a specified directory. Retrieves the artifact file and saves it locally with proper metadata. + +**Parameters**: +- `artifact_id` (string, required): The artifact ID to download +- `download_directory` (string, required): Local directory path to save the artifact + +## Resources + +The MCP server provides several resource endpoints for accessing DevRev data through URI-based routing: + +### Ticket Resources + +- `devrev://tickets/{ticket_id}` - Access comprehensive ticket information with navigation links +- `devrev://tickets/{ticket_id}/timeline` - Access enriched timeline for a ticket with customer context +- `devrev://tickets/{ticket_id}/timeline/{entry_id}` - Access individual timeline entry details +- `devrev://tickets/{ticket_id}/artifacts` - Access all artifacts associated with a ticket + +### Artifact Resources + +- `devrev://artifacts/{artifact_id}` - Access artifact metadata with download URLs +- `devrev://artifacts/{artifact_id}/tickets` - Get all tickets that reference an artifact ## Configuration @@ -65,3 +169,22 @@ On Windows: `%APPDATA%/Claude/claude_desktop_config.json` ``` + +## Features + +- **Hybrid Search**: Advanced search capabilities across all DevRev object types +- **Rich Metadata**: Complete object information including relationships and history +- **Timeline Analysis**: Detailed conversation flows and activity tracking +- **Artifact Management**: File handling with download capabilities +- **Resource Navigation**: URI-based resource access with automatic routing +- **Caching**: Intelligent caching for improved performance +- **Error Handling**: Comprehensive error handling with detailed logging + +## Development + +This MCP server 
is built using FastMCP and provides modular tools and resources for DevRev integration. The codebase is organized into:
+
+- `tools/` - Individual tool implementations
+- `resources/` - Resource handlers for different object types
+- `types.py` - Type definitions and data structures
+- `utils.py` - Shared utilities and API helpers
diff --git a/src/devrev_mcp/resources/README.md b/src/devrev_mcp/resources/README.md
new file mode 100644
index 0000000..bc87a3b
--- /dev/null
+++ b/src/devrev_mcp/resources/README.md
@@ -0,0 +1,251 @@
+# DevRev MCP Resources
+
+This directory contains resource handlers that provide URI-based access to DevRev data through the Model Context Protocol (MCP). Resources allow you to access DevRev objects using structured URIs and receive enriched data with navigation links and related information.
+
+## Overview
+
+Resources in this MCP server provide specialized access to different types of DevRev objects. Unlike tools, which perform actions, resources provide read-only access to data through URI patterns. Each resource handler enriches the data with additional context, navigation links, and related objects.
+
+## Available Resources
+
+### Ticket Resources
+
+#### `ticket.py` - Individual Ticket Access
+**URI Pattern**: `devrev://tickets/{ticket_id}`
+
+**Description**: Access comprehensive DevRev ticket information with navigation links to related resources. Includes customer details, status progression, assignment history, and navigation to timeline and artifacts.
+ +**Features**: +- Complete ticket metadata (title, status, severity, stage) +- Customer and organization information +- Owner and assignment details +- Timeline entries with conversation history +- Associated artifacts (files, screenshots, attachments) +- Navigation links to related resources +- Intelligent caching for performance + +**Example URIs**: +- `devrev://tickets/12345` - Access ticket TKT-12345 +- `devrev://tickets/TKT-67890` - Access ticket using full ID format + +**Returned Data Structure**: +```json +{ + "id": "TKT-12345", + "title": "Customer login issue", + "severity": "high", + "stage": { "name": "in_progress" }, + "owned_by": [{"name": "John Doe", "email": "john@example.com"}], + "rev_org": {"display_name": "Acme Corp"}, + "timeline_entries": [...], + "artifacts": [...], + "links": { + "timeline": "devrev://tickets/12345/timeline", + "artifacts": "devrev://tickets/12345/artifacts" + } +} +``` + +#### Ticket Timeline Access +**URI Pattern**: `devrev://tickets/{ticket_id}/timeline` + +**Description**: Access enriched timeline for a ticket with customer context, conversation flow, and artifacts. Returns token-efficient structured format focusing on support workflow. + +**Features**: +- Chronological conversation flow +- Customer and agent interactions +- Status change history +- Artifact references within conversations +- Support workflow context +- Customer journey tracking + +#### Individual Timeline Entry +**URI Pattern**: `devrev://tickets/{ticket_id}/timeline/{entry_id}` + +**Description**: Access individual timeline entry with detailed conversation data and navigation links. + +**Features**: +- Detailed entry metadata +- Author information +- Entry type and content +- Associated artifacts +- Navigation to related entries + +#### Ticket Artifacts Collection +**URI Pattern**: `devrev://tickets/{ticket_id}/artifacts` + +**Description**: Access all artifacts associated with a specific ticket. 
Returns a list of files, screenshots, and documents attached to the ticket.
+
+**Features**:
+- Complete artifact listing
+- File metadata (size, type, name)
+- Upload timestamps and authors
+- Download URLs (temporary)
+- Reverse links to timeline entries
+
+### Artifact Resources
+
+#### `artifact.py` - Individual Artifact Access
+**URI Pattern**: `devrev://artifacts/{artifact_id}`
+
+**Description**: Access DevRev artifact metadata with temporary download URLs and reverse links to associated tickets.
+
+**Features**:
+- Complete artifact metadata
+- File information (name, size, MIME type)
+- Temporary download URLs
+- Upload information (author, timestamp)
+- Reverse links to associated tickets
+- Content analysis metadata
+
+**Example Data Structure**:
+```json
+{
+  "id": "artifact_123",
+  "filename": "screenshot.png",
+  "size": 1024000,
+  "mime_type": "image/png",
+  "uploaded_by": {"name": "Customer Support"},
+  "created_date": "2024-01-15T10:30:00Z",
+  "download_url": "https://...",
+  "associated_tickets": ["TKT-12345"],
+  "links": {
+    "tickets": "devrev://artifacts/artifact_123/tickets"
+  }
+}
+```
+
+#### Artifact Reverse Links
+**URI Pattern**: `devrev://artifacts/{artifact_id}/tickets`
+
+**Description**: Access all tickets that reference this artifact. Provides reverse lookup from artifacts to tickets.
+
+**Features**:
+- Complete ticket list referencing the artifact
+- Ticket metadata and status
+- Timeline entry references
+- Support workflow context
+
+### Timeline Entry Resources
+
+#### `timeline_entry.py` - Individual Entry Access
+**URI Pattern**: Various patterns through timeline access
+
+**Description**: Provides specialized handling for individual timeline entries with rich conversation context.
+ +**Features**: +- Entry type classification (comment, status_change, system_event) +- Author and participant information +- Content formatting and rendering +- Associated artifacts and attachments +- Conversation threading +- Support workflow context + +## Resource Implementation Details + +### Caching Strategy + +All resources implement intelligent caching to improve performance: +- **Cache Keys**: Structured as `{resource_type}:{object_id}` +- **Cache Duration**: Configurable per resource type +- **Cache Invalidation**: Automatic on object updates +- **Memory Management**: LRU eviction for memory efficiency + +### Error Handling + +Resources provide comprehensive error handling: +- **Not Found**: Returns structured error with suggestions +- **Permission Denied**: Clear error messages with context +- **API Failures**: Graceful degradation with partial data +- **Network Issues**: Retry logic with exponential backoff + +### Navigation Links + +Resources include navigation links to related objects: +- **Hierarchical Navigation**: Parent-child relationships +- **Cross-References**: Related tickets, artifacts, users +- **Timeline Navigation**: Previous/next entries +- **Search Context**: Back to search results + +### Data Enrichment + +Each resource enriches basic DevRev data: +- **Relationship Resolution**: Loads related objects +- **Computed Fields**: Derived values and summaries +- **Context Addition**: Workflow and business context +- **Format Optimization**: Token-efficient representations + +## Usage Examples + +### Accessing a Ticket with Full Context +``` +Resource: devrev://tickets/12345 +Returns: Complete ticket with timeline, artifacts, and navigation +``` + +### Following Timeline Conversations +``` +1. Start: devrev://tickets/12345/timeline +2. Navigate: devrev://tickets/12345/timeline/entry_456 +3. Related: devrev://artifacts/artifact_789 +``` + +### Reverse Artifact Lookup +``` +1. Artifact: devrev://artifacts/screenshot_123 +2. 
Related Tickets: devrev://artifacts/screenshot_123/tickets +3. Specific Ticket: devrev://tickets/12345 +``` + +## Development Guidelines + +### Adding New Resources + +1. Create resource handler in appropriate module +2. Implement URI pattern matching +3. Add data enrichment logic +4. Include navigation links +5. Implement caching strategy +6. Add comprehensive error handling +7. Update this documentation + +### Resource Handler Pattern + +```python +async def resource_handler(object_id: str, ctx: Context, cache: dict) -> str: + """ + Standard resource handler pattern. + + Args: + object_id: The object identifier + ctx: FastMCP context for logging + cache: Shared cache dictionary + + Returns: + JSON string with enriched object data + """ + # 1. Normalize and validate object_id + # 2. Check cache for existing data + # 3. Fetch from DevRev API + # 4. Enrich with related data + # 5. Add navigation links + # 6. Cache and return result +``` + +### Testing Resources + +Resources should be tested for: +- **URI Pattern Matching**: Correct routing +- **Data Enrichment**: Complete related data +- **Navigation Links**: Valid URIs +- **Error Scenarios**: Graceful failures +- **Performance**: Caching effectiveness +- **Memory Usage**: Resource cleanup + +## Performance Considerations + +- **Lazy Loading**: Only fetch required related data +- **Batch Operations**: Group API calls when possible +- **Cache Warming**: Pre-load frequently accessed objects +- **Memory Limits**: Implement cache size limits +- **Network Optimization**: Minimize API round trips \ No newline at end of file diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index c76551e..04b8cc0 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -22,6 +22,8 @@ from .tools.update_object import update_object as update_object_tool from .tools.download_artifact import download_artifact as download_artifact_tool +# Import new types for visibility handling +from .types import VisibilityInfo, 
TimelineEntryType, format_visibility_summary # Create the FastMCP server mcp = FastMCP( @@ -164,8 +166,15 @@ async def ticket(ticket_id: str, ctx: Context) -> str: @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline", - description="Access enriched timeline for a ticket with customer context, conversation flow, and artifacts. Returns token-efficient structured format focusing on support workflow.", - tags=["timeline", "enriched", "devrev", "conversation", "artifacts"] + description="""Access enriched timeline for a ticket with customer context, conversation flow, artifacts, and detailed visibility information. + + Returns token-efficient structured format focusing on support workflow with comprehensive visibility data: + - Each entry includes visibility_info showing who can see it (private/internal/external/public) + - Summary includes visibility breakdown and customer-visible percentage + - Visual indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ) help identify visibility levels at a glance + - Visibility levels: private (creator only), internal (dev org), external (dev org + customers), public (everyone) + """, + tags=["timeline", "enriched", "devrev", "conversation", "artifacts", "visibility"] ) async def ticket_timeline(ticket_id: str, ctx: Context) -> str: """ @@ -264,12 +273,16 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: # Process timeline entries into conversation and events conversation_seq = 1 - artifacts_found = {} # artifact_id -> artifact_info dict + artifacts_found = {} for entry in all_entries: entry_type = entry.get("type", "") timestamp = entry.get("created_date", "") + # Extract visibility information + visibility_raw = entry.get("visibility") + visibility_info = VisibilityInfo.from_visibility(visibility_raw) + # Handle conversation entries (comments) if entry_type == "timeline_comment": body = entry.get("body", "") @@ -291,7 +304,8 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: "type": speaker_type }, "message": body, - 
"artifacts": [] + "artifacts": [], + "visibility_info": visibility_info.to_dict() } # Add artifacts if present @@ -327,7 +341,8 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: event_info = { "type": entry_type.replace("work_", "").replace("_", " "), "event_type": entry_type, - "timestamp": timestamp + "timestamp": timestamp, + "visibility_info": visibility_info.to_dict() } # Add context for stage updates @@ -372,7 +387,8 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: "type": speaker_type }, "message": body, - "artifacts": [] + "artifacts": [], + "visibility_info": visibility_info.to_dict() } # Add timeline entry navigation link @@ -393,7 +409,8 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: event_info = { "type": entry_type.replace("_", " "), "event_type": entry_type, - "timestamp": timestamp + "timestamp": timestamp, + "visibility_info": visibility_info.to_dict() } # Add author information if available @@ -409,6 +426,10 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: result["all_artifacts"] = list(artifacts_found.values()) result["summary"]["total_artifacts"] = len(artifacts_found) + # Add visibility summary to the result + all_entries_with_visibility = result["conversation_thread"] + result["key_events"] + result["visibility_summary"] = format_visibility_summary(all_entries_with_visibility) + # Add navigation links result["links"] = { "ticket": f"devrev://tickets/{ticket_id}" @@ -551,51 +572,19 @@ async def artifact_tickets(artifact_id: str, ctx: Context) -> str: return json.dumps(result, indent=2) -# Add dynamic resource access for DevRev objects -@mcp.resource( - uri="devrev://{id}", - description="Universal DevRev object accessor supporting any object type including tickets, issues, comments, parts, and users. 
Automatically routes to specialized handlers based on object type for optimal data enrichment and presentation.", - tags=["devrev", "universal", "router", "objects", "tickets", "issues", "comments"] -) -async def get_devrev_resource(id: str, ctx: Context) -> str: - """ - Access any DevRev object (tickets, comments, issues, etc.) by its full DevRev ID. - Routes to specialized handlers based on object type. - - Args: - id: The DevRev object ID - - Returns: - JSON string containing the object data - """ - try: - await ctx.info(f"Routing resource request for {id} to specialized handler") - - # Route to specialized handlers based on ID pattern - if ":ticket/" in id: - if ":comment/" in id: - # This is a timeline entry (comment) - return await timeline_entry(id, ctx) - else: - # This is a ticket - return await ticket(id, ctx) - elif ":artifact/" in id: - # This is an artifact - artifact_id = id.split(":artifact/")[1] - return await artifact(artifact_id, ctx) - else: - # Fall back to generic object handler for other types - await ctx.info(f"Using generic object handler for {id}") - return await get_object(id, ctx) - - except Exception as e: - await ctx.error(f"Failed to get resource {id}: {str(e)}") - raise ValueError(f"Resource {id} not found: {str(e)}") @mcp.tool( name="get_timeline_entries", - description="Retrieve chronological timeline of all activity on a DevRev ticket including comments, status changes, assignments, and system events. Essential for understanding ticket progression, customer interactions, and audit trails. Accepts flexible ID formats (TKT-12345, 12345, or full don: format) and provides multiple output formats for different use cases.", - tags=["timeline", "devrev", "tickets", "history", "conversations", "audit"] + description="""Retrieve chronological timeline of all activity on a DevRev ticket including comments, status changes, assignments, and system events with detailed visibility information. 
+ + Essential for understanding ticket progression, customer interactions, and audit trails. Each entry includes: + - Visibility level (private/internal/external/public) showing who can access it + - Visual indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ) for quick visibility identification + - Percentage breakdown of customer-visible vs internal-only content + - Audience information (creator only, dev org, dev org + customers, everyone) + + Accepts flexible ID formats (TKT-12345, 12345, or full don: format) and provides multiple output formats for different use cases.""", + tags=["timeline", "devrev", "tickets", "history", "conversations", "audit", "visibility"] ) async def get_timeline_entries(id: str, format: str = "summary", ctx: Context = None) -> str: """ diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py index 9c9f26a..a0908d6 100644 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -6,6 +6,7 @@ import json from fastmcp import Context +from ..types import VisibilityInfo, TimelineEntryType async def get_timeline_entries( @@ -150,7 +151,7 @@ def _format_summary(timeline_data, ticket_id: str) -> str: f"**Created:** {summary.get('created_date', 'Unknown')}", ] - # Add message counts + # Add message counts with visibility breakdown customer_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "customer"] support_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "support"] @@ -159,13 +160,31 @@ def _format_summary(timeline_data, ticket_id: str) -> str: f"**Activity:** {len(customer_messages)} customer messages, {len(support_messages)} support responses", ]) + # Add visibility summary if available + if isinstance(timeline_data, dict) and "visibility_summary" in timeline_data: + vis_summary = timeline_data["visibility_summary"] + lines.extend([ + "", + "**Visibility Summary:**", + f"- Customer-visible entries: 
{vis_summary.get('customer_visible_entries', 0)} ({vis_summary.get('customer_visible_percentage', 0)}%)", + f"- Internal-only entries: {vis_summary.get('internal_only_entries', 0)} ({vis_summary.get('internal_only_percentage', 0)}%)", + ]) + + # Show breakdown by visibility level + breakdown = vis_summary.get("visibility_breakdown", {}) + if breakdown: + lines.append("- Visibility levels:") + for level, count in breakdown.items(): + description = VisibilityInfo.from_visibility(level).description + lines.append(f" โ€ข {level}: {count} entries ({description})") + # Add last activity timestamps if summary.get("last_customer_message"): lines.append(f"**Last customer message:** {summary['last_customer_message']}") if summary.get("last_support_response"): lines.append(f"**Last support response:** {summary['last_support_response']}") - # Add latest messages preview + # Add latest messages preview with visibility indicators if conversation: lines.extend([ "", @@ -178,7 +197,22 @@ def _format_summary(timeline_data, ticket_id: str) -> str: speaker = msg.get("speaker", {}) timestamp = msg.get("timestamp", "")[:10] # Just date part message_preview = msg.get("message", "")[:100] + ("..." 
if len(msg.get("message", "")) > 100 else "") - lines.append(f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): {message_preview}") + + # Add visibility indicator + visibility_info = msg.get("visibility_info", {}) + visibility_indicator = "" + if visibility_info: + level = visibility_info.get("level", "external") + if level == "private": + visibility_indicator = "๐Ÿ”’ " + elif level == "internal": + visibility_indicator = "๐Ÿข " + elif level == "external": + visibility_indicator = "๐Ÿ‘ฅ " + elif level == "public": + visibility_indicator = "๐ŸŒ " + + lines.append(f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): {visibility_indicator}{message_preview}") # Add artifacts info if isinstance(timeline_data, dict): @@ -216,19 +250,37 @@ def _format_detailed(timeline_data, ticket_id: str) -> str: "**Conversation Thread:**" ] - # Add each conversation entry + # Add each conversation entry with visibility information for msg in conversation: speaker = msg.get("speaker", {}) timestamp = msg.get("timestamp", "") message = msg.get("message", "") artifacts = msg.get("artifacts", []) + visibility_info = msg.get("visibility_info", {}) # Format timestamp to be more readable display_time = timestamp[:19].replace("T", " ") if timestamp else "Unknown time" + # Format visibility info + visibility_text = "" + if visibility_info: + level = visibility_info.get("level", "external") + description = visibility_info.get("description", "") + audience = visibility_info.get("audience", "") + + # Add visibility indicator + if level == "private": + visibility_text = " ๐Ÿ”’ [PRIVATE - Creator only]" + elif level == "internal": + visibility_text = " ๐Ÿข [INTERNAL - Dev org only]" + elif level == "external": + visibility_text = " ๐Ÿ‘ฅ [EXTERNAL - Dev org + customers]" + elif level == "public": + visibility_text = " ๐ŸŒ [PUBLIC - Everyone]" + lines.extend([ "", - f"**{msg.get('seq', '?')}. 
{speaker.get('name', 'Unknown')} ({speaker.get('type', 'unknown')}) - {display_time}**" + f"**{msg.get('seq', '?')}. {speaker.get('name', 'Unknown')} ({speaker.get('type', 'unknown')}) - {display_time}**{visibility_text}" ]) # Add message content with proper formatting @@ -244,8 +296,12 @@ def _format_detailed(timeline_data, ticket_id: str) -> str: # Add artifacts info if artifacts: lines.append(f" *Attachments: {len(artifacts)} file(s)*") + + # Add visibility details if relevant + if visibility_info and visibility_info.get("level") in ["private", "internal"]: + lines.append(f" *Visibility: {visibility_info.get('description', 'Unknown')}*") - # Add key events summary + # Add key events summary with visibility if isinstance(timeline_data, dict): key_events = timeline_data.get("key_events", []) if key_events: @@ -257,6 +313,32 @@ def _format_detailed(timeline_data, ticket_id: str) -> str: event_time = event.get("timestamp", "")[:19].replace("T", " ") event_type = event.get("type", "unknown") actor = event.get("actor", {}).get("name", "System") - lines.append(f"- {event_time}: {event_type} by {actor}") + + # Add visibility indicator for events + visibility_info = event.get("visibility_info", {}) + visibility_indicator = "" + if visibility_info: + level = visibility_info.get("level", "external") + if level == "private": + visibility_indicator = " ๐Ÿ”’" + elif level == "internal": + visibility_indicator = " ๐Ÿข" + elif level == "external": + visibility_indicator = " ๐Ÿ‘ฅ" + elif level == "public": + visibility_indicator = " ๐ŸŒ" + + lines.append(f"- {event_time}: {event_type} by {actor}{visibility_indicator}") + + # Add overall visibility summary + if "visibility_summary" in timeline_data: + vis_summary = timeline_data["visibility_summary"] + lines.extend([ + "", + "**Visibility Overview:**", + f"- Total entries: {vis_summary.get('total_entries', 0)}", + f"- Customer-visible: {vis_summary.get('customer_visible_entries', 0)} 
({vis_summary.get('customer_visible_percentage', 0)}%)", + f"- Internal-only: {vis_summary.get('internal_only_entries', 0)} ({vis_summary.get('internal_only_percentage', 0)}%)" + ]) return "\n".join(lines) \ No newline at end of file diff --git a/src/devrev_mcp/types.py b/src/devrev_mcp/types.py new file mode 100644 index 0000000..83a163c --- /dev/null +++ b/src/devrev_mcp/types.py @@ -0,0 +1,157 @@ +""" +DevRev MCP Server Type Definitions + +Contains enums, constants, and type definitions for DevRev objects +to improve model understanding and provide clear documentation. +""" + +from enum import Enum +from typing import Dict, Any, Optional +from dataclasses import dataclass + + +class TimelineEntryVisibility(Enum): + """ + Timeline entry visibility levels that control access to entries. + + These visibility levels determine who can see timeline entries: + - PRIVATE: Only visible to the creator of the entry + - INTERNAL: Visible within the Dev organization only + - EXTERNAL: Visible to the Dev organization and Rev users (customers) + - PUBLIC: Visible to all users (default is EXTERNAL if not specified) + """ + PRIVATE = "private" + INTERNAL = "internal" + EXTERNAL = "external" + PUBLIC = "public" + + @classmethod + def get_description(cls, visibility: str) -> str: + """Get human-readable description of visibility level.""" + descriptions = { + cls.PRIVATE.value: "Only visible to the creator", + cls.INTERNAL.value: "Visible within the Dev organization", + cls.EXTERNAL.value: "Visible to Dev organization and Rev users (customers)", + cls.PUBLIC.value: "Visible to all users" + } + return descriptions.get(visibility, f"Unknown visibility: {visibility}") + + @classmethod + def get_audience(cls, visibility: str) -> str: + """Get the audience who can see this visibility level.""" + audiences = { + cls.PRIVATE.value: "Creator only", + cls.INTERNAL.value: "Dev organization members", + cls.EXTERNAL.value: "Dev organization + customers", + cls.PUBLIC.value: "Everyone" + } + 
return audiences.get(visibility, "Unknown") + + @classmethod + def is_customer_visible(cls, visibility: str) -> bool: + """Check if customers can see entries with this visibility.""" + return visibility in [cls.EXTERNAL.value, cls.PUBLIC.value] + + @classmethod + def is_internal_only(cls, visibility: str) -> bool: + """Check if entry is restricted to internal users only.""" + return visibility in [cls.PRIVATE.value, cls.INTERNAL.value] + + +@dataclass +class VisibilityInfo: + """ + Container for visibility information with helpful context. + """ + level: str + description: str + audience: str + customer_visible: bool + internal_only: bool + + @classmethod + def from_visibility(cls, visibility: Optional[str]) -> 'VisibilityInfo': + """Create VisibilityInfo from a visibility string.""" + # Default to EXTERNAL if not specified + vis_level = visibility or TimelineEntryVisibility.EXTERNAL.value + + return cls( + level=vis_level, + description=TimelineEntryVisibility.get_description(vis_level), + audience=TimelineEntryVisibility.get_audience(vis_level), + customer_visible=TimelineEntryVisibility.is_customer_visible(vis_level), + internal_only=TimelineEntryVisibility.is_internal_only(vis_level) + ) + + def to_dict(self) -> Dict[str, Any]: + """Convert to dictionary for JSON serialization.""" + return { + "level": self.level, + "description": self.description, + "audience": self.audience, + "customer_visible": self.customer_visible, + "internal_only": self.internal_only + } + + +class TimelineEntryType(Enum): + """ + Common timeline entry types in DevRev. 
+ """ + TIMELINE_COMMENT = "timeline_comment" + WORK_CREATED = "work_created" + STAGE_UPDATED = "stage_updated" + PART_SUGGESTED = "part_suggested" + WORK_UPDATED = "work_updated" + + @classmethod + def is_conversation_type(cls, entry_type: str) -> bool: + """Check if this entry type represents a conversation/comment.""" + return entry_type == cls.TIMELINE_COMMENT.value + + @classmethod + def is_system_event(cls, entry_type: str) -> bool: + """Check if this entry type is a system-generated event.""" + return entry_type in [ + cls.WORK_CREATED.value, + cls.STAGE_UPDATED.value, + cls.PART_SUGGESTED.value, + cls.WORK_UPDATED.value + ] + + +def format_visibility_summary(entries_with_visibility: list) -> Dict[str, Any]: + """ + Generate a summary of visibility levels across timeline entries. + + Args: + entries_with_visibility: List of timeline entries with visibility info + + Returns: + Dictionary with visibility statistics and breakdown + """ + visibility_counts = {} + customer_visible_count = 0 + internal_only_count = 0 + + for entry in entries_with_visibility: + visibility = entry.get("visibility_info", {}) + level = visibility.get("level", "external") + + visibility_counts[level] = visibility_counts.get(level, 0) + 1 + + if visibility.get("customer_visible", False): + customer_visible_count += 1 + if visibility.get("internal_only", False): + internal_only_count += 1 + + total_entries = len(entries_with_visibility) + + return { + "total_entries": total_entries, + "visibility_breakdown": visibility_counts, + "customer_visible_entries": customer_visible_count, + "internal_only_entries": internal_only_count, + "customer_visible_percentage": round((customer_visible_count / total_entries * 100), 1) if total_entries > 0 else 0, + "internal_only_percentage": round((internal_only_count / total_entries * 100), 1) if total_entries > 0 else 0 + } \ No newline at end of file From 12904a40f0cc1168d9a390628aa43e63a76d23b6 Mon Sep 17 00:00:00 2001 From: Sara Date: Mon, 2 Jun 2025 
13:07:22 -0400 Subject: [PATCH 11/17] cleanup for PR --- src/devrev_mcp/resources/artifact_tickets.py | 34 ++ src/devrev_mcp/resources/ticket_artifacts.py | 46 +++ src/devrev_mcp/resources/timeline.py | 277 ++++++++++++++++ src/devrev_mcp/server.py | 319 ++----------------- src/devrev_mcp/tools/get_timeline_entries.py | 26 +- 5 files changed, 394 insertions(+), 308 deletions(-) create mode 100644 src/devrev_mcp/resources/artifact_tickets.py create mode 100644 src/devrev_mcp/resources/ticket_artifacts.py create mode 100644 src/devrev_mcp/resources/timeline.py diff --git a/src/devrev_mcp/resources/artifact_tickets.py b/src/devrev_mcp/resources/artifact_tickets.py new file mode 100644 index 0000000..f6f631e --- /dev/null +++ b/src/devrev_mcp/resources/artifact_tickets.py @@ -0,0 +1,34 @@ +""" +DevRev Artifact Tickets Resource Handler + +Provides reverse lookup from artifacts to tickets that reference them. +""" + +import json +from fastmcp import Context + + +async def artifact_tickets(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: + """ + Access tickets that reference this artifact. 
+ + Args: + artifact_id: The DevRev artifact ID + ctx: FastMCP context + devrev_cache: Cache dictionary for storing results + + Returns: + JSON string containing linked tickets + """ + # This would require a search or reverse lookup in DevRev API + # For now, return a placeholder structure + + result = { + "linked_tickets": [], # Would be populated with actual ticket URIs + "message": "Reverse artifact lookup not yet implemented", + "links": { + "artifact": f"devrev://artifacts/{artifact_id}" + } + } + + return json.dumps(result, indent=2) \ No newline at end of file diff --git a/src/devrev_mcp/resources/ticket_artifacts.py b/src/devrev_mcp/resources/ticket_artifacts.py new file mode 100644 index 0000000..8972109 --- /dev/null +++ b/src/devrev_mcp/resources/ticket_artifacts.py @@ -0,0 +1,46 @@ +""" +DevRev Ticket Artifacts Resource Handler + +Provides access to all artifacts associated with a specific ticket. +""" + +import json +from fastmcp import Context +from .ticket import ticket as ticket_resource + + +async def ticket_artifacts(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: + """ + Access all artifacts for a ticket. 
+ + Args: + ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345) + ctx: FastMCP context + devrev_cache: Cache dictionary for storing results + + Returns: + JSON string containing artifacts with navigation links + """ + # Get ticket data to extract artifacts + ticket_data_str = await ticket_resource(ticket_id, ctx, devrev_cache) + + ticket_data = json.loads(ticket_data_str) + artifacts = ticket_data.get("artifacts", []) + + # Add navigation links to each artifact + for artifact in artifacts: + artifact_id = artifact.get("id", "").split("/")[-1] if artifact.get("id") else "" + if artifact_id: + artifact["links"] = { + "self": f"devrev://artifacts/{artifact_id}", + "ticket": f"devrev://tickets/{ticket_id}" + } + + result = { + "artifacts": artifacts, + "links": { + "ticket": f"devrev://tickets/{ticket_id}" + } + } + + return json.dumps(result, indent=2) \ No newline at end of file diff --git a/src/devrev_mcp/resources/timeline.py b/src/devrev_mcp/resources/timeline.py new file mode 100644 index 0000000..4fd4309 --- /dev/null +++ b/src/devrev_mcp/resources/timeline.py @@ -0,0 +1,277 @@ +""" +DevRev Timeline Resource Handler + +Provides enriched timeline access for DevRev tickets with conversation flow and visibility information. +""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..types import VisibilityInfo, format_visibility_summary + + +async def ticket_timeline(ticket_id: str, ctx: Context) -> str: + """ + Access enriched timeline for a ticket with structured conversation format. 
+ + Args: + ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345) + ctx: FastMCP context + + Returns: + JSON string containing enriched timeline with customer context and conversation flow + """ + try: + # Normalize ticket ID to handle various formats - extract just the number then format properly + if ticket_id.upper().startswith("TKT-"): + # Extract numeric part and reformat + numeric_id = ticket_id[4:] # Remove TKT- or tkt- + normalized_id = f"TKT-{numeric_id}" + else: + normalized_id = f"TKT-{ticket_id}" + + # Get ticket details for customer and workspace info + ticket_response = make_devrev_request("works.get", {"id": normalized_id}) + if ticket_response.status_code != 200: + raise ValueError(f"Failed to fetch ticket {normalized_id}") + + ticket_data = ticket_response.json() + work = ticket_data.get("work", {}) + + # Get timeline entries with pagination + all_entries = [] + cursor = None + page_count = 0 + max_pages = 50 # Safety limit to prevent infinite loops + + while page_count < max_pages: + request_payload = { + "object": normalized_id, + "limit": 50 # Use DevRev's default limit + } + if cursor: + request_payload["cursor"] = cursor + request_payload["mode"] = "after" # Get entries after this cursor + + timeline_response = make_devrev_request( + "timeline-entries.list", + request_payload + ) + + if timeline_response.status_code != 200: + raise ValueError(f"Failed to fetch timeline for {normalized_id}") + + timeline_data = timeline_response.json() + page_entries = timeline_data.get("timeline_entries", []) + all_entries.extend(page_entries) + + # Check for next page using DevRev's cursor system + cursor = timeline_data.get("next_cursor") + page_count += 1 + + await ctx.info(f"DEBUG: Fetched page {page_count} with {len(page_entries)} entries, total so far: {len(all_entries)}") + + # Break if no more pages or no entries in this page + if not cursor or len(page_entries) == 0: + break + + await ctx.info(f"DEBUG: Found {len(all_entries)} timeline entries 
for {normalized_id}") + + # Extract customer information + customer_info = {} + created_by = work.get("created_by", {}) + if created_by: + customer_info = { + "name": created_by.get("display_name", "Unknown"), + "email": created_by.get("email", ""), + "type": "customer" if created_by.get("type") == "user" else "system" + } + + # Build enriched schema + result = { + "summary": { + "ticket_id": normalized_id, + "customer": customer_info.get("email", customer_info.get("name", "Unknown")), + "workspace": work.get("owned_by", [{}])[0].get("display_name", "Unknown Workspace") if work.get("owned_by") else "Unknown Workspace", + "subject": work.get("title", "No title"), + "current_stage": work.get("stage", {}).get("name", "unknown"), + "created_date": work.get("created_date"), + "total_artifacts": 0 + }, + "conversation_thread": [], + "key_events": [], + "all_artifacts": [] + } + + # Process timeline entries into conversation and events + conversation_seq = 1 + artifacts_found = {} + + for entry in all_entries: + entry_type = entry.get("type", "") + timestamp = entry.get("created_date", "") + + # Extract visibility information + visibility_raw = entry.get("visibility") + visibility_info = VisibilityInfo.from_visibility(visibility_raw) + + # Handle conversation entries (comments) + if entry_type == "timeline_comment": + body = entry.get("body", "") + author = entry.get("created_by", {}) + + # Determine speaker type + speaker_type = "support" + if author.get("email") == customer_info.get("email"): + speaker_type = "customer" + elif "system" in author.get("display_name", "").lower(): + speaker_type = "system" + + conversation_entry = { + "seq": conversation_seq, + "timestamp": timestamp, + "event_type": entry_type, + "speaker": { + "name": author.get("display_name", author.get("email", "Unknown")), + "type": speaker_type + }, + "message": body, + "artifacts": [], + "visibility_info": visibility_info.to_dict() + } + + # Add artifacts if present + if entry.get("artifacts"): + 
for artifact in entry["artifacts"]: + artifact_id = artifact.get("id") + artifact_info = { + "id": artifact_id, + "display_id": artifact.get("display_id"), + "type": artifact.get("file", {}).get("type", "unknown"), + "attached_to_message": conversation_seq, + "resource_uri": f"devrev://artifacts/{artifact_id}" + } + conversation_entry["artifacts"].append(artifact_info) + artifacts_found[artifact_id] = artifact_info + + # Add timeline entry navigation link + entry_id = entry.get("id", "").split("/")[-1] if entry.get("id") else "" + if entry_id: + conversation_entry["timeline_entry_uri"] = f"devrev://tickets/{ticket_id}/timeline/{entry_id}" + + result["conversation_thread"].append(conversation_entry) + conversation_seq += 1 + + # Update last message timestamps + if speaker_type == "customer": + result["summary"]["last_customer_message"] = timestamp + elif speaker_type == "support": + result["summary"]["last_support_response"] = timestamp + + # Handle key events + elif entry_type in ["work_created", "stage_updated", "part_suggested", "work_updated"]: + event_info = { + "type": entry_type.replace("work_", "").replace("_", " "), + "event_type": entry_type, + "timestamp": timestamp, + "visibility_info": visibility_info.to_dict() + } + + # Add context for stage updates + if entry_type == "stage_updated" and entry.get("stage_updated"): + stage_info = entry["stage_updated"] + event_info["from_stage"] = stage_info.get("old_stage", {}).get("name") + event_info["to_stage"] = stage_info.get("new_stage", {}).get("name") + + # Add author information if available + author = entry.get("created_by", {}) + if author: + event_info["actor"] = { + "name": author.get("display_name", author.get("email", "System")), + "type": "customer" if author.get("email") == customer_info.get("email") else "support" + } + + result["key_events"].append(event_info) + + # Handle all other event types to preserve information + else: + # Skip entries without meaningful content + if not entry_type or 
entry_type in ["", "unknown"]: + continue + + # Determine if this is likely a conversation-like entry + body = entry.get("body", "").strip() + author = entry.get("created_by", {}) + + if body: # Has content, treat as conversation + speaker_type = "support" + if author.get("email") == customer_info.get("email"): + speaker_type = "customer" + elif "system" in author.get("display_name", "").lower(): + speaker_type = "system" + + conversation_entry = { + "seq": conversation_seq, + "timestamp": timestamp, + "event_type": entry_type, + "speaker": { + "name": author.get("display_name", author.get("email", "Unknown")), + "type": speaker_type + }, + "message": body, + "artifacts": [], + "visibility_info": visibility_info.to_dict() + } + + # Add timeline entry navigation link + entry_id = entry.get("id", "").split("/")[-1] if entry.get("id") else "" + if entry_id: + conversation_entry["timeline_entry_uri"] = f"devrev://tickets/{ticket_id}/timeline/{entry_id}" + + result["conversation_thread"].append(conversation_entry) + conversation_seq += 1 + + # Update last message timestamps + if speaker_type == "customer": + result["summary"]["last_customer_message"] = timestamp + elif speaker_type == "support": + result["summary"]["last_support_response"] = timestamp + + else: # No content, treat as event + event_info = { + "type": entry_type.replace("_", " "), + "event_type": entry_type, + "timestamp": timestamp, + "visibility_info": visibility_info.to_dict() + } + + # Add author information if available + if author: + event_info["actor"] = { + "name": author.get("display_name", author.get("email", "System")), + "type": "customer" if author.get("email") == customer_info.get("email") else "support" + } + + result["key_events"].append(event_info) + + # Set artifact count and list + result["all_artifacts"] = list(artifacts_found.values()) + result["summary"]["total_artifacts"] = len(artifacts_found) + + # Add visibility summary to the result + all_entries_with_visibility = 
result["conversation_thread"] + result["key_events"] + result["visibility_summary"] = format_visibility_summary(all_entries_with_visibility) + + # Add navigation links + result["links"] = { + "ticket": f"devrev://tickets/{ticket_id}" + } + + if result["all_artifacts"]: + result["links"]["artifacts"] = f"devrev://tickets/{ticket_id}/artifacts" + + return json.dumps(result, indent=2) + + except Exception as e: + await ctx.error(f"Failed to get timeline for ticket {ticket_id}: {str(e)}") + raise ValueError(f"Timeline for ticket {ticket_id} not found: {str(e)}") \ No newline at end of file diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 04b8cc0..9ea9ee9 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -12,8 +12,11 @@ # Import modular resources and tools from .resources.ticket import ticket as ticket_resource +from .resources.timeline import ticket_timeline as timeline_resource from .resources.timeline_entry import timeline_entry as timeline_entry_resource from .resources.artifact import artifact as artifact_resource +from .resources.ticket_artifacts import ticket_artifacts as ticket_artifacts_resource +from .resources.artifact_tickets import artifact_tickets as artifact_tickets_resource from .tools.get_object import get_object as get_object_tool from .tools.get_timeline_entries import get_timeline_entries as get_timeline_entries_tool from .tools.get_ticket import get_ticket as get_ticket_tool @@ -166,9 +169,12 @@ async def ticket(ticket_id: str, ctx: Context) -> str: @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline", - description="""Access enriched timeline for a ticket with customer context, conversation flow, artifacts, and detailed visibility information. + description=""" + Access enriched timeline for a ticket with customer context, conversation flow, + artifacts, and detailed visibility information. 
- Returns token-efficient structured format focusing on support workflow with comprehensive visibility data: + Returns token-efficient structured format focusing on support workflow with + comprehensive visibility data: - Each entry includes visibility_info showing who can see it (private/internal/external/public) - Summary includes visibility breakdown and customer-visible percentage - Visual indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ) help identify visibility levels at a glance @@ -186,263 +192,7 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: Returns: JSON string containing enriched timeline with customer context and conversation flow """ - from .utils import make_devrev_request - import json - - try: - # Normalize ticket ID to handle various formats - extract just the number then format properly - if ticket_id.upper().startswith("TKT-"): - # Extract numeric part and reformat - numeric_id = ticket_id[4:] # Remove TKT- or tkt- - normalized_id = f"TKT-{numeric_id}" - else: - normalized_id = f"TKT-{ticket_id}" - - # Get ticket details for customer and workspace info - ticket_response = make_devrev_request("works.get", {"id": normalized_id}) - if ticket_response.status_code != 200: - raise ValueError(f"Failed to fetch ticket {normalized_id}") - - ticket_data = ticket_response.json() - work = ticket_data.get("work", {}) - - # Get timeline entries with pagination - all_entries = [] - cursor = None - page_count = 0 - max_pages = 50 # Safety limit to prevent infinite loops - - while page_count < max_pages: - request_payload = { - "object": normalized_id, - "limit": 50 # Use DevRev's default limit - } - if cursor: - request_payload["cursor"] = cursor - request_payload["mode"] = "after" # Get entries after this cursor - - timeline_response = make_devrev_request( - "timeline-entries.list", - request_payload - ) - - if timeline_response.status_code != 200: - raise ValueError(f"Failed to fetch timeline for {normalized_id}") - - timeline_data = timeline_response.json() - 
page_entries = timeline_data.get("timeline_entries", []) - all_entries.extend(page_entries) - - # Check for next page using DevRev's cursor system - cursor = timeline_data.get("next_cursor") - page_count += 1 - - await ctx.info(f"DEBUG: Fetched page {page_count} with {len(page_entries)} entries, total so far: {len(all_entries)}") - - # Break if no more pages or no entries in this page - if not cursor or len(page_entries) == 0: - break - - await ctx.info(f"DEBUG: Found {len(all_entries)} timeline entries for {normalized_id}") - - # Extract customer information - customer_info = {} - created_by = work.get("created_by", {}) - if created_by: - customer_info = { - "name": created_by.get("display_name", "Unknown"), - "email": created_by.get("email", ""), - "type": "customer" if created_by.get("type") == "user" else "system" - } - - # Build enriched schema - result = { - "summary": { - "ticket_id": normalized_id, - "customer": customer_info.get("email", customer_info.get("name", "Unknown")), - "workspace": work.get("owned_by", [{}])[0].get("display_name", "Unknown Workspace") if work.get("owned_by") else "Unknown Workspace", - "subject": work.get("title", "No title"), - "current_stage": work.get("stage", {}).get("name", "unknown"), - "created_date": work.get("created_date"), - "total_artifacts": 0 - }, - "conversation_thread": [], - "key_events": [], - "all_artifacts": [] - } - - # Process timeline entries into conversation and events - conversation_seq = 1 - artifacts_found = {} - - for entry in all_entries: - entry_type = entry.get("type", "") - timestamp = entry.get("created_date", "") - - # Extract visibility information - visibility_raw = entry.get("visibility") - visibility_info = VisibilityInfo.from_visibility(visibility_raw) - - # Handle conversation entries (comments) - if entry_type == "timeline_comment": - body = entry.get("body", "") - author = entry.get("created_by", {}) - - # Determine speaker type - speaker_type = "support" - if author.get("email") == 
customer_info.get("email"): - speaker_type = "customer" - elif "system" in author.get("display_name", "").lower(): - speaker_type = "system" - - conversation_entry = { - "seq": conversation_seq, - "timestamp": timestamp, - "event_type": entry_type, - "speaker": { - "name": author.get("display_name", author.get("email", "Unknown")), - "type": speaker_type - }, - "message": body, - "artifacts": [], - "visibility_info": visibility_info.to_dict() - } - - # Add artifacts if present - if entry.get("artifacts"): - for artifact in entry["artifacts"]: - artifact_id = artifact.get("id") - artifact_info = { - "id": artifact_id, - "display_id": artifact.get("display_id"), - "type": artifact.get("file", {}).get("type", "unknown"), - "attached_to_message": conversation_seq, - "resource_uri": f"devrev://artifacts/{artifact_id}" - } - conversation_entry["artifacts"].append(artifact_info) - artifacts_found[artifact_id] = artifact_info - - # Add timeline entry navigation link - entry_id = entry.get("id", "").split("/")[-1] if entry.get("id") else "" - if entry_id: - conversation_entry["timeline_entry_uri"] = f"devrev://tickets/{ticket_id}/timeline/{entry_id}" - - result["conversation_thread"].append(conversation_entry) - conversation_seq += 1 - - # Update last message timestamps - if speaker_type == "customer": - result["summary"]["last_customer_message"] = timestamp - elif speaker_type == "support": - result["summary"]["last_support_response"] = timestamp - - # Handle key events - elif entry_type in ["work_created", "stage_updated", "part_suggested", "work_updated"]: - event_info = { - "type": entry_type.replace("work_", "").replace("_", " "), - "event_type": entry_type, - "timestamp": timestamp, - "visibility_info": visibility_info.to_dict() - } - - # Add context for stage updates - if entry_type == "stage_updated" and entry.get("stage_updated"): - stage_info = entry["stage_updated"] - event_info["from_stage"] = stage_info.get("old_stage", {}).get("name") - event_info["to_stage"] 
= stage_info.get("new_stage", {}).get("name") - - # Add author information if available - author = entry.get("created_by", {}) - if author: - event_info["actor"] = { - "name": author.get("display_name", author.get("email", "System")), - "type": "customer" if author.get("email") == customer_info.get("email") else "support" - } - - result["key_events"].append(event_info) - - # Handle all other event types to preserve information - else: - # Skip entries without meaningful content - if not entry_type or entry_type in ["", "unknown"]: - continue - - # Determine if this is likely a conversation-like entry - body = entry.get("body", "").strip() - author = entry.get("created_by", {}) - - if body: # Has content, treat as conversation - speaker_type = "support" - if author.get("email") == customer_info.get("email"): - speaker_type = "customer" - elif "system" in author.get("display_name", "").lower(): - speaker_type = "system" - - conversation_entry = { - "seq": conversation_seq, - "timestamp": timestamp, - "event_type": entry_type, - "speaker": { - "name": author.get("display_name", author.get("email", "Unknown")), - "type": speaker_type - }, - "message": body, - "artifacts": [], - "visibility_info": visibility_info.to_dict() - } - - # Add timeline entry navigation link - entry_id = entry.get("id", "").split("/")[-1] if entry.get("id") else "" - if entry_id: - conversation_entry["timeline_entry_uri"] = f"devrev://tickets/{ticket_id}/timeline/{entry_id}" - - result["conversation_thread"].append(conversation_entry) - conversation_seq += 1 - - # Update last message timestamps - if speaker_type == "customer": - result["summary"]["last_customer_message"] = timestamp - elif speaker_type == "support": - result["summary"]["last_support_response"] = timestamp - - else: # No content, treat as event - event_info = { - "type": entry_type.replace("_", " "), - "event_type": entry_type, - "timestamp": timestamp, - "visibility_info": visibility_info.to_dict() - } - - # Add author 
information if available - if author: - event_info["actor"] = { - "name": author.get("display_name", author.get("email", "System")), - "type": "customer" if author.get("email") == customer_info.get("email") else "support" - } - - result["key_events"].append(event_info) - - # Set artifact count and list - result["all_artifacts"] = list(artifacts_found.values()) - result["summary"]["total_artifacts"] = len(artifacts_found) - - # Add visibility summary to the result - all_entries_with_visibility = result["conversation_thread"] + result["key_events"] - result["visibility_summary"] = format_visibility_summary(all_entries_with_visibility) - - # Add navigation links - result["links"] = { - "ticket": f"devrev://tickets/{ticket_id}" - } - - if result["all_artifacts"]: - result["links"]["artifacts"] = f"devrev://tickets/{ticket_id}/artifacts" - - return json.dumps(result, indent=2) - - except Exception as e: - await ctx.error(f"Failed to get timeline for ticket {ticket_id}: {str(e)}") - raise ValueError(f"Timeline for ticket {ticket_id} not found: {str(e)}") + return await timeline_resource(ticket_id, ctx) @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline/{entry_id}", @@ -492,30 +242,7 @@ async def ticket_artifacts(ticket_id: str, ctx: Context) -> str: Returns: JSON string containing artifacts with navigation links """ - # Get ticket data to extract artifacts - ticket_data_str = await ticket_resource(ticket_id, ctx, devrev_cache) - - import json - ticket_data = json.loads(ticket_data_str) - artifacts = ticket_data.get("artifacts", []) - - # Add navigation links to each artifact - for artifact in artifacts: - artifact_id = artifact.get("id", "").split("/")[-1] if artifact.get("id") else "" - if artifact_id: - artifact["links"] = { - "self": f"devrev://artifacts/{artifact_id}", - "ticket": f"devrev://tickets/{ticket_id}" - } - - result = { - "artifacts": artifacts, - "links": { - "ticket": f"devrev://tickets/{ticket_id}" - } - } - - return json.dumps(result, indent=2) 
+ return await ticket_artifacts_resource(ticket_id, ctx, devrev_cache) @mcp.resource( uri="devrev://artifacts/{artifact_id}", @@ -558,32 +285,24 @@ async def artifact_tickets(artifact_id: str, ctx: Context) -> str: Returns: JSON string containing linked tickets """ - # This would require a search or reverse lookup in DevRev API - # For now, return a placeholder structure - import json - - result = { - "linked_tickets": [], # Would be populated with actual ticket URIs - "message": "Reverse artifact lookup not yet implemented", - "links": { - "artifact": f"devrev://artifacts/{artifact_id}" - } - } - - return json.dumps(result, indent=2) - + return await artifact_tickets_resource(artifact_id, ctx, devrev_cache) @mcp.tool( name="get_timeline_entries", - description="""Retrieve chronological timeline of all activity on a DevRev ticket including comments, status changes, assignments, and system events with detailed visibility information. + description=""" + Retrieve chronological timeline of all activity on a DevRev ticket including + comments, status changes, assignments, and system events with detailed visibility information. - Essential for understanding ticket progression, customer interactions, and audit trails. Each entry includes: + Essential for understanding ticket progression, customer interactions, and audit trails. + Each entry includes: - Visibility level (private/internal/external/public) showing who can access it - Visual indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ) for quick visibility identification - Percentage breakdown of customer-visible vs internal-only content - Audience information (creator only, dev org, dev org + customers, everyone) - Accepts flexible ID formats (TKT-12345, 12345, or full don: format) and provides multiple output formats for different use cases.""", + Accepts flexible ID formats (TKT-12345, 12345, or full don: format) and provides + multiple output formats for different use cases. 
+ """, tags=["timeline", "devrev", "tickets", "history", "conversations", "audit", "visibility"] ) async def get_timeline_entries(id: str, format: str = "summary", ctx: Context = None) -> str: diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py index a0908d6..d935540 100644 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -157,7 +157,8 @@ def _format_summary(timeline_data, ticket_id: str) -> str: lines.extend([ "", - f"**Activity:** {len(customer_messages)} customer messages, {len(support_messages)} support responses", + (f"**Activity:** {len(customer_messages)} customer messages, " + f"{len(support_messages)} support responses"), ]) # Add visibility summary if available @@ -166,8 +167,10 @@ def _format_summary(timeline_data, ticket_id: str) -> str: lines.extend([ "", "**Visibility Summary:**", - f"- Customer-visible entries: {vis_summary.get('customer_visible_entries', 0)} ({vis_summary.get('customer_visible_percentage', 0)}%)", - f"- Internal-only entries: {vis_summary.get('internal_only_entries', 0)} ({vis_summary.get('internal_only_percentage', 0)}%)", + (f"- Customer-visible entries: {vis_summary.get('customer_visible_entries', 0)} " + f"({vis_summary.get('customer_visible_percentage', 0)}%)"), + (f"- Internal-only entries: {vis_summary.get('internal_only_entries', 0)} " + f"({vis_summary.get('internal_only_percentage', 0)}%)"), ]) # Show breakdown by visibility level @@ -196,7 +199,8 @@ def _format_summary(timeline_data, ticket_id: str) -> str: for msg in recent_messages: speaker = msg.get("speaker", {}) timestamp = msg.get("timestamp", "")[:10] # Just date part - message_preview = msg.get("message", "")[:100] + ("..." if len(msg.get("message", "")) > 100 else "") + message_preview = (msg.get("message", "")[:100] + + ("..." 
if len(msg.get("message", "")) > 100 else "")) # Add visibility indicator visibility_info = msg.get("visibility_info", {}) @@ -212,7 +216,10 @@ def _format_summary(timeline_data, ticket_id: str) -> str: elif level == "public": visibility_indicator = "๐ŸŒ " - lines.append(f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): {visibility_indicator}{message_preview}") + lines.append( + f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): " + f"{visibility_indicator}{message_preview}" + ) # Add artifacts info if isinstance(timeline_data, dict): @@ -280,7 +287,8 @@ def _format_detailed(timeline_data, ticket_id: str) -> str: lines.extend([ "", - f"**{msg.get('seq', '?')}. {speaker.get('name', 'Unknown')} ({speaker.get('type', 'unknown')}) - {display_time}**{visibility_text}" + (f"**{msg.get('seq', '?')}. {speaker.get('name', 'Unknown')} " + f"({speaker.get('type', 'unknown')}) - {display_time}**{visibility_text}") ]) # Add message content with proper formatting @@ -337,8 +345,10 @@ def _format_detailed(timeline_data, ticket_id: str) -> str: "", "**Visibility Overview:**", f"- Total entries: {vis_summary.get('total_entries', 0)}", - f"- Customer-visible: {vis_summary.get('customer_visible_entries', 0)} ({vis_summary.get('customer_visible_percentage', 0)}%)", - f"- Internal-only: {vis_summary.get('internal_only_entries', 0)} ({vis_summary.get('internal_only_percentage', 0)}%)" + (f"- Customer-visible: {vis_summary.get('customer_visible_entries', 0)} " + f"({vis_summary.get('customer_visible_percentage', 0)}%)"), + (f"- Internal-only: {vis_summary.get('internal_only_entries', 0)} " + f"({vis_summary.get('internal_only_percentage', 0)}%)") ]) return "\n".join(lines) \ No newline at end of file From ec6ea526c6202adeadf2506fd3d6e901c0960cc6 Mon Sep 17 00:00:00 2001 From: Sara Date: Mon, 2 Jun 2025 14:30:52 -0400 Subject: [PATCH 12/17] error handling + pr cleanup --- CLAUDE.md | 1 + src/devrev_mcp/endpoints.py | 37 +++ src/devrev_mcp/error_handler.py | 224 
+++++++++++++++++++ src/devrev_mcp/resources/artifact.py | 133 ++++++----- src/devrev_mcp/resources/artifact_tickets.py | 34 --- src/devrev_mcp/resources/ticket.py | 178 ++++++++------- src/devrev_mcp/resources/ticket_artifacts.py | 2 + src/devrev_mcp/resources/timeline.py | 7 +- src/devrev_mcp/resources/timeline_entry.py | 5 +- src/devrev_mcp/server.py | 60 +---- src/devrev_mcp/tools/create_object.py | 5 +- src/devrev_mcp/tools/download_artifact.py | 7 +- src/devrev_mcp/tools/get_object.py | 5 +- src/devrev_mcp/tools/get_ticket.py | 2 + src/devrev_mcp/tools/get_timeline_entries.py | 2 + src/devrev_mcp/tools/search.py | 5 +- src/devrev_mcp/tools/update_object.py | 5 +- src/devrev_mcp/utils.py | 2 +- 18 files changed, 469 insertions(+), 245 deletions(-) create mode 100644 src/devrev_mcp/endpoints.py create mode 100644 src/devrev_mcp/error_handler.py delete mode 100644 src/devrev_mcp/resources/artifact_tickets.py diff --git a/CLAUDE.md b/CLAUDE.md index d287fa6..1706609 100644 --- a/CLAUDE.md +++ b/CLAUDE.md @@ -1,2 +1,3 @@ +This project uses `uv` for package management - Reference https://gofastmcp.com/llms.txt - Reference https://developer.devrev.ai/llms.txt \ No newline at end of file diff --git a/src/devrev_mcp/endpoints.py b/src/devrev_mcp/endpoints.py new file mode 100644 index 0000000..d56b2c2 --- /dev/null +++ b/src/devrev_mcp/endpoints.py @@ -0,0 +1,37 @@ +""" +DevRev API Endpoints Constants + +This module defines all DevRev API endpoint strings used throughout the application. +Centralizing these constants prevents typos and makes API changes easier to manage. +""" + + +class DevRevEndpoints: + """DevRev API endpoint constants for consistent usage across the application.""" + + # Works (Tickets, Issues, etc.) 
+ WORKS_GET = "works.get" + WORKS_CREATE = "works.create" + WORKS_UPDATE = "works.update" + + # Timeline Entries + TIMELINE_ENTRIES_LIST = "timeline-entries.list" + TIMELINE_ENTRIES_GET = "timeline-entries.get" + + # Artifacts + ARTIFACTS_GET = "artifacts.get" + ARTIFACTS_LOCATE = "artifacts.locate" + + # Search + SEARCH_HYBRID = "search.hybrid" + + +# Convenience exports for simpler imports +WORKS_GET = DevRevEndpoints.WORKS_GET +WORKS_CREATE = DevRevEndpoints.WORKS_CREATE +WORKS_UPDATE = DevRevEndpoints.WORKS_UPDATE +TIMELINE_ENTRIES_LIST = DevRevEndpoints.TIMELINE_ENTRIES_LIST +TIMELINE_ENTRIES_GET = DevRevEndpoints.TIMELINE_ENTRIES_GET +ARTIFACTS_GET = DevRevEndpoints.ARTIFACTS_GET +ARTIFACTS_LOCATE = DevRevEndpoints.ARTIFACTS_LOCATE +SEARCH_HYBRID = DevRevEndpoints.SEARCH_HYBRID \ No newline at end of file diff --git a/src/devrev_mcp/error_handler.py b/src/devrev_mcp/error_handler.py new file mode 100644 index 0000000..40837a7 --- /dev/null +++ b/src/devrev_mcp/error_handler.py @@ -0,0 +1,224 @@ +""" +DevRev MCP Error Handler + +Provides standardized error handling for resources and tools. 
+""" + +import json +from typing import Dict, Optional +from functools import wraps +from fastmcp import Context + + +class DevRevMCPError(Exception): + """Base exception for DevRev MCP errors.""" + def __init__(self, message: str, error_code: str = "UNKNOWN", details: Optional[Dict] = None): + self.message = message + self.error_code = error_code + self.details = details or {} + super().__init__(message) + + +class ResourceNotFoundError(DevRevMCPError): + """Raised when a requested resource is not found.""" + def __init__(self, resource_type: str, resource_id: str, details: Optional[Dict] = None): + message = f"{resource_type} {resource_id} not found" + super().__init__(message, "RESOURCE_NOT_FOUND", details) + self.resource_type = resource_type + self.resource_id = resource_id + + +class APIError(DevRevMCPError): + """Raised when DevRev API returns an error.""" + def __init__(self, endpoint: str, status_code: int, response_text: str): + message = f"DevRev API error on {endpoint}: HTTP {status_code}" + details = {"status_code": status_code, "response": response_text} + super().__init__(message, "API_ERROR", details) + self.endpoint = endpoint + self.status_code = status_code + + +def create_error_response( + error: Exception, + resource_type: str = "resource", + resource_id: str = "", + additional_data: Optional[Dict] = None +) -> str: + """ + Create a standardized JSON error response. + + Args: + error: The exception that occurred + resource_type: Type of resource (ticket, artifact, etc.) 
+ resource_id: ID of the resource that failed + additional_data: Additional data to include in error response + + Returns: + JSON string containing error information + """ + error_data = { + "error": True, + "error_type": type(error).__name__, + "message": str(error), + "resource_type": resource_type, + "resource_id": resource_id, + "timestamp": None # Could add timestamp if needed + } + + # Add specific error details for known error types + if isinstance(error, DevRevMCPError): + error_data["error_code"] = error.error_code + error_data["details"] = error.details + + if isinstance(error, APIError): + error_data["api_endpoint"] = error.endpoint + error_data["http_status"] = error.status_code + + # Include any additional data + if additional_data: + error_data.update(additional_data) + + return json.dumps(error_data, indent=2) + + +def resource_error_handler(resource_type: str): + """ + Decorator for resource handlers that provides standardized error handling. + + Args: + resource_type: The type of resource (e.g., "ticket", "artifact") + + Returns: + Decorated function with error handling + """ + def decorator(func): + @wraps(func) + async def wrapper(*args, **kwargs): + # Extract resource_id from function arguments + resource_id = args[0] if args else "unknown" + ctx = None + + # Find Context in arguments + for arg in args: + if isinstance(arg, Context): + ctx = arg + break + + try: + return await func(*args, **kwargs) + + except DevRevMCPError as e: + if ctx: + await ctx.error(f"{resource_type} error: {e.message}") + return create_error_response(e, resource_type, resource_id) + + except Exception as e: + if ctx: + await ctx.error(f"Unexpected error in {resource_type} {resource_id}: {str(e)}") + + # Convert to standardized error + mcp_error = DevRevMCPError( + f"Unexpected error: {str(e)}", + "INTERNAL_ERROR" + ) + return create_error_response(mcp_error, resource_type, resource_id) + + return wrapper + return decorator + + +def tool_error_handler(tool_name: str): + 
""" + Decorator for tool handlers that provides standardized error handling. + + Args: + tool_name: The name of the tool + + Returns: + Decorated function with error handling + """ + def decorator(func): + @wraps(func) + async def wrapper(*args, **kwargs): + ctx = None + + # Find Context in arguments or kwargs + for arg in args: + if isinstance(arg, Context): + ctx = arg + break + + if not ctx and 'ctx' in kwargs: + ctx = kwargs['ctx'] + + try: + return await func(*args, **kwargs) + + except DevRevMCPError as e: + if ctx: + await ctx.error(f"{tool_name} error: {e.message}") + raise # Re-raise for tools since they can handle exceptions + + except Exception as e: + if ctx: + await ctx.error(f"Unexpected error in {tool_name}: {str(e)}") + + # Convert to standardized error and re-raise + raise DevRevMCPError( + f"Tool {tool_name} failed: {str(e)}", + "TOOL_ERROR" + ) from e + + return wrapper + return decorator + + +def handle_api_response(response, endpoint: str, expected_status: int = 200): + """ + Handle DevRev API response and raise appropriate errors. + + Args: + response: The requests Response object + endpoint: API endpoint that was called + expected_status: Expected HTTP status code (default 200) + + Raises: + APIError: If the response status is not as expected + """ + if response.status_code != expected_status: + raise APIError(endpoint, response.status_code, response.text) + + return response + + +# Utility function to check and validate resource IDs +def validate_resource_id(resource_id: str, resource_type: str) -> str: + """ + Validate and normalize resource IDs. 
+ + Args: + resource_id: The resource ID to validate + resource_type: Type of resource for error messages + + Returns: + Normalized resource ID + + Raises: + ResourceNotFoundError: If resource ID is invalid + """ + if not resource_id or not isinstance(resource_id, str): + raise ResourceNotFoundError( + resource_type, + str(resource_id), + {"reason": "Invalid or empty resource ID"} + ) + + resource_id = resource_id.strip() + if not resource_id: + raise ResourceNotFoundError( + resource_type, + resource_id, + {"reason": "Empty resource ID after normalization"} + ) + + return resource_id \ No newline at end of file diff --git a/src/devrev_mcp/resources/artifact.py b/src/devrev_mcp/resources/artifact.py index 7257c85..183fa2b 100644 --- a/src/devrev_mcp/resources/artifact.py +++ b/src/devrev_mcp/resources/artifact.py @@ -7,8 +7,11 @@ import json from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import resource_error_handler, handle_api_response, validate_resource_id +from ..endpoints import ARTIFACTS_GET, ARTIFACTS_LOCATE +@resource_error_handler("artifact") async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access DevRev artifact metadata. 
@@ -21,71 +24,67 @@ async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: Returns: JSON string containing the artifact metadata """ - try: - cache_key = f"artifact:{artifact_id}" - - # Check cache first - if cache_key in devrev_cache: - await ctx.info(f"Retrieved artifact {artifact_id} from cache") - return devrev_cache[cache_key] - - await ctx.info(f"Fetching artifact {artifact_id} from DevRev API") - - # For artifacts, use artifacts.get endpoint - response = make_devrev_request( - "artifacts.get", - {"id": artifact_id} - ) - - if response.status_code != 200: - error_text = response.text - await ctx.error(f"Failed to fetch artifact {artifact_id}: HTTP {response.status_code} - {error_text}") - raise ValueError(f"Failed to fetch artifact {artifact_id} (HTTP {response.status_code}): {error_text}") - - result = response.json() - - # Try to get download URL if available through artifacts.locate - artifact_info = result.get("artifact", {}) - if artifact_info and not any(key in artifact_info.get("file", {}) for key in ["download_url", "url"]): - try: - await ctx.info(f"Attempting to get download URL for artifact {artifact_id}") - locate_response = make_devrev_request( - "artifacts.locate", - {"id": artifact_id} - ) - - if locate_response.status_code == 200: - locate_data = locate_response.json() - locate_artifact = locate_data.get("artifact", {}) - if locate_artifact: - # Merge locate data into the main artifact data - if "download_url" in locate_artifact: - artifact_info["download_url"] = locate_artifact["download_url"] - if "file" in locate_artifact and "download_url" in locate_artifact["file"]: - if "file" not in artifact_info: - artifact_info["file"] = {} - artifact_info["file"]["download_url"] = locate_artifact["file"]["download_url"] - await ctx.info(f"Successfully added download URL for artifact {artifact_id}") - else: - await ctx.info(f"artifacts.locate not available for {artifact_id}: HTTP {locate_response.status_code}") - except 
Exception as locate_error: - await ctx.info(f"Could not locate download URL for artifact {artifact_id}: {str(locate_error)}") - # Continue without download URL - - # Add navigation links to timeline entry (artifacts belong to timeline entries) - # Note: We'd need to determine the timeline entry ID from the artifact context - # For now, adding a placeholder structure that could be populated based on API response - result["links"] = { - "timeline_entry": "devrev://timeline-entries/{timeline_entry_id}", # Would need actual ID - "note": "Artifact belongs to a specific timeline entry, which belongs to a ticket" - } - - # Cache the result - devrev_cache[cache_key] = json.dumps(result, indent=2) - await ctx.info(f"Successfully retrieved and cached artifact: {artifact_id}") - + # Validate the artifact ID + artifact_id = validate_resource_id(artifact_id, "artifact") + + cache_key = f"artifact:{artifact_id}" + + # Check cache first + if cache_key in devrev_cache: + await ctx.info(f"Retrieved artifact {artifact_id} from cache") return devrev_cache[cache_key] - - except Exception as e: - await ctx.error(f"Failed to get artifact resource {artifact_id}: {str(e)}") - raise ValueError(f"Artifact resource {artifact_id} not found: {str(e)}") \ No newline at end of file + + await ctx.info(f"Fetching artifact {artifact_id} from DevRev API") + + # For artifacts, use artifacts.get endpoint + response = make_devrev_request( + ARTIFACTS_GET, + {"id": artifact_id} + ) + + # Handle API response with standardized error handling + handle_api_response(response, ARTIFACTS_GET) + + result = response.json() + + # Try to get download URL if available through artifacts.locate + artifact_info = result.get("artifact", {}) + if artifact_info and not any(key in artifact_info.get("file", {}) for key in ["download_url", "url"]): + try: + await ctx.info(f"Attempting to get download URL for artifact {artifact_id}") + locate_response = make_devrev_request( + ARTIFACTS_LOCATE, + {"id": artifact_id} + ) + + 
if locate_response.status_code == 200: + locate_data = locate_response.json() + locate_artifact = locate_data.get("artifact", {}) + if locate_artifact: + # Merge locate data into the main artifact data + if "download_url" in locate_artifact: + artifact_info["download_url"] = locate_artifact["download_url"] + if "file" in locate_artifact and "download_url" in locate_artifact["file"]: + if "file" not in artifact_info: + artifact_info["file"] = {} + artifact_info["file"]["download_url"] = locate_artifact["file"]["download_url"] + await ctx.info(f"Successfully added download URL for artifact {artifact_id}") + else: + await ctx.info(f"artifacts.locate not available for {artifact_id}: HTTP {locate_response.status_code}") + except Exception as locate_error: + await ctx.info(f"Could not locate download URL for artifact {artifact_id}: {str(locate_error)}") + # Continue without download URL + + # Add navigation links to timeline entry (artifacts belong to timeline entries) + # Note: We'd need to determine the timeline entry ID from the artifact context + # For now, adding a placeholder structure that could be populated based on API response + result["links"] = { + "timeline_entry": "devrev://timeline-entries/{timeline_entry_id}", # Would need actual ID + "note": "Artifact belongs to a specific timeline entry, which belongs to a ticket" + } + + # Cache the result + devrev_cache[cache_key] = json.dumps(result, indent=2) + await ctx.info(f"Successfully retrieved and cached artifact: {artifact_id}") + + return devrev_cache[cache_key] \ No newline at end of file diff --git a/src/devrev_mcp/resources/artifact_tickets.py b/src/devrev_mcp/resources/artifact_tickets.py deleted file mode 100644 index f6f631e..0000000 --- a/src/devrev_mcp/resources/artifact_tickets.py +++ /dev/null @@ -1,34 +0,0 @@ -""" -DevRev Artifact Tickets Resource Handler - -Provides reverse lookup from artifacts to tickets that reference them. 
-""" - -import json -from fastmcp import Context - - -async def artifact_tickets(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: - """ - Access tickets that reference this artifact. - - Args: - artifact_id: The DevRev artifact ID - ctx: FastMCP context - devrev_cache: Cache dictionary for storing results - - Returns: - JSON string containing linked tickets - """ - # This would require a search or reverse lookup in DevRev API - # For now, return a placeholder structure - - result = { - "linked_tickets": [], # Would be populated with actual ticket URIs - "message": "Reverse artifact lookup not yet implemented", - "links": { - "artifact": f"devrev://artifacts/{artifact_id}" - } - } - - return json.dumps(result, indent=2) \ No newline at end of file diff --git a/src/devrev_mcp/resources/ticket.py b/src/devrev_mcp/resources/ticket.py index 6b1630b..fe50b79 100644 --- a/src/devrev_mcp/resources/ticket.py +++ b/src/devrev_mcp/resources/ticket.py @@ -7,8 +7,11 @@ import json from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import resource_error_handler +from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST +@resource_error_handler("ticket") async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access DevRev ticket details with enriched timeline entries and artifact data. 
@@ -21,91 +24,102 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: Returns: JSON string containing the ticket data with timeline entries and artifacts """ + # Convert simple ID to TKT- format for API calls + if ticket_id.upper().startswith("TKT-"): + # Extract numeric part and reformat + numeric_id = ticket_id[4:] # Remove TKT- or tkt- + normalized_id = f"TKT-{numeric_id}" + else: + normalized_id = f"TKT-{ticket_id}" + cache_key = f"ticket:{ticket_id}" + + # Check cache first + if cache_key in devrev_cache: + await ctx.info(f"Retrieved ticket {normalized_id} from cache") + return devrev_cache[cache_key] + + await ctx.info(f"Fetching ticket {normalized_id} from DevRev API") + + # Get ticket details + response = make_devrev_request(WORKS_GET, {"id": normalized_id}) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to fetch ticket {normalized_id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch ticket {normalized_id} (HTTP {response.status_code}): {error_text}") + + result = response.json() + + # Extract the work object from the API response + if isinstance(result, dict) and "work" in result: + result = result["work"] + + # Get timeline entries for the ticket try: - # Convert simple ID to TKT- format for API calls - if ticket_id.upper().startswith("TKT-"): - # Extract numeric part and reformat - numeric_id = ticket_id[4:] # Remove TKT- or tkt- - normalized_id = f"TKT-{numeric_id}" - else: - normalized_id = f"TKT-{ticket_id}" - cache_key = f"ticket:{ticket_id}" - - # Check cache first - if cache_key in devrev_cache: - await ctx.info(f"Retrieved ticket {normalized_id} from cache") - return devrev_cache[cache_key] - - await ctx.info(f"Fetching ticket {normalized_id} from DevRev API") - - # Get ticket details - response = make_devrev_request("works.get", {"id": normalized_id}) - - if response.status_code != 200: - error_text = response.text - await ctx.error(f"Failed 
to fetch ticket {normalized_id}: HTTP {response.status_code} - {error_text}") - raise ValueError(f"Failed to fetch ticket {normalized_id} (HTTP {response.status_code}): {error_text}") - - result = response.json() + timeline_response = make_devrev_request( + TIMELINE_ENTRIES_LIST, + {"object": normalized_id} + ) - # Extract the work object from the API response - if isinstance(result, dict) and "work" in result: - result = result["work"] - - # Get timeline entries for the ticket - try: - timeline_response = make_devrev_request( - "timeline-entries.list", - {"object": normalized_id} - ) + if timeline_response.status_code == 200: + timeline_data = timeline_response.json() + timeline_entries = timeline_data.get("timeline_entries", []) + result["timeline_entries"] = timeline_entries + await ctx.info(f"Added {len(timeline_entries)} timeline entries to ticket {normalized_id}") + + # Extract artifact data directly from timeline entries (no additional API calls needed) + artifacts = [] + seen_artifact_ids = set() # Avoid duplicates across timeline entries + + for entry in timeline_entries: + if "artifacts" in entry: + for artifact in entry["artifacts"]: + # Timeline entries contain full artifact objects, not just IDs + if isinstance(artifact, dict): + artifact_id = artifact.get("id", "") + if artifact_id and artifact_id not in seen_artifact_ids: + seen_artifact_ids.add(artifact_id) + + # Add navigation link for downloading + artifact_id_clean = artifact_id.split("/")[-1] if "/" in artifact_id else artifact_id + artifact["links"] = { + "download": f"devrev://artifacts/{artifact_id_clean}/download", + "ticket": f"devrev://tickets/{ticket_id}" + } + artifacts.append(artifact) + elif isinstance(artifact, str): + # Fallback: if it's just an ID string, create minimal artifact object + if artifact not in seen_artifact_ids: + seen_artifact_ids.add(artifact) + artifact_id_clean = artifact.split("/")[-1] if "/" in artifact else artifact + artifacts.append({ + "id": artifact, + 
"links": { + "download": f"devrev://artifacts/{artifact_id_clean}/download", + "ticket": f"devrev://tickets/{ticket_id}" + } + }) + + result["artifacts"] = artifacts + await ctx.info(f"Extracted {len(artifacts)} artifacts from timeline entries for ticket {normalized_id}") - if timeline_response.status_code == 200: - timeline_data = timeline_response.json() - timeline_entries = timeline_data.get("timeline_entries", []) - result["timeline_entries"] = timeline_entries - await ctx.info(f"Added {len(timeline_entries)} timeline entries to ticket {normalized_id}") - - # Extract and gather artifact data from timeline entries - artifacts = [] - for entry in timeline_entries: - if "artifacts" in entry: - for artifact_id in entry["artifacts"]: - try: - artifact_response = make_devrev_request( - "artifacts.get", - {"id": artifact_id} - ) - if artifact_response.status_code == 200: - artifact_data = artifact_response.json() - artifacts.append(artifact_data) - except Exception as e: - await ctx.warning(f"Could not fetch artifact {artifact_id}: {str(e)}") - - result["artifacts"] = artifacts - await ctx.info(f"Added {len(artifacts)} artifacts to ticket {normalized_id}") - - else: - await ctx.warning(f"Could not fetch timeline entries for ticket {normalized_id}") - result["timeline_entries"] = [] - result["artifacts"] = [] - except Exception as e: - await ctx.warning(f"Error fetching timeline entries for ticket {normalized_id}: {str(e)}") + else: + await ctx.warning(f"Could not fetch timeline entries for ticket {normalized_id}") result["timeline_entries"] = [] result["artifacts"] = [] - - # Add navigation links - result["links"] = { - "timeline": f"devrev://tickets/{ticket_id}/timeline", - "artifacts": f"devrev://tickets/{ticket_id}/artifacts" - } - - # Cache the enriched result - devrev_cache[cache_key] = json.dumps(result, indent=2) - await ctx.info(f"Successfully retrieved and cached ticket: {normalized_id}") - - return devrev_cache[cache_key] - except Exception as e: - await 
ctx.error(f"Failed to get ticket resource {ticket_id}: {str(e)}") - # Return empty JSON object instead of raising exception - return json.dumps({"error": f"Ticket resource {ticket_id} not found: {str(e)}"}, indent=2) \ No newline at end of file + await ctx.warning(f"Error fetching timeline entries for ticket {normalized_id}: {str(e)}") + result["timeline_entries"] = [] + result["artifacts"] = [] + + # Add navigation links + result["links"] = { + "timeline": f"devrev://tickets/{ticket_id}/timeline", + "artifacts": f"devrev://tickets/{ticket_id}/artifacts" + } + + # Cache the enriched result + devrev_cache[cache_key] = json.dumps(result, indent=2) + await ctx.info(f"Successfully retrieved and cached ticket: {normalized_id}") + + return devrev_cache[cache_key] \ No newline at end of file diff --git a/src/devrev_mcp/resources/ticket_artifacts.py b/src/devrev_mcp/resources/ticket_artifacts.py index 8972109..95e9c3b 100644 --- a/src/devrev_mcp/resources/ticket_artifacts.py +++ b/src/devrev_mcp/resources/ticket_artifacts.py @@ -7,8 +7,10 @@ import json from fastmcp import Context from .ticket import ticket as ticket_resource +from ..error_handler import resource_error_handler +@resource_error_handler("ticket_artifacts") async def ticket_artifacts(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access all artifacts for a ticket. diff --git a/src/devrev_mcp/resources/timeline.py b/src/devrev_mcp/resources/timeline.py index 4fd4309..cfbf317 100644 --- a/src/devrev_mcp/resources/timeline.py +++ b/src/devrev_mcp/resources/timeline.py @@ -8,8 +8,11 @@ from fastmcp import Context from ..utils import make_devrev_request from ..types import VisibilityInfo, format_visibility_summary +from ..error_handler import resource_error_handler +from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST +@resource_error_handler("timeline") async def ticket_timeline(ticket_id: str, ctx: Context) -> str: """ Access enriched timeline for a ticket with structured conversation format. 
@@ -31,7 +34,7 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: normalized_id = f"TKT-{ticket_id}" # Get ticket details for customer and workspace info - ticket_response = make_devrev_request("works.get", {"id": normalized_id}) + ticket_response = make_devrev_request(WORKS_GET, {"id": normalized_id}) if ticket_response.status_code != 200: raise ValueError(f"Failed to fetch ticket {normalized_id}") @@ -54,7 +57,7 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: request_payload["mode"] = "after" # Get entries after this cursor timeline_response = make_devrev_request( - "timeline-entries.list", + TIMELINE_ENTRIES_LIST, request_payload ) diff --git a/src/devrev_mcp/resources/timeline_entry.py b/src/devrev_mcp/resources/timeline_entry.py index 6f0b7a2..efc55fa 100644 --- a/src/devrev_mcp/resources/timeline_entry.py +++ b/src/devrev_mcp/resources/timeline_entry.py @@ -7,8 +7,11 @@ import json from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import resource_error_handler +from ..endpoints import TIMELINE_ENTRIES_GET +@resource_error_handler("timeline_entry") async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access specific timeline entry details. 
@@ -33,7 +36,7 @@ async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> # For timeline entries, use timeline-entries.get endpoint response = make_devrev_request( - "timeline-entries.get", + TIMELINE_ENTRIES_GET, {"id": timeline_id} ) diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 9ea9ee9..36ed92b 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -16,7 +16,6 @@ from .resources.timeline_entry import timeline_entry as timeline_entry_resource from .resources.artifact import artifact as artifact_resource from .resources.ticket_artifacts import ticket_artifacts as ticket_artifacts_resource -from .resources.artifact_tickets import artifact_tickets as artifact_tickets_resource from .tools.get_object import get_object as get_object_tool from .tools.get_timeline_entries import get_timeline_entries as get_timeline_entries_tool from .tools.get_ticket import get_ticket as get_ticket_tool @@ -144,28 +143,7 @@ async def ticket(ticket_id: str, ctx: Context) -> str: Returns: JSON string containing the ticket data with navigation links """ - result = await ticket_resource(ticket_id, ctx, devrev_cache) - - # Debug: Log the result details - await ctx.info(f"ticket_resource returned result type: {type(result)}, length: {len(result) if result else 0}") - if result: - await ctx.info(f"Result preview: {repr(result[:100])}") - - # Debug: Check if result is empty - if not result: - await ctx.error(f"ticket_resource returned empty result for ticket_id: {ticket_id}") - raise ValueError(f"Empty result from ticket_resource for ticket {ticket_id}") - - # Parse the result and add navigation links - import json - ticket_data = json.loads(result) - ticket_data["links"] = { - "timeline": f"devrev://tickets/{ticket_id}/timeline", - "artifacts": f"devrev://tickets/{ticket_id}/artifacts" - } - - # Return JSON string as expected by MCP framework - return json.dumps(ticket_data, indent=2) + return await ticket_resource(ticket_id, 
ctx, devrev_cache) @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline", @@ -213,7 +191,7 @@ async def timeline_entry(ticket_id: str, entry_id: str, ctx: Context) -> str: # Construct full timeline ID if needed if not entry_id.startswith("don:core:"): # This is a simplified ID, we'll need to fetch it via the ticket timeline - return await ticket_timeline(ticket_id, ctx) + return await timeline_resource(ticket_id, ctx) result = await timeline_entry_resource(entry_id, ctx, devrev_cache) @@ -246,46 +224,24 @@ async def ticket_artifacts(ticket_id: str, ctx: Context) -> str: @mcp.resource( uri="devrev://artifacts/{artifact_id}", - description="Access DevRev artifact metadata with temporary download URLs and reverse links to associated tickets.", - tags=["artifact", "devrev", "files", "reverse-links"] + description="Access DevRev artifact metadata with temporary download URLs.", + tags=["artifact", "devrev", "files"] ) async def artifact(artifact_id: str, ctx: Context) -> str: """ - Access DevRev artifact metadata with reverse links. + Access DevRev artifact metadata. Args: artifact_id: The DevRev artifact ID Returns: - JSON string containing the artifact metadata with reverse links + JSON string containing the artifact metadata """ result = await artifact_resource(artifact_id, ctx, devrev_cache) - # Add reverse links (would need to be implemented based on DevRev API capabilities) - import json - artifact_data = json.loads(result) - artifact_data["links"] = { - "tickets": f"devrev://artifacts/{artifact_id}/tickets" - } - - return json.dumps(artifact_data, indent=2) + # Return the artifact data directly + return result -@mcp.resource( - uri="devrev://artifacts/{artifact_id}/tickets", - description="Access all tickets that reference this artifact. 
Provides reverse lookup from artifacts to tickets.", - tags=["artifact", "reverse-links", "devrev", "tickets"] -) -async def artifact_tickets(artifact_id: str, ctx: Context) -> str: - """ - Access tickets that reference this artifact. - - Args: - artifact_id: The DevRev artifact ID - - Returns: - JSON string containing linked tickets - """ - return await artifact_tickets_resource(artifact_id, ctx, devrev_cache) @mcp.tool( name="get_timeline_entries", diff --git a/src/devrev_mcp/tools/create_object.py b/src/devrev_mcp/tools/create_object.py index f6a4b45..6904b37 100644 --- a/src/devrev_mcp/tools/create_object.py +++ b/src/devrev_mcp/tools/create_object.py @@ -7,8 +7,11 @@ import json from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import tool_error_handler +from ..endpoints import WORKS_CREATE +@tool_error_handler("create_object") async def create_object( type: str, title: str, @@ -48,7 +51,7 @@ async def create_object( if owned_by: payload["owned_by"] = owned_by - response = make_devrev_request("works.create", payload) + response = make_devrev_request(WORKS_CREATE, payload) if response.status_code != 200: error_text = response.text diff --git a/src/devrev_mcp/tools/download_artifact.py b/src/devrev_mcp/tools/download_artifact.py index d425bce..99890ab 100644 --- a/src/devrev_mcp/tools/download_artifact.py +++ b/src/devrev_mcp/tools/download_artifact.py @@ -10,8 +10,11 @@ from pathlib import Path from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import tool_error_handler +from ..endpoints import ARTIFACTS_GET, ARTIFACTS_LOCATE +@tool_error_handler("download_artifact") async def download_artifact(artifact_id: str, download_directory: str, ctx: Context) -> str: """ Download a DevRev artifact to a specified directory. 
@@ -32,7 +35,7 @@ async def download_artifact(artifact_id: str, download_directory: str, ctx: Cont # First, get artifact information using artifacts.get artifact_response = make_devrev_request( - "artifacts.get", + ARTIFACTS_GET, {"id": artifact_id} ) @@ -65,7 +68,7 @@ async def download_artifact(artifact_id: str, download_directory: str, ctx: Cont await ctx.info("No direct download URL found, attempting to locate artifact...") # Try a different approach - some APIs have a separate locate endpoint locate_response = make_devrev_request( - "artifacts.locate", + ARTIFACTS_LOCATE, {"id": artifact_id} ) diff --git a/src/devrev_mcp/tools/get_object.py b/src/devrev_mcp/tools/get_object.py index d18f401..7974186 100644 --- a/src/devrev_mcp/tools/get_object.py +++ b/src/devrev_mcp/tools/get_object.py @@ -7,8 +7,11 @@ import json from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import tool_error_handler +from ..endpoints import WORKS_GET +@tool_error_handler("get_object") async def get_object(id: str, ctx: Context, devrev_cache: dict) -> str: """ Get all information about a DevRev issue and ticket using its ID. 
@@ -24,7 +27,7 @@ async def get_object(id: str, ctx: Context, devrev_cache: dict) -> str: try: await ctx.info(f"Fetching object {id} from DevRev") - response = make_devrev_request("works.get", {"id": id}) + response = make_devrev_request(WORKS_GET, {"id": id}) if response.status_code != 200: error_text = response.text diff --git a/src/devrev_mcp/tools/get_ticket.py b/src/devrev_mcp/tools/get_ticket.py index 6aac1b2..6c1c0b4 100644 --- a/src/devrev_mcp/tools/get_ticket.py +++ b/src/devrev_mcp/tools/get_ticket.py @@ -6,8 +6,10 @@ import json from fastmcp import Context +from ..error_handler import tool_error_handler +@tool_error_handler("get_ticket") async def get_ticket( id: str, ctx: Context diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py index d935540..f62d6db 100644 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -7,8 +7,10 @@ import json from fastmcp import Context from ..types import VisibilityInfo, TimelineEntryType +from ..error_handler import tool_error_handler +@tool_error_handler("get_timeline_entries") async def get_timeline_entries( id: str, ctx: Context, diff --git a/src/devrev_mcp/tools/search.py b/src/devrev_mcp/tools/search.py index c417d45..48c3a72 100644 --- a/src/devrev_mcp/tools/search.py +++ b/src/devrev_mcp/tools/search.py @@ -10,8 +10,11 @@ from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import tool_error_handler +from ..endpoints import SEARCH_HYBRID +@tool_error_handler("search") async def search(query: str, namespace: str, ctx: Context) -> str: """ Search DevRev using the provided query and return parsed, useful information. 
@@ -31,7 +34,7 @@ async def search(query: str, namespace: str, ctx: Context) -> str: await ctx.info(f"Searching DevRev for '{query}' in namespace '{namespace}'") response = make_devrev_request( - "search.hybrid", + SEARCH_HYBRID, {"query": query, "namespace": namespace} ) diff --git a/src/devrev_mcp/tools/update_object.py b/src/devrev_mcp/tools/update_object.py index a4a0ee2..debac70 100644 --- a/src/devrev_mcp/tools/update_object.py +++ b/src/devrev_mcp/tools/update_object.py @@ -7,8 +7,11 @@ import json from fastmcp import Context from ..utils import make_devrev_request +from ..error_handler import tool_error_handler +from ..endpoints import WORKS_UPDATE +@tool_error_handler("update_object") async def update_object( id: str, type: str, @@ -50,7 +53,7 @@ async def update_object( if body: payload["body"] = body - response = make_devrev_request("works.update", payload) + response = make_devrev_request(WORKS_UPDATE, payload) if response.status_code != 200: error_text = response.text diff --git a/src/devrev_mcp/utils.py b/src/devrev_mcp/utils.py index 557ed08..17efe9a 100644 --- a/src/devrev_mcp/utils.py +++ b/src/devrev_mcp/utils.py @@ -14,7 +14,7 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp Make an authenticated request to the DevRev API. 
Args: - endpoint: The API endpoint path (e.g., "works.get" or "search.hybrid") + endpoint: The API endpoint path (use constants from endpoints.py) payload: The JSON payload to send Returns: From ef140c1fa873650183690f047ab7907b9029195a Mon Sep 17 00:00:00 2001 From: Sara Date: Mon, 2 Jun 2025 16:18:18 -0400 Subject: [PATCH 13/17] more PR cleanup --- src/devrev_mcp/cache.py | 65 ++++++++++++++++++++++ src/devrev_mcp/resources/artifact.py | 17 ++---- src/devrev_mcp/resources/ticket.py | 21 +++---- src/devrev_mcp/resources/timeline.py | 32 +++++++---- src/devrev_mcp/resources/timeline_entry.py | 16 +++--- src/devrev_mcp/server.py | 30 +++------- src/devrev_mcp/tools/get_object.py | 47 ---------------- src/devrev_mcp/tools/update_object.py | 4 +- src/devrev_mcp/utils.py | 53 +++++++++++++++++- 9 files changed, 170 insertions(+), 115 deletions(-) create mode 100644 src/devrev_mcp/cache.py delete mode 100644 src/devrev_mcp/tools/get_object.py diff --git a/src/devrev_mcp/cache.py b/src/devrev_mcp/cache.py new file mode 100644 index 0000000..e162184 --- /dev/null +++ b/src/devrev_mcp/cache.py @@ -0,0 +1,65 @@ +""" +Simple size-limited cache for DevRev MCP server. + +Prevents unbounded memory growth by limiting cache size and using simple LRU eviction. 
+""" + +from collections import OrderedDict +from typing import Any, Dict, Optional, Union +import json + + +class SimpleCache: + """Simple LRU cache with size limit to prevent memory leaks.""" + + def __init__(self, max_size: int = 500): + """Initialize cache with maximum size limit.""" + self.max_size = max_size + self._cache: OrderedDict[str, str] = OrderedDict() + + def get(self, key: str) -> Optional[str]: + """Get value from cache, moving it to end (most recently used).""" + if key in self._cache: + # Move to end (most recently used) + value = self._cache.pop(key) + self._cache[key] = value + return value + return None + + def set(self, key: str, value: Union[str, Dict[str, Any]]) -> None: + """Set value in cache, evicting oldest if needed.""" + # Convert dict to JSON string if needed + if isinstance(value, dict): + cache_value = json.dumps(value, indent=2) + else: + cache_value = str(value) + + # Remove if already exists + if key in self._cache: + del self._cache[key] + + # Add to end + self._cache[key] = cache_value + + # Evict oldest if over limit + while len(self._cache) > self.max_size: + self._cache.popitem(last=False) # Remove oldest (first item) + + def delete(self, key: str) -> bool: + """Remove key from cache.""" + if key in self._cache: + del self._cache[key] + return True + return False + + def size(self) -> int: + """Get current number of cache entries.""" + return len(self._cache) + + def __contains__(self, key: str) -> bool: + """Check if key exists in cache.""" + return key in self._cache + + +# Global cache instance - replaces devrev_cache = {} +devrev_cache = SimpleCache(max_size=500) \ No newline at end of file diff --git a/src/devrev_mcp/resources/artifact.py b/src/devrev_mcp/resources/artifact.py index 183fa2b..f929e4b 100644 --- a/src/devrev_mcp/resources/artifact.py +++ b/src/devrev_mcp/resources/artifact.py @@ -30,9 +30,10 @@ async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: cache_key = 
f"artifact:{artifact_id}" # Check cache first - if cache_key in devrev_cache: + cached_value = devrev_cache.get(cache_key) + if cached_value is not None: await ctx.info(f"Retrieved artifact {artifact_id} from cache") - return devrev_cache[cache_key] + return cached_value await ctx.info(f"Fetching artifact {artifact_id} from DevRev API") @@ -75,16 +76,10 @@ async def artifact(artifact_id: str, ctx: Context, devrev_cache: dict) -> str: await ctx.info(f"Could not locate download URL for artifact {artifact_id}: {str(locate_error)}") # Continue without download URL - # Add navigation links to timeline entry (artifacts belong to timeline entries) - # Note: We'd need to determine the timeline entry ID from the artifact context - # For now, adding a placeholder structure that could be populated based on API response - result["links"] = { - "timeline_entry": "devrev://timeline-entries/{timeline_entry_id}", # Would need actual ID - "note": "Artifact belongs to a specific timeline entry, which belongs to a ticket" - } # Cache the result - devrev_cache[cache_key] = json.dumps(result, indent=2) + cache_value = json.dumps(result, indent=2) + devrev_cache.set(cache_key, cache_value) await ctx.info(f"Successfully retrieved and cached artifact: {artifact_id}") - return devrev_cache[cache_key] \ No newline at end of file + return cache_value \ No newline at end of file diff --git a/src/devrev_mcp/resources/ticket.py b/src/devrev_mcp/resources/ticket.py index fe50b79..f0311bd 100644 --- a/src/devrev_mcp/resources/ticket.py +++ b/src/devrev_mcp/resources/ticket.py @@ -6,7 +6,7 @@ import json from fastmcp import Context -from ..utils import make_devrev_request +from ..utils import make_devrev_request, normalize_ticket_id from ..error_handler import resource_error_handler from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST @@ -24,19 +24,15 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: Returns: JSON string containing the ticket data with timeline 
entries and artifacts """ - # Convert simple ID to TKT- format for API calls - if ticket_id.upper().startswith("TKT-"): - # Extract numeric part and reformat - numeric_id = ticket_id[4:] # Remove TKT- or tkt- - normalized_id = f"TKT-{numeric_id}" - else: - normalized_id = f"TKT-{ticket_id}" + # Normalize ticket ID for API calls + normalized_id = normalize_ticket_id(ticket_id) cache_key = f"ticket:{ticket_id}" # Check cache first - if cache_key in devrev_cache: + cached_value = devrev_cache.get(cache_key) + if cached_value is not None: await ctx.info(f"Retrieved ticket {normalized_id} from cache") - return devrev_cache[cache_key] + return cached_value await ctx.info(f"Fetching ticket {normalized_id} from DevRev API") @@ -119,7 +115,8 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: } # Cache the enriched result - devrev_cache[cache_key] = json.dumps(result, indent=2) + cache_value = json.dumps(result, indent=2) + devrev_cache.set(cache_key, cache_value) await ctx.info(f"Successfully retrieved and cached ticket: {normalized_id}") - return devrev_cache[cache_key] \ No newline at end of file + return cache_value \ No newline at end of file diff --git a/src/devrev_mcp/resources/timeline.py b/src/devrev_mcp/resources/timeline.py index cfbf317..0204d5b 100644 --- a/src/devrev_mcp/resources/timeline.py +++ b/src/devrev_mcp/resources/timeline.py @@ -6,14 +6,12 @@ import json from fastmcp import Context -from ..utils import make_devrev_request +from ..utils import make_devrev_request, normalize_ticket_id from ..types import VisibilityInfo, format_visibility_summary from ..error_handler import resource_error_handler from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST - - @resource_error_handler("timeline") -async def ticket_timeline(ticket_id: str, ctx: Context) -> str: +async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: """ Access enriched timeline for a ticket with structured conversation format. 
@@ -25,13 +23,18 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: JSON string containing enriched timeline with customer context and conversation flow """ try: - # Normalize ticket ID to handle various formats - extract just the number then format properly - if ticket_id.upper().startswith("TKT-"): - # Extract numeric part and reformat - numeric_id = ticket_id[4:] # Remove TKT- or tkt- - normalized_id = f"TKT-{numeric_id}" - else: - normalized_id = f"TKT-{ticket_id}" + # Normalize ticket ID for API calls + normalized_id = normalize_ticket_id(ticket_id) + + cache_key = f"ticket_timeline:{ticket_id}" + + # Check cache first + cached_value = devrev_cache.get(cache_key) + if cached_value is not None: + await ctx.info(f"Retrieved timeline for {normalized_id} from cache") + return cached_value + + await ctx.info(f"Fetching timeline for {normalized_id} from DevRev API") # Get ticket details for customer and workspace info ticket_response = make_devrev_request(WORKS_GET, {"id": normalized_id}) @@ -273,7 +276,12 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: if result["all_artifacts"]: result["links"]["artifacts"] = f"devrev://tickets/{ticket_id}/artifacts" - return json.dumps(result, indent=2) + # Cache the enriched result + cache_value = json.dumps(result, indent=2) + devrev_cache.set(cache_key, cache_value) + await ctx.info(f"Successfully retrieved and cached timeline: {normalized_id}") + + return cache_value except Exception as e: await ctx.error(f"Failed to get timeline for ticket {ticket_id}: {str(e)}") diff --git a/src/devrev_mcp/resources/timeline_entry.py b/src/devrev_mcp/resources/timeline_entry.py index efc55fa..0bfb8dd 100644 --- a/src/devrev_mcp/resources/timeline_entry.py +++ b/src/devrev_mcp/resources/timeline_entry.py @@ -6,7 +6,7 @@ import json from fastmcp import Context -from ..utils import make_devrev_request +from ..utils import make_devrev_request, extract_ticket_id_from_object from ..error_handler import 
resource_error_handler from ..endpoints import TIMELINE_ENTRIES_GET @@ -25,12 +25,13 @@ async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> JSON string containing the timeline entry data """ try: - cache_key = f"timeline:{timeline_id}" + cache_key = f"timeline_entry:{timeline_id}" # Check cache first - if cache_key in devrev_cache: + cached_value = devrev_cache.get(cache_key) + if cached_value is not None: await ctx.info(f"Retrieved timeline entry {timeline_id} from cache") - return devrev_cache[cache_key] + return cached_value await ctx.info(f"Fetching timeline entry {timeline_id} from DevRev API") @@ -53,7 +54,7 @@ async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> if "object" in result: object_id = result["object"] if "TKT-" in object_id: - ticket_id = object_id.replace("TKT-", "") + ticket_id = extract_ticket_id_from_object(object_id) links = {} if ticket_id: @@ -67,10 +68,11 @@ async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> result["links"] = links # Cache the result - devrev_cache[cache_key] = json.dumps(result, indent=2) + cache_value = json.dumps(result, indent=2) + devrev_cache.set(cache_key, cache_value) await ctx.info(f"Successfully retrieved and cached timeline entry: {timeline_id}") - return devrev_cache[cache_key] + return cache_value except Exception as e: await ctx.error(f"Failed to get timeline resource {timeline_id}: {str(e)}") diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 36ed92b..430ea9c 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -12,11 +12,10 @@ # Import modular resources and tools from .resources.ticket import ticket as ticket_resource -from .resources.timeline import ticket_timeline as timeline_resource +from .resources.timeline import timeline as timeline_resource from .resources.timeline_entry import timeline_entry as timeline_entry_resource from .resources.artifact import artifact as 
artifact_resource from .resources.ticket_artifacts import ticket_artifacts as ticket_artifacts_resource -from .tools.get_object import get_object as get_object_tool from .tools.get_timeline_entries import get_timeline_entries as get_timeline_entries_tool from .tools.get_ticket import get_ticket as get_ticket_tool from .tools.search import search as search_tool @@ -34,8 +33,8 @@ description="DevRev MCP Server - Provides tools for interacting with DevRev API" ) -# Store DevRev resources (works, comments, etc.) for resource access -devrev_cache = {} +# Import cache utility to prevent unbounded memory growth +from .cache import devrev_cache @mcp.tool( name="search", @@ -109,22 +108,6 @@ async def update_object( """ return await update_object_tool(id, type, title, body, ctx, devrev_cache) -@mcp.tool( - name="get_object", - description="Retrieve comprehensive information about any DevRev object including tickets, issues, parts, and users. Returns complete metadata, relationships, assignment details, and history for thorough analysis and investigation.", - tags=["retrieve", "devrev", "objects", "metadata", "investigation", "analysis"] -) -async def get_object(id: str, ctx: Context) -> str: - """ - Get all information about a DevRev issue and ticket using its ID. 
- - Args: - id: The DevRev object ID - - Returns: - JSON string containing the object information - """ - return await get_object_tool(id, ctx, devrev_cache) # Specialized resource handlers for different DevRev object types @@ -170,7 +153,7 @@ async def ticket_timeline(ticket_id: str, ctx: Context) -> str: Returns: JSON string containing enriched timeline with customer context and conversation flow """ - return await timeline_resource(ticket_id, ctx) + return await timeline_resource(ticket_id, ctx, devrev_cache) @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline/{entry_id}", @@ -290,7 +273,7 @@ async def get_ticket(id: str, ctx: Context) -> str: @mcp.tool( name="download_artifact", - description="Download a DevRev artifact to a specified directory. Retrieves the artifact file and saves it locally with proper metadata.", + description="Download a DevRev artifact to a specified directory using its full artifact ID. Requires the complete don:core artifact ID format (e.g., don:core:dvrv-us-1:devo/123:artifact/456), not just the numeric ID. Retrieves the artifact file and saves it locally with proper metadata.", tags=["download", "artifact", "devrev", "files", "local-storage"] ) async def download_artifact(artifact_id: str, download_directory: str, ctx: Context) -> str: @@ -298,7 +281,8 @@ async def download_artifact(artifact_id: str, download_directory: str, ctx: Cont Download a DevRev artifact to a specified directory. Args: - artifact_id: The DevRev artifact ID to download + artifact_id: The full DevRev artifact ID in don:core format (e.g., don:core:dvrv-us-1:devo/123:artifact/456). + The numeric ID alone (e.g., 456) will not work. 
download_directory: The local directory path where the artifact should be saved Returns: diff --git a/src/devrev_mcp/tools/get_object.py b/src/devrev_mcp/tools/get_object.py deleted file mode 100644 index 7974186..0000000 --- a/src/devrev_mcp/tools/get_object.py +++ /dev/null @@ -1,47 +0,0 @@ -""" -DevRev Get Object Tool - -Provides a tool for fetching DevRev objects by ID. -""" - -import json -from fastmcp import Context -from ..utils import make_devrev_request -from ..error_handler import tool_error_handler -from ..endpoints import WORKS_GET - - -@tool_error_handler("get_object") -async def get_object(id: str, ctx: Context, devrev_cache: dict) -> str: - """ - Get all information about a DevRev issue and ticket using its ID. - - Args: - id: The DevRev object ID - ctx: FastMCP context - devrev_cache: Cache dictionary for storing results - - Returns: - JSON string containing the object information - """ - try: - await ctx.info(f"Fetching object {id} from DevRev") - - response = make_devrev_request(WORKS_GET, {"id": id}) - - if response.status_code != 200: - error_text = response.text - await ctx.error(f"Failed to get object {id}: HTTP {response.status_code} - {error_text}") - raise ValueError(f"Failed to get object {id} (HTTP {response.status_code}): {error_text}") - - result_data = response.json() - - # Cache the result - devrev_cache[id] = json.dumps(result_data, indent=2) - - await ctx.info(f"Successfully retrieved object: {id}") - return devrev_cache[id] - - except Exception as e: - await ctx.error(f"Failed to get object {id}: {str(e)}") - raise \ No newline at end of file diff --git a/src/devrev_mcp/tools/update_object.py b/src/devrev_mcp/tools/update_object.py index debac70..a6cb10b 100644 --- a/src/devrev_mcp/tools/update_object.py +++ b/src/devrev_mcp/tools/update_object.py @@ -63,8 +63,8 @@ async def update_object( result_data = response.json() # Update cache if we have this object cached - if devrev_cache and id in devrev_cache: - del devrev_cache[id] + if 
devrev_cache: + devrev_cache.delete(id) await ctx.info(f"Cleared cache for updated object: {id}") await ctx.info(f"Successfully updated {type}: {id}") diff --git a/src/devrev_mcp/utils.py b/src/devrev_mcp/utils.py index 17efe9a..09b983c 100644 --- a/src/devrev_mcp/utils.py +++ b/src/devrev_mcp/utils.py @@ -42,4 +42,55 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp ) return response except requests.RequestException as e: - raise requests.RequestException(f"DevRev API request failed for endpoint '{endpoint}': {e}") from e + raise requests.RequestException(f"DevRev API request failed for endpoint '{endpoint}': {e}") from e + + +def normalize_ticket_id(ticket_id: str) -> str: + """ + Normalize ticket ID to TKT-XXXXX format for API calls. + + Args: + ticket_id: The input ticket ID (e.g., "12345", "TKT-12345", "tkt-12345") + + Returns: + Normalized ticket ID in TKT-XXXXX format + + Examples: + normalize_ticket_id("12345") -> "TKT-12345" + normalize_ticket_id("TKT-12345") -> "TKT-12345" + normalize_ticket_id("tkt-12345") -> "TKT-12345" + """ + if not ticket_id: + return ticket_id + + # Remove any existing TKT- prefix (case insensitive) + if ticket_id.upper().startswith("TKT-"): + numeric_id = ticket_id[4:] # Remove TKT- or tkt- + else: + numeric_id = ticket_id + + # Return normalized format + return f"TKT-{numeric_id}" + + +def extract_ticket_id_from_object(object_id: str) -> str: + """ + Extract numeric ticket ID from object ID containing TKT- prefix. 
+ + Args: + object_id: Object ID that may contain TKT- prefix + + Returns: + Numeric part of ticket ID + + Examples: + extract_ticket_id_from_object("TKT-12345") -> "12345" + extract_ticket_id_from_object("12345") -> "12345" + """ + if not object_id: + return object_id + + if "TKT-" in object_id: + return object_id.replace("TKT-", "") + + return object_id From 0584f8701c5631d161560cdb01039ed49541a20b Mon Sep 17 00:00:00 2001 From: Sara Date: Mon, 2 Jun 2025 19:28:01 -0400 Subject: [PATCH 14/17] slight cache improvement / docs improvement --- src/devrev_mcp/resources/ticket.py | 2 +- src/devrev_mcp/server.py | 5 +++-- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/src/devrev_mcp/resources/ticket.py b/src/devrev_mcp/resources/ticket.py index f0311bd..97ba8b2 100644 --- a/src/devrev_mcp/resources/ticket.py +++ b/src/devrev_mcp/resources/ticket.py @@ -26,7 +26,7 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: """ # Normalize ticket ID for API calls normalized_id = normalize_ticket_id(ticket_id) - cache_key = f"ticket:{ticket_id}" + cache_key = f"devrev://tickets/{ticket_id}" # Check cache first cached_value = devrev_cache.get(cache_key) diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 430ea9c..4a538b7 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -84,7 +84,7 @@ async def create_object( @mcp.tool( name="update_object", - description="Update existing DevRev tickets or issues with new information, descriptions, or titles. Maintains object history and audit trails while allowing incremental updates as investigations progress.", + description="Update existing DevRev tickets or issues with new information, descriptions, or titles. Accepts flexible ID formats for tickets/issues. 
Maintains object history and audit trails while allowing incremental updates as investigations progress.", tags=["update", "devrev", "tickets", "issues", "maintenance", "audit"] ) async def update_object( @@ -98,7 +98,8 @@ async def update_object( Update an existing issue or ticket in DevRev. Args: - id: The ID of the object to update + id: The DevRev object ID - for tickets accepts TKT-12345, 12345, or full don:core format; + for issues accepts ISS-12345, 12345, or full don:core format type: The type of object ("issue" or "ticket") title: New title for the object (optional) body: New body/description for the object (optional) From b2535daf07a162d289777ecc67749e202f0736b1 Mon Sep 17 00:00:00 2001 From: Sara Date: Tue, 3 Jun 2025 11:40:02 -0400 Subject: [PATCH 15/17] gets issues and comments working --- src/devrev_mcp/endpoints.py | 10 +- src/devrev_mcp/resources/issue.py | 133 ++++++ src/devrev_mcp/resources/ticket.py | 57 +-- src/devrev_mcp/resources/timeline.py | 24 +- src/devrev_mcp/resources/timeline_entry.py | 4 +- src/devrev_mcp/resources/work.py | 129 ++++++ src/devrev_mcp/server.py | 391 ++++++++++++++++-- .../tools/create_timeline_comment.py | 103 +++++ src/devrev_mcp/tools/get_issue.py | 34 ++ src/devrev_mcp/tools/get_ticket.py | 117 ++---- src/devrev_mcp/tools/get_timeline_entries.py | 141 +++---- src/devrev_mcp/tools/get_work.py | 58 +++ src/devrev_mcp/utils.py | 272 ++++++++++-- 13 files changed, 1221 insertions(+), 252 deletions(-) create mode 100644 src/devrev_mcp/resources/issue.py create mode 100644 src/devrev_mcp/resources/work.py create mode 100644 src/devrev_mcp/tools/create_timeline_comment.py create mode 100644 src/devrev_mcp/tools/get_issue.py create mode 100644 src/devrev_mcp/tools/get_work.py diff --git a/src/devrev_mcp/endpoints.py b/src/devrev_mcp/endpoints.py index d56b2c2..9754993 100644 --- a/src/devrev_mcp/endpoints.py +++ b/src/devrev_mcp/endpoints.py @@ -17,6 +17,7 @@ class DevRevEndpoints: # Timeline Entries TIMELINE_ENTRIES_LIST = 
"timeline-entries.list" TIMELINE_ENTRIES_GET = "timeline-entries.get" + TIMELINE_ENTRIES_CREATE = "timeline-entries.create" # Artifacts ARTIFACTS_GET = "artifacts.get" @@ -24,6 +25,10 @@ class DevRevEndpoints: # Search SEARCH_HYBRID = "search.hybrid" + + # Links + LINKS_LIST = "links.list" + LINK_TYPES_LIST = "link-types.custom.list" # Convenience exports for simpler imports @@ -32,6 +37,9 @@ class DevRevEndpoints: WORKS_UPDATE = DevRevEndpoints.WORKS_UPDATE TIMELINE_ENTRIES_LIST = DevRevEndpoints.TIMELINE_ENTRIES_LIST TIMELINE_ENTRIES_GET = DevRevEndpoints.TIMELINE_ENTRIES_GET +TIMELINE_ENTRIES_CREATE = DevRevEndpoints.TIMELINE_ENTRIES_CREATE ARTIFACTS_GET = DevRevEndpoints.ARTIFACTS_GET ARTIFACTS_LOCATE = DevRevEndpoints.ARTIFACTS_LOCATE -SEARCH_HYBRID = DevRevEndpoints.SEARCH_HYBRID \ No newline at end of file +SEARCH_HYBRID = DevRevEndpoints.SEARCH_HYBRID +LINKS_LIST = DevRevEndpoints.LINKS_LIST +LINK_TYPES_LIST = DevRevEndpoints.LINK_TYPES_LIST \ No newline at end of file diff --git a/src/devrev_mcp/resources/issue.py b/src/devrev_mcp/resources/issue.py new file mode 100644 index 0000000..b129252 --- /dev/null +++ b/src/devrev_mcp/resources/issue.py @@ -0,0 +1,133 @@ +""" +DevRev Issue Resource Handler + +Provides specialized resource access for DevRev issues with enriched timeline and artifact data. +""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request, fetch_linked_work_items +from ..error_handler import resource_error_handler +from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST + + +@resource_error_handler("issue") +async def issue(issue_number: str, ctx: Context, devrev_cache: dict) -> str: + """ + Access DevRev issue details with enriched timeline entries and artifact data. 
+ + Args: + issue_number: The numeric DevRev issue number (e.g., "9031") + ctx: FastMCP context + devrev_cache: Cache dictionary for storing results + + Returns: + JSON string containing the issue data with timeline entries and artifacts + """ + # Use the display ID format that the API expects + issue_id = f"ISS-{issue_number}" + cache_key = f"devrev://issues/{issue_number}" + + # Check cache first + cached_value = devrev_cache.get(cache_key) + if cached_value is not None: + await ctx.info(f"Retrieved issue {issue_number} from cache") + return cached_value + + await ctx.info(f"Fetching issue {issue_id} from DevRev API") + + # Get issue details using the display ID + response = make_devrev_request(WORKS_GET, {"id": issue_id}) + + if response.status_code != 200: + error_text = response.text + await ctx.error(f"Failed to fetch issue {issue_id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch issue {issue_id} (HTTP {response.status_code}): {error_text}") + + result = response.json() + + # Extract the work object from the API response + if isinstance(result, dict) and "work" in result: + result = result["work"] + + # Get timeline entries for the issue + try: + timeline_response = make_devrev_request( + TIMELINE_ENTRIES_LIST, + {"object": issue_id} + ) + + if timeline_response.status_code == 200: + timeline_data = timeline_response.json() + timeline_entries = timeline_data.get("timeline_entries", []) + result["timeline_entries"] = timeline_entries + await ctx.info(f"Added {len(timeline_entries)} timeline entries to issue {issue_id}") + + # Extract artifact data directly from timeline entries (no additional API calls needed) + artifacts = [] + seen_artifact_ids = set() # Avoid duplicates across timeline entries + + for entry in timeline_entries: + if "artifacts" in entry: + for artifact in entry["artifacts"]: + # Timeline entries contain full artifact objects, not just IDs + if isinstance(artifact, dict): + artifact_id = 
artifact.get("id", "") + if artifact_id and artifact_id not in seen_artifact_ids: + seen_artifact_ids.add(artifact_id) + + # Add navigation link for downloading + artifact_id_clean = artifact_id.split("/")[-1] if "/" in artifact_id else artifact_id + artifact["links"] = { + "download": f"devrev://artifacts/{artifact_id_clean}/download", + "issue": f"devrev://issues/{issue_number}" + } + artifacts.append(artifact) + elif isinstance(artifact, str): + # Fallback: if it's just an ID string, create minimal artifact object + if artifact not in seen_artifact_ids: + seen_artifact_ids.add(artifact) + artifact_id_clean = artifact.split("/")[-1] if "/" in artifact else artifact + artifacts.append({ + "id": artifact, + "links": { + "download": f"devrev://artifacts/{artifact_id_clean}/download", + "issue": f"devrev://issues/{issue_number}" + } + }) + + result["artifacts"] = artifacts + await ctx.info(f"Extracted {len(artifacts)} artifacts from timeline entries for issue {issue_number}") + + else: + await ctx.warning(f"Could not fetch timeline entries for issue {issue_number}") + result["timeline_entries"] = [] + result["artifacts"] = [] + except Exception as e: + await ctx.warning(f"Error fetching timeline entries for issue {issue_number}: {str(e)}") + result["timeline_entries"] = [] + result["artifacts"] = [] + + # Get linked work items using the reusable utility function + work_item_don_id = result.get("id", issue_id) # Use the full don:core ID from the API response + linked_work_items = await fetch_linked_work_items( + work_item_id=work_item_don_id, + work_item_display_id=issue_id, + work_item_type="issue", + ctx=ctx, + cache=devrev_cache + ) + + # Add navigation links + result["links"] = { + "timeline": f"devrev://issues/{issue_number}/timeline", + "artifacts": f"devrev://issues/{issue_number}/artifacts", + "works": linked_work_items + } + + # Cache the enriched result + cache_value = json.dumps(result, indent=2, default=str) + devrev_cache.set(cache_key, cache_value) + 
await ctx.info(f"Successfully retrieved and cached issue: {issue_number}") + + return cache_value \ No newline at end of file diff --git a/src/devrev_mcp/resources/ticket.py b/src/devrev_mcp/resources/ticket.py index 97ba8b2..27b715f 100644 --- a/src/devrev_mcp/resources/ticket.py +++ b/src/devrev_mcp/resources/ticket.py @@ -6,43 +6,43 @@ import json from fastmcp import Context -from ..utils import make_devrev_request, normalize_ticket_id +from ..utils import make_devrev_request, fetch_linked_work_items from ..error_handler import resource_error_handler from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST @resource_error_handler("ticket") -async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: +async def ticket(ticket_number: str, ctx: Context, devrev_cache: dict) -> str: """ Access DevRev ticket details with enriched timeline entries and artifact data. Args: - ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345) + ticket_number: The numeric DevRev ticket ID (e.g., "12345") ctx: FastMCP context devrev_cache: Cache dictionary for storing results Returns: JSON string containing the ticket data with timeline entries and artifacts """ - # Normalize ticket ID for API calls - normalized_id = normalize_ticket_id(ticket_id) - cache_key = f"devrev://tickets/{ticket_id}" + # Use the display ID format that the API expects + ticket_id = f"TKT-{ticket_number}" + cache_key = f"devrev://tickets/{ticket_number}" # Check cache first cached_value = devrev_cache.get(cache_key) if cached_value is not None: - await ctx.info(f"Retrieved ticket {normalized_id} from cache") + await ctx.info(f"Retrieved ticket {ticket_number} from cache") return cached_value - await ctx.info(f"Fetching ticket {normalized_id} from DevRev API") + await ctx.info(f"Fetching ticket {ticket_id} from DevRev API") - # Get ticket details - response = make_devrev_request(WORKS_GET, {"id": normalized_id}) + # Get ticket details using the display ID + response = 
make_devrev_request(WORKS_GET, {"id": ticket_id}) if response.status_code != 200: error_text = response.text - await ctx.error(f"Failed to fetch ticket {normalized_id}: HTTP {response.status_code} - {error_text}") - raise ValueError(f"Failed to fetch ticket {normalized_id} (HTTP {response.status_code}): {error_text}") + await ctx.error(f"Failed to fetch ticket {ticket_id}: HTTP {response.status_code} - {error_text}") + raise ValueError(f"Failed to fetch ticket {ticket_id} (HTTP {response.status_code}): {error_text}") result = response.json() @@ -54,14 +54,14 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: try: timeline_response = make_devrev_request( TIMELINE_ENTRIES_LIST, - {"object": normalized_id} + {"object": ticket_id} ) if timeline_response.status_code == 200: timeline_data = timeline_response.json() timeline_entries = timeline_data.get("timeline_entries", []) result["timeline_entries"] = timeline_entries - await ctx.info(f"Added {len(timeline_entries)} timeline entries to ticket {normalized_id}") + await ctx.info(f"Added {len(timeline_entries)} timeline entries to ticket {ticket_id}") # Extract artifact data directly from timeline entries (no additional API calls needed) artifacts = [] @@ -80,7 +80,7 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: artifact_id_clean = artifact_id.split("/")[-1] if "/" in artifact_id else artifact_id artifact["links"] = { "download": f"devrev://artifacts/{artifact_id_clean}/download", - "ticket": f"devrev://tickets/{ticket_id}" + "ticket": f"devrev://tickets/{ticket_number}" } artifacts.append(artifact) elif isinstance(artifact, str): @@ -92,31 +92,42 @@ async def ticket(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: "id": artifact, "links": { "download": f"devrev://artifacts/{artifact_id_clean}/download", - "ticket": f"devrev://tickets/{ticket_id}" + "ticket": f"devrev://tickets/{ticket_number}" } }) result["artifacts"] = artifacts - await 
ctx.info(f"Extracted {len(artifacts)} artifacts from timeline entries for ticket {normalized_id}") + await ctx.info(f"Extracted {len(artifacts)} artifacts from timeline entries for ticket {ticket_id}") else: - await ctx.warning(f"Could not fetch timeline entries for ticket {normalized_id}") + await ctx.warning(f"Could not fetch timeline entries for ticket {ticket_id}") result["timeline_entries"] = [] result["artifacts"] = [] except Exception as e: - await ctx.warning(f"Error fetching timeline entries for ticket {normalized_id}: {str(e)}") + await ctx.warning(f"Error fetching timeline entries for ticket {ticket_id}: {str(e)}") result["timeline_entries"] = [] result["artifacts"] = [] + # Get linked work items using the reusable utility function + work_item_don_id = result.get("id", ticket_id) # Use the full don:core ID from the API response + linked_work_items = await fetch_linked_work_items( + work_item_id=work_item_don_id, + work_item_display_id=ticket_id, + work_item_type="ticket", + ctx=ctx, + cache=devrev_cache + ) + # Add navigation links result["links"] = { - "timeline": f"devrev://tickets/{ticket_id}/timeline", - "artifacts": f"devrev://tickets/{ticket_id}/artifacts" + "timeline": f"devrev://tickets/{ticket_number}/timeline", + "artifacts": f"devrev://tickets/{ticket_number}/artifacts", + "works": linked_work_items } # Cache the enriched result - cache_value = json.dumps(result, indent=2) + cache_value = json.dumps(result, indent=2, default=str) devrev_cache.set(cache_key, cache_value) - await ctx.info(f"Successfully retrieved and cached ticket: {normalized_id}") + await ctx.info(f"Successfully retrieved and cached ticket: {ticket_number}") return cache_value \ No newline at end of file diff --git a/src/devrev_mcp/resources/timeline.py b/src/devrev_mcp/resources/timeline.py index 0204d5b..faef54f 100644 --- a/src/devrev_mcp/resources/timeline.py +++ b/src/devrev_mcp/resources/timeline.py @@ -6,7 +6,7 @@ import json from fastmcp import Context -from ..utils 
import make_devrev_request, normalize_ticket_id +from ..utils import make_devrev_request from ..types import VisibilityInfo, format_visibility_summary from ..error_handler import resource_error_handler from ..endpoints import WORKS_GET, TIMELINE_ENTRIES_LIST @@ -23,23 +23,21 @@ async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: JSON string containing enriched timeline with customer context and conversation flow """ try: - # Normalize ticket ID for API calls - normalized_id = normalize_ticket_id(ticket_id) - + # ticket_id is already normalized by server.py pattern matching cache_key = f"ticket_timeline:{ticket_id}" # Check cache first cached_value = devrev_cache.get(cache_key) if cached_value is not None: - await ctx.info(f"Retrieved timeline for {normalized_id} from cache") + await ctx.info(f"Retrieved timeline for {ticket_id} from cache") return cached_value - await ctx.info(f"Fetching timeline for {normalized_id} from DevRev API") + await ctx.info(f"Fetching timeline for {ticket_id} from DevRev API") # Get ticket details for customer and workspace info - ticket_response = make_devrev_request(WORKS_GET, {"id": normalized_id}) + ticket_response = make_devrev_request(WORKS_GET, {"id": ticket_id}) if ticket_response.status_code != 200: - raise ValueError(f"Failed to fetch ticket {normalized_id}") + raise ValueError(f"Failed to fetch ticket {ticket_id}") ticket_data = ticket_response.json() work = ticket_data.get("work", {}) @@ -52,7 +50,7 @@ async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: while page_count < max_pages: request_payload = { - "object": normalized_id, + "object": ticket_id, "limit": 50 # Use DevRev's default limit } if cursor: @@ -65,7 +63,7 @@ async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: ) if timeline_response.status_code != 200: - raise ValueError(f"Failed to fetch timeline for {normalized_id}") + raise ValueError(f"Failed to fetch timeline for {ticket_id}") 
timeline_data = timeline_response.json() page_entries = timeline_data.get("timeline_entries", []) @@ -81,7 +79,7 @@ async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: if not cursor or len(page_entries) == 0: break - await ctx.info(f"DEBUG: Found {len(all_entries)} timeline entries for {normalized_id}") + await ctx.info(f"DEBUG: Found {len(all_entries)} timeline entries for {ticket_id}") # Extract customer information customer_info = {} @@ -96,7 +94,7 @@ async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: # Build enriched schema result = { "summary": { - "ticket_id": normalized_id, + "ticket_id": ticket_id, "customer": customer_info.get("email", customer_info.get("name", "Unknown")), "workspace": work.get("owned_by", [{}])[0].get("display_name", "Unknown Workspace") if work.get("owned_by") else "Unknown Workspace", "subject": work.get("title", "No title"), @@ -279,7 +277,7 @@ async def timeline(ticket_id: str, ctx: Context, devrev_cache: dict) -> str: # Cache the enriched result cache_value = json.dumps(result, indent=2) devrev_cache.set(cache_key, cache_value) - await ctx.info(f"Successfully retrieved and cached timeline: {normalized_id}") + await ctx.info(f"Successfully retrieved and cached timeline: {ticket_id}") return cache_value diff --git a/src/devrev_mcp/resources/timeline_entry.py b/src/devrev_mcp/resources/timeline_entry.py index 0bfb8dd..9072de2 100644 --- a/src/devrev_mcp/resources/timeline_entry.py +++ b/src/devrev_mcp/resources/timeline_entry.py @@ -6,7 +6,7 @@ import json from fastmcp import Context -from ..utils import make_devrev_request, extract_ticket_id_from_object +from ..utils import make_devrev_request from ..error_handler import resource_error_handler from ..endpoints import TIMELINE_ENTRIES_GET @@ -54,7 +54,7 @@ async def timeline_entry(timeline_id: str, ctx: Context, devrev_cache: dict) -> if "object" in result: object_id = result["object"] if "TKT-" in object_id: - ticket_id = 
extract_ticket_id_from_object(object_id) + ticket_id = object_id.replace("TKT-", "") links = {} if ticket_id: diff --git a/src/devrev_mcp/resources/work.py b/src/devrev_mcp/resources/work.py new file mode 100644 index 0000000..c735773 --- /dev/null +++ b/src/devrev_mcp/resources/work.py @@ -0,0 +1,129 @@ +""" +Copyright (c) 2025 DevRev, Inc. +SPDX-License-Identifier: MIT + +DevRev Work Item Resource + +Provides a unified resource for accessing work items (tickets, issues, etc.) +via the devrev://work/{work_id} URI format. +""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..endpoints import WORKS_GET +from ..error_handler import resource_error_handler + + +@resource_error_handler("works") +async def works(work_id: str, ctx: Context, cache=None) -> str: + """ + Access DevRev work item details using unified work ID format. + + Args: + work_id: The DevRev work ID (e.g., TKT-12345, ISS-9031, or numeric ID) + ctx: FastMCP context + cache: Optional cache instance + + Returns: + JSON string containing the work item data with navigation links + """ + try: + await ctx.info(f"Fetching work item {work_id}") + + # Check cache first if available + cache_key = f"work_{work_id}" + if cache: + cached_result = cache.get(cache_key) + if cached_result: + await ctx.info(f"Using cached data for work item {work_id}") + return cached_result + + # Normalize work_id to the format expected by the API + normalized_work_id = work_id + + # The DevRev works.get API accepts both display IDs (TKT-12345, ISS-9031) + # and full don:core IDs, so we can pass them directly + # No transformation needed - the API handles both formats + + # Make API request to get work item details using works.get + payload = { + "id": normalized_work_id + } + + response = make_devrev_request(WORKS_GET, payload) + + if response.status_code != 200: + await ctx.error(f"DevRev API returned status {response.status_code}") + return json.dumps({ + "error": f"Failed to fetch work 
item {work_id}", + "status_code": response.status_code, + "message": response.text + }) + + data = response.json() + work_item = data.get("work") + + if not work_item: + return json.dumps({ + "error": f"Work item {work_id} not found", + "message": "No work item found with the provided ID" + }) + work_type = work_item.get("type", "unknown") + + # Enhance the work item data with navigation links + enhanced_work = { + **work_item, + "links": _build_navigation_links(work_item), + "metadata": { + "resource_type": "work", + "work_type": work_type, + "fetched_at": data.get("next_cursor", ""), + "api_version": "v1" + } + } + + result = json.dumps(enhanced_work, indent=2, default=str) + + # Cache the result if cache is available + if cache: + cache.set(cache_key, result) # Cache result + + return result + + except Exception as e: + await ctx.error(f"Failed to fetch work item {work_id}: {str(e)}") + return json.dumps({ + "error": f"Failed to fetch work item {work_id}", + "message": str(e) + }) + + +def _build_navigation_links(work_item: dict) -> dict: + """ + Build navigation links for a work item based on its type and ID. 
+ + Args: + work_item: The work item data from DevRev API + + Returns: + Dictionary of navigation links + """ + display_id = work_item.get("display_id", "") + work_type = work_item.get("type", "unknown") + + links = { + "self": f"devrev://work/{display_id}", + } + + # Add type-specific links + if work_type == "ticket" and display_id.startswith("TKT-"): + # Extract numeric ID for ticket-specific resources + numeric_id = display_id.replace("TKT-", "") + links.update({ + "ticket": f"devrev://tickets/{numeric_id}", + "timeline": f"devrev://tickets/{numeric_id}/timeline", + "artifacts": f"devrev://tickets/{numeric_id}/artifacts" + }) + + return links \ No newline at end of file diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 4a538b7..016574b 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -5,6 +5,7 @@ This module implements the FastMCP server for DevRev integration. """ +import json from typing import Dict, Any from fastmcp import FastMCP, Context @@ -16,12 +17,17 @@ from .resources.timeline_entry import timeline_entry as timeline_entry_resource from .resources.artifact import artifact as artifact_resource from .resources.ticket_artifacts import ticket_artifacts as ticket_artifacts_resource +from .resources.work import works as work_resource +from .resources.issue import issue as issue_resource from .tools.get_timeline_entries import get_timeline_entries as get_timeline_entries_tool from .tools.get_ticket import get_ticket as get_ticket_tool from .tools.search import search as search_tool from .tools.create_object import create_object as create_object_tool from .tools.update_object import update_object as update_object_tool from .tools.download_artifact import download_artifact as download_artifact_tool +from .tools.get_work import get_work as get_work_tool +from .tools.get_issue import get_issue as get_issue_tool +from .tools.create_timeline_comment import create_timeline_comment as create_timeline_comment_tool # Import 
new types for visibility handling from .types import VisibilityInfo, TimelineEntryType, format_visibility_summary @@ -38,7 +44,26 @@ @mcp.tool( name="search", - description="Search DevRev objects using hybrid search. Supports natural language queries across tickets, issues, articles, parts, and users. Returns enriched results with metadata, ownership, status, and organizational context for efficient triage and analysis.", + description="""Search DevRev objects using hybrid search across multiple content types. + +This tool performs intelligent search across DevRev data using natural language queries. +It combines semantic understanding with exact matching to find relevant content. + +Supported namespaces: +- article: Search knowledge base articles and documentation +- issue: Search internal issues and bug reports +- ticket: Search customer support tickets +- part: Search product parts and components +- dev_user: Search team members and user profiles + +Usage examples: +- Find tickets about a specific technology: query="python dependency issues", namespace="ticket" +- Search for team expertise: query="frontend react developer", namespace="dev_user" +- Look up documentation: query="API authentication guide", namespace="article" +- Find related issues: query="memory leak in scanner", namespace="issue" + +Returns enriched results with metadata, ownership, status, and organizational context +for efficient triage and analysis.""", tags=["search", "devrev", "tickets", "issues", "articles", "hybrid-search"] ) async def search(query: str, namespace: str, ctx: Context) -> str: @@ -112,70 +137,134 @@ async def update_object( # Specialized resource handlers for different DevRev object types +# Resource metadata constants +TICKET_RESOURCE_DESCRIPTION = "Access comprehensive DevRev ticket information with enriched timeline and artifacts. Supports multiple URI formats: numeric (12345), TKT format (TKT-12345), and full don:core IDs." 
+TICKET_RESOURCE_TAGS = ["ticket", "devrev", "customer-support", "enriched", "navigation"] + +TIMELINE_RESOURCE_DESCRIPTION = "Access enriched ticket timeline with conversation flow, artifacts, and detailed visibility information. Includes customer context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail." +TIMELINE_RESOURCE_TAGS = ["timeline", "devrev", "customer-support", "enriched", "conversation", "visibility", "audit"] + +TIMELINE_ENTRY_RESOURCE_DESCRIPTION = "Access individual timeline entry with detailed conversation data and navigation links. Provides specific entry context within ticket timeline." +TIMELINE_ENTRY_RESOURCE_TAGS = ["timeline-entry", "devrev", "customer-support", "conversation", "navigation"] + +TICKET_ARTIFACTS_RESOURCE_DESCRIPTION = "Access all artifacts associated with a specific ticket. Returns collection of files, screenshots, and documents with download links and metadata." +TICKET_ARTIFACTS_RESOURCE_TAGS = ["artifacts", "devrev", "customer-support", "collection", "files", "navigation"] + +ARTIFACT_RESOURCE_DESCRIPTION = "Access DevRev artifact metadata with temporary download URLs. Provides file information, content type, and secure download links." +ARTIFACT_RESOURCE_TAGS = ["artifact", "devrev", "files", "metadata", "download"] + +WORK_RESOURCE_DESCRIPTION = "Access any DevRev work item with unified interface for tickets, issues, and other work types. Supports display ID formats (TKT-12345, ISS-9031) with navigation links." +WORK_RESOURCE_TAGS = ["work", "devrev", "unified", "tickets", "issues", "navigation"] + +ISSUE_RESOURCE_DESCRIPTION = "Access comprehensive DevRev issue information with enriched timeline and artifacts. Supports multiple URI formats: numeric (9031), ISS format (ISS-9031), and full don:core IDs." 
+ISSUE_RESOURCE_TAGS = ["issue", "devrev", "internal-work", "enriched", "navigation"] + +# Additional resource patterns for increased exposure +TIMELINE_ALT_RESOURCE_DESCRIPTION = "Access ticket timeline with alternative URI formats. Supports TKT- format and numeric IDs for flexible timeline access." +TIMELINE_ALT_RESOURCE_TAGS = ["timeline", "devrev", "customer-support", "enriched", "alternative-access"] + @mcp.resource( uri="devrev://tickets/{ticket_id}", - description="Access comprehensive DevRev ticket information with navigation links to related resources. Includes customer details, status progression, assignment history, and navigation to timeline and artifacts.", - tags=["ticket", "devrev", "customer-support", "navigation"] + description=TICKET_RESOURCE_DESCRIPTION, + tags=TICKET_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://tickets/TKT-{ticket_number}", + description=TICKET_RESOURCE_DESCRIPTION, + tags=TICKET_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}", + description=TICKET_RESOURCE_DESCRIPTION, + tags=TICKET_RESOURCE_TAGS ) -async def ticket(ticket_id: str, ctx: Context) -> str: +async def ticket(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: """ Access DevRev ticket details with navigation links. + Supports multiple URI formats - all normalize to numeric ticket ID. 
Args: - ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345) + ticket_id: The DevRev ticket ID (numeric, e.g., 12345) + ticket_number: The numeric part of the ticket ID (e.g., 12345 for TKT-12345) + dev_org_id: The dev org ID (e.g., 118WAPdKBc) - unused but required for don:core format + ctx: FastMCP context Returns: JSON string containing the ticket data with navigation links """ - return await ticket_resource(ticket_id, ctx, devrev_cache) + # Normalize to ticket number - all formats end up as the numeric ID + numeric_id = ticket_id or ticket_number + return await ticket_resource(numeric_id, ctx, devrev_cache) @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline", - description=""" - Access enriched timeline for a ticket with customer context, conversation flow, - artifacts, and detailed visibility information. - - Returns token-efficient structured format focusing on support workflow with - comprehensive visibility data: - - Each entry includes visibility_info showing who can see it (private/internal/external/public) - - Summary includes visibility breakdown and customer-visible percentage - - Visual indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ) help identify visibility levels at a glance - - Visibility levels: private (creator only), internal (dev org), external (dev org + customers), public (everyone) - """, - tags=["timeline", "enriched", "devrev", "conversation", "artifacts", "visibility"] + description=TIMELINE_RESOURCE_DESCRIPTION, + tags=TIMELINE_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://timeline/{ticket_id}", + description=TIMELINE_ALT_RESOURCE_DESCRIPTION, + tags=TIMELINE_ALT_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://timeline/TKT-{ticket_number}", + description=TIMELINE_ALT_RESOURCE_DESCRIPTION, + tags=TIMELINE_ALT_RESOURCE_TAGS ) -async def ticket_timeline(ticket_id: str, ctx: Context) -> str: +async def ticket_timeline(ticket_id: str = None, ticket_number: str = None, ctx: Context = None) -> str: """ Access enriched timeline for a ticket with 
structured conversation format. + Supports multiple URI formats for flexible access. Args: - ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345) + ticket_id: The DevRev ticket ID (numeric, e.g., 12345) + ticket_number: The numeric part of the ticket ID (e.g., 12345 for TKT-12345) + ctx: FastMCP context Returns: JSON string containing enriched timeline with customer context and conversation flow """ - return await timeline_resource(ticket_id, ctx, devrev_cache) + # Normalize to ticket number + numeric_id = ticket_id or ticket_number + return await timeline_resource(numeric_id, ctx, devrev_cache) @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline/{entry_id}", - description="Access individual timeline entry with detailed conversation data and navigation links.", - tags=["timeline", "entry", "devrev", "conversation"] + description=TIMELINE_ENTRY_RESOURCE_DESCRIPTION, + tags=TIMELINE_ENTRY_RESOURCE_TAGS ) -async def timeline_entry(ticket_id: str, entry_id: str, ctx: Context) -> str: +@mcp.resource( + uri="devrev://tickets/TKT-{ticket_number}/timeline/{entry_id}", + description=TIMELINE_ENTRY_RESOURCE_DESCRIPTION, + tags=TIMELINE_ENTRY_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}/timeline/{entry_id}", + description=TIMELINE_ENTRY_RESOURCE_DESCRIPTION, + tags=TIMELINE_ENTRY_RESOURCE_TAGS +) +async def timeline_entry(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, entry_id: str = None, ctx: Context = None) -> str: """ Access specific timeline entry details. + Supports multiple URI formats for flexible access. 
Args: - ticket_id: The DevRev ticket ID + ticket_id: The DevRev ticket ID (numeric, e.g., 12345) + ticket_number: The numeric part of the ticket ID (e.g., 12345 for TKT-12345) + dev_org_id: The dev org ID (e.g., 118WAPdKBc) - unused but required for don:core format entry_id: The timeline entry ID + ctx: FastMCP context Returns: JSON string containing the timeline entry data with links """ + # Normalize to ticket number + numeric_id = ticket_id or ticket_number + # Construct full timeline ID if needed if not entry_id.startswith("don:core:"): # This is a simplified ID, we'll need to fetch it via the ticket timeline - return await timeline_resource(ticket_id, ctx) + return await timeline_resource(numeric_id, ctx, devrev_cache) result = await timeline_entry_resource(entry_id, ctx, devrev_cache) @@ -183,33 +272,49 @@ async def timeline_entry(ticket_id: str, entry_id: str, ctx: Context) -> str: import json entry_data = json.loads(result) entry_data["links"] = { - "ticket": f"devrev://tickets/{ticket_id}", - "timeline": f"devrev://tickets/{ticket_id}/timeline" + "ticket": f"devrev://tickets/{numeric_id}", + "timeline": f"devrev://tickets/{numeric_id}/timeline" } return json.dumps(entry_data, indent=2) @mcp.resource( uri="devrev://tickets/{ticket_id}/artifacts", - description="Access all artifacts associated with a specific ticket. 
Returns list of files, screenshots, and documents attached to the ticket.", - tags=["artifacts", "collection", "devrev", "ticket-artifacts"] + description=TICKET_ARTIFACTS_RESOURCE_DESCRIPTION, + tags=TICKET_ARTIFACTS_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://tickets/TKT-{ticket_number}/artifacts", + description=TICKET_ARTIFACTS_RESOURCE_DESCRIPTION, + tags=TICKET_ARTIFACTS_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}/artifacts", + description=TICKET_ARTIFACTS_RESOURCE_DESCRIPTION, + tags=TICKET_ARTIFACTS_RESOURCE_TAGS ) -async def ticket_artifacts(ticket_id: str, ctx: Context) -> str: +async def ticket_artifacts(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: """ Access all artifacts for a ticket. + Supports multiple URI formats for flexible access. Args: - ticket_id: The DevRev ticket ID (e.g., 12345 for TKT-12345) + ticket_id: The DevRev ticket ID (numeric, e.g., 12345) + ticket_number: The numeric part of the ticket ID (e.g., 12345 for TKT-12345) + dev_org_id: The dev org ID (e.g., 118WAPdKBc) - unused but required for don:core format + ctx: FastMCP context Returns: JSON string containing artifacts with navigation links """ - return await ticket_artifacts_resource(ticket_id, ctx, devrev_cache) + # Normalize to ticket number + numeric_id = ticket_id or ticket_number + return await ticket_artifacts_resource(numeric_id, ctx, devrev_cache) @mcp.resource( uri="devrev://artifacts/{artifact_id}", - description="Access DevRev artifact metadata with temporary download URLs.", - tags=["artifact", "devrev", "files"] + description=ARTIFACT_RESOURCE_DESCRIPTION, + tags=ARTIFACT_RESOURCE_TAGS ) async def artifact(artifact_id: str, ctx: Context) -> str: """ @@ -226,6 +331,164 @@ async def artifact(artifact_id: str, ctx: Context) -> str: # Return the artifact data directly return result +@mcp.resource( + 
uri="devrev://works/don:core:dvrv-us-1:devo/{dev_org_id}:{work_type}/{work_number}", + description=WORK_RESOURCE_DESCRIPTION, + tags=WORK_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://works/{work_id}", + description=WORK_RESOURCE_DESCRIPTION, + tags=WORK_RESOURCE_TAGS +) +async def works(ctx: Context, work_id: str | None = None, work_type: str | None = None, work_number: str | None = None, dev_org_id: str | None = None) -> str: + """ + Access DevRev work item details using unified work ID format. + + Args: + work_id: The DevRev work ID (e.g., TKT-12345, ISS-9031) + + Returns: + JSON string containing the work item data with navigation links + """ + if work_id is not None: + return await work_resource(work_id, ctx, devrev_cache) + elif work_type is not None and work_number is not None: + work_id = f"{work_type}-{work_number}" + return await work_resource(work_id, ctx, devrev_cache) + else: + raise ValueError("work_type and work_number are required if work_id is not provided") + + +@mcp.resource( + uri="devrev://issues/{issue_number}", + description=ISSUE_RESOURCE_DESCRIPTION, + tags=ISSUE_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://issues/ISS-{issue_number}", + description=ISSUE_RESOURCE_DESCRIPTION, + tags=ISSUE_RESOURCE_TAGS +) +@mcp.resource( + uri="devrev://issues/don:core:dvrv-us-1:devo/{dev_org_id}:issue/{issue_number}", + description=ISSUE_RESOURCE_DESCRIPTION, + tags=ISSUE_RESOURCE_TAGS +) +async def issue(issue_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: + """ + Access DevRev issue details with navigation links. + Supports multiple URI formats - all normalize to numeric issue number. 
+ + Args: + issue_id: The DevRev issue ID (numeric, e.g., 9031) + issue_number: The numeric part of the issue ID (e.g., 9031 for ISS-9031) + dev_org_id: The dev org ID (e.g., 118WAPdKBc) - unused but required for don:core format + ctx: FastMCP context + + Returns: + JSON string containing the issue data with navigation links + """ + # Normalize to issue number - all formats end up as the numeric ID + return await issue_resource(issue_number, ctx, devrev_cache) + + +@mcp.resource( + uri="devrev://issues/{issue_id}/timeline", + description="Access enriched issue timeline with conversation flow, artifacts, and detailed visibility information. Includes internal context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail.", + tags=["issue-timeline", "devrev", "internal-work", "enriched", "conversation", "visibility", "audit"] +) +@mcp.resource( + uri="devrev://issues/ISS-{issue_number}/timeline", + description="Access enriched issue timeline with conversation flow, artifacts, and detailed visibility information. Includes internal context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail.", + tags=["issue-timeline", "devrev", "internal-work", "enriched", "conversation", "visibility", "audit"] +) +async def issue_timeline(issue_id: str = None, issue_number: str = None, ctx: Context = None) -> str: + """ + Access enriched timeline for an issue with structured conversation format. + Supports multiple URI formats for flexible access. 
+ + Args: + issue_id: The DevRev issue ID (numeric, e.g., 9031) + issue_number: The numeric part of the issue ID (e.g., 9031 for ISS-9031) + ctx: FastMCP context + + Returns: + JSON string containing enriched timeline with internal context and conversation flow + """ + # Normalize to issue number + numeric_id = issue_id or issue_number + + # Get issue data to extract timeline + issue_data_str = await issue_resource(numeric_id, ctx, devrev_cache) + issue_data = json.loads(issue_data_str) + timeline_entries = issue_data.get("timeline_entries", []) + + # Build simplified timeline structure for issues + result = { + "issue_id": f"ISS-{numeric_id}", + "timeline_entries": timeline_entries, + "total_entries": len(timeline_entries), + "links": { + "issue": f"devrev://issues/{numeric_id}", + "artifacts": f"devrev://issues/{numeric_id}/artifacts" + } + } + + return json.dumps(result, indent=2) + + +@mcp.resource( + uri="devrev://issues/{issue_id}/artifacts", + description="Access all artifacts associated with a specific issue. Returns collection of files, screenshots, and documents with download links and metadata.", + tags=["issue-artifacts", "devrev", "internal-work", "collection", "files", "navigation"] +) +@mcp.resource( + uri="devrev://issues/ISS-{issue_number}/artifacts", + description="Access all artifacts associated with a specific issue. Returns collection of files, screenshots, and documents with download links and metadata.", + tags=["issue-artifacts", "devrev", "internal-work", "collection", "files", "navigation"] +) +async def issue_artifacts(issue_id: str = None, issue_number: str = None, ctx: Context = None) -> str: + """ + Access all artifacts for an issue. + Supports multiple URI formats for flexible access. 
+ + Args: + issue_id: The DevRev issue ID (numeric, e.g., 9031) + issue_number: The numeric part of the issue ID (e.g., 9031 for ISS-9031) + ctx: FastMCP context + + Returns: + JSON string containing artifacts with navigation links + """ + # Normalize to issue number + numeric_id = issue_id or issue_number + + # Get issue data to extract artifacts + issue_data_str = await issue_resource(numeric_id, ctx, devrev_cache) + issue_data = json.loads(issue_data_str) + artifacts = issue_data.get("artifacts", []) + + # Add navigation links to each artifact + for artifact in artifacts: + artifact_id = artifact.get("id", "").split("/")[-1] if artifact.get("id") else "" + if artifact_id: + artifact["links"] = { + "self": f"devrev://artifacts/{artifact_id}", + "issue": f"devrev://issues/{numeric_id}" + } + + result = { + "artifacts": artifacts, + "total_artifacts": len(artifacts), + "links": { + "issue": f"devrev://issues/{numeric_id}", + "timeline": f"devrev://issues/{numeric_id}/timeline" + } + } + + return json.dumps(result, indent=2) + @mcp.tool( name="get_timeline_entries", @@ -291,6 +554,64 @@ async def download_artifact(artifact_id: str, download_directory: str, ctx: Cont """ return await download_artifact_tool(artifact_id, download_directory, ctx) +@mcp.tool( + name="get_work", + description="Get any DevRev work item (ticket, issue, etc.) by ID. Supports unified access to all work item types using their display IDs like TKT-12345, ISS-9031, etc.", + tags=["work", "devrev", "tickets", "issues", "unified", "get"] +) +async def get_work(id: str, ctx: Context) -> str: + """ + Get a DevRev work item by ID. + + Args: + id: The DevRev work ID - accepts TKT-12345, ISS-9031, or any work item format + + Returns: + JSON string containing the work item data + """ + return await get_work_tool(id, ctx) + +@mcp.tool( + name="get_issue", + description="Get a DevRev issue by ID. 
Supports unified access to issue using display IDs like ISS-9031, numeric IDs, or full don:core format.", + tags=["issue", "devrev", "internal-work", "get"] +) +async def get_issue(id: str, ctx: Context) -> str: + """ + Get a DevRev issue by ID. + + Args: + id: The DevRev issue ID - accepts ISS-9031, 9031, or full don:core format + + Returns: + JSON string containing the issue data + """ + return await get_issue_tool(id, ctx) + +@mcp.tool( + name="create_timeline_comment", + description="""Create an internal timeline comment on a DevRev ticket. + +Adds a comment to the ticket's timeline that is only visible to internal team members +for documentation and collaboration purposes. + +โš ๏ธ REQUIRES MANUAL REVIEW - This tool modifies ticket data and should always be +manually approved before execution.""", + tags=["timeline", "comment", "devrev", "internal", "create", "dangerous"] +) +async def create_timeline_comment(work_id: str, body: str, ctx: Context) -> str: + """ + Create an internal timeline comment on a DevRev ticket. + + Args: + work_id: The DevRev work ID (e.g., "12345", "TKT-12345", "ISS-12345) + body: The comment text to add to the timeline + + Returns: + JSON string containing the created timeline entry data + """ + return await create_timeline_comment_tool(work_id, body, ctx) + def main(): """Main entry point for the DevRev MCP server.""" # Run the server diff --git a/src/devrev_mcp/tools/create_timeline_comment.py b/src/devrev_mcp/tools/create_timeline_comment.py new file mode 100644 index 0000000..bf024fc --- /dev/null +++ b/src/devrev_mcp/tools/create_timeline_comment.py @@ -0,0 +1,103 @@ +""" +DevRev Create Timeline Comment Tool + +Provides a tool for creating internal timeline comments on DevRev tickets and issues. 
+""" + +import json +from fastmcp import Context +from ..error_handler import tool_error_handler +from ..endpoints import TIMELINE_ENTRIES_CREATE +from ..utils import make_devrev_request, read_resource_content + + +@tool_error_handler("create_timeline_comment") +async def create_timeline_comment( + work_id: str, + body: str, + ctx: Context +) -> str: + """ + Create an internal timeline comment on a DevRev ticket or issue. + + Args: + work_id: The DevRev work item ID (e.g., "12345", "TKT-12345", "ISS-9465") + body: The comment text to add to the timeline + ctx: FastMCP context + + Returns: + JSON string containing the created timeline entry data + """ + try: + await ctx.info(f"Creating timeline comment on work item {work_id}") + + resource_uri = f"devrev://works/{work_id}" + await ctx.info(f"Constructed resource URI: {resource_uri}") + + try: + work_item = await read_resource_content(ctx, resource_uri, parse_json=True) + await ctx.info(f"Successfully retrieved work item. Keys: {list(work_item.keys()) if work_item else 'None'}") + await ctx.info(f"Work item type: {type(work_item)}") + if work_item: + await ctx.info(f"Work item sample: {json.dumps(dict(list(work_item.items())[:5]), indent=2)}") + except Exception as e: + await ctx.error(f"Failed to read resource content for {resource_uri}: {str(e)}") + # Try alternative formats + alternative_uris = [ + f"devrev://tickets/{work_id}", + f"devrev://issues/{work_id}", + f"devrev://works/{work_id.replace('TKT-', '').replace('ISS-', '')}" + ] + for alt_uri in alternative_uris: + try: + await ctx.info(f"Trying alternative URI: {alt_uri}") + work_item = await read_resource_content(ctx, alt_uri, parse_json=True) + await ctx.info(f"Success with alternative URI: {alt_uri}") + break + except Exception as alt_e: + await ctx.info(f"Alternative URI {alt_uri} failed: {str(alt_e)}") + else: + raise e + + # Extract the object ID from the work item - this should be the full don:core ID + object_id = work_item.get("id") + await 
ctx.info(f"Raw object_id extracted: {object_id}") + await ctx.info(f"Object ID type: {type(object_id)}") + + if not object_id: + await ctx.error(f"Work item: {work_item}") + raise ValueError(f"Could not extract object ID from work item {work_id}\n{work_item}") + + + + await ctx.info(f"Using object ID: {object_id}") + + # Prepare the payload for timeline comment creation using the full object ID + payload = { + "body": body, + "body_type": "text", + "object": object_id, + "type": "timeline_comment", + "collections": ["discussions"], + "visibility": "internal" + } + + await ctx.info(f"Creating comment with payload: {json.dumps(payload, indent=2)}") + + # Make the API request + response = make_devrev_request(TIMELINE_ENTRIES_CREATE, payload) + + if response.status_code == 200 or response.status_code == 201: + result_data = response.json() + await ctx.info(f"Successfully created timeline comment on work item {work_id}") + return json.dumps(result_data, indent=2) + else: + error_msg = f"Failed to create timeline comment: HTTP {response.status_code}" + if response.text: + error_msg += f" - {response.text}" + await ctx.error(error_msg) + raise Exception(error_msg) + + except Exception as e: + await ctx.error(f"Failed to create timeline comment on work item {work_id}: {str(e)}") + raise \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_issue.py b/src/devrev_mcp/tools/get_issue.py new file mode 100644 index 0000000..924bd6f --- /dev/null +++ b/src/devrev_mcp/tools/get_issue.py @@ -0,0 +1,34 @@ +""" +DevRev Get Issue Tool + +Tool for retrieving DevRev issue information with enriched timeline and linked work items. +""" + +from fastmcp import Context +from ..resources.issue import issue +from ..cache import devrev_cache + + +async def get_issue(issue_id: str, ctx: Context) -> str: + """ + Get a DevRev issue by ID with enriched timeline entries and linked work items. 
+ + Args: + issue_id: The DevRev issue ID - accepts ISS-9031, 9031, or full don:core format + ctx: FastMCP context + + Returns: + JSON string containing the enriched issue data with timeline and links + """ + # Extract numeric ID from various formats + if issue_id.startswith("ISS-"): + issue_number = issue_id[4:] + elif issue_id.startswith("don:core:"): + # Extract from full don format + issue_number = issue_id.split("/")[-1] + else: + # Assume it's already a numeric ID + issue_number = issue_id + + # Use the enriched issue resource + return await issue(issue_number, ctx, devrev_cache) \ No newline at end of file diff --git a/src/devrev_mcp/tools/get_ticket.py b/src/devrev_mcp/tools/get_ticket.py index 6c1c0b4..3756fe1 100644 --- a/src/devrev_mcp/tools/get_ticket.py +++ b/src/devrev_mcp/tools/get_ticket.py @@ -7,6 +7,7 @@ import json from fastmcp import Context from ..error_handler import tool_error_handler +from ..utils import read_resource_content @tool_error_handler("get_ticket") @@ -18,55 +19,35 @@ async def get_ticket( Get a DevRev ticket with all associated timeline entries and artifacts. 
Args: - id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format + id: The DevRev ticket ID - accepts TKT-12345, ISS-9031, or any work item format ctx: FastMCP context Returns: JSON string containing the ticket data with timeline entries and artifacts """ try: - # Normalize the ticket ID to just the number + await ctx.info(f"Fetching work item {id} with timeline entries and artifacts") + + # Use different resource depending on ID format if id.upper().startswith("TKT-"): - ticket_id = id[4:] # Remove TKT- prefix - elif id.startswith("don:core:"): - # Extract ID from don:core format - ticket_id = id.split(":")[-1] + # Extract numeric part for tickets resource + numeric_id = id.replace("TKT-", "").replace("tkt-", "") + resource_uri = f"devrev://tickets/{numeric_id}" + elif id.upper().startswith("ISS-"): + # Use works resource for issues + resource_uri = f"devrev://works/{id}" else: - ticket_id = id - - await ctx.info(f"Fetching ticket {ticket_id} with timeline entries and artifacts") + # Use tickets resource for don:core IDs or numeric IDs + resource_uri = f"devrev://tickets/{id}" - # Get the main ticket data - ticket_uri = f"devrev://tickets/{ticket_id}" try: - resource_contents = await ctx.read_resource(ticket_uri) - - if resource_contents and len(resource_contents) > 0: - # Handle multiple contents by trying each until we find valid JSON - if len(resource_contents) > 1: - await ctx.warning(f"Multiple resource contents returned ({len(resource_contents)}), trying each for valid JSON") - - ticket_data = None - for i, content_item in enumerate(resource_contents): - try: - ticket_data = json.loads(content_item.content) - if i > 0: - await ctx.info(f"Successfully parsed JSON from content item {i}") - break - except json.JSONDecodeError as e: - await ctx.warning(f"Content item {i} is not valid JSON: {e}") - continue - - if ticket_data is None: - raise ValueError(f"No valid JSON found in any of the {len(resource_contents)} resource contents") - else: - 
raise ValueError(f"No resource contents returned for {ticket_uri}") + ticket_data = await read_resource_content(ctx, resource_uri, parse_json=True) except Exception as ticket_error: - await ctx.error(f"Error reading ticket resource {ticket_uri}: {str(ticket_error)}") + await ctx.error(f"Error reading ticket resource {resource_uri}: {str(ticket_error)}") raise ticket_error if not ticket_data: - return f"No ticket found with ID {ticket_id}" + return f"No ticket found with ID {id}" # Handle case where ticket_data is unexpectedly a list if isinstance(ticket_data, list): @@ -74,54 +55,36 @@ async def get_ticket( if len(ticket_data) > 0: ticket_data = ticket_data[0] else: - return f"No ticket data found for ID {ticket_id}" + return f"No ticket data found for ID {id}" # Ensure ticket_data is a dict if not isinstance(ticket_data, dict): await ctx.error(f"ticket_data is not a dict, type: {type(ticket_data)}, value: {repr(ticket_data)}") - return f"Invalid ticket data format for ID {ticket_id} (type: {type(ticket_data)})" - - # Get timeline entries - timeline_uri = f"devrev://tickets/{ticket_id}/timeline" - try: - timeline_contents = await ctx.read_resource(timeline_uri) - if timeline_contents and len(timeline_contents) > 0: - # Try each content item for valid JSON - timeline_data = None - for i, content_item in enumerate(timeline_contents): - try: - timeline_data = json.loads(content_item.content) - break - except json.JSONDecodeError: - continue - - ticket_data["timeline_entries"] = timeline_data if timeline_data else [] - else: - ticket_data["timeline_entries"] = [] - except Exception as timeline_error: - await ctx.warning(f"Error reading timeline entries: {str(timeline_error)}") - ticket_data["timeline_entries"] = [] + return f"Invalid ticket data format for ID {id} (type: {type(ticket_data)})" - # Get artifacts if any are referenced - artifacts = [] - if "artifact_uris" in ticket_data: - for uri in ticket_data["artifact_uris"]: - try: - artifact_contents = await 
ctx.read_resource(uri) - if artifact_contents and len(artifact_contents) > 0: - # Try each content item for valid JSON - for content_item in artifact_contents: - try: - artifact_data = json.loads(content_item.content) - artifacts.append(artifact_data) - break - except json.JSONDecodeError: - continue - except Exception as artifact_error: - await ctx.warning(f"Error reading artifact {uri}: {str(artifact_error)}") + # Add navigation links for detailed information (no extra data fetching) + if id.upper().startswith("TKT-"): + timeline_id = id.replace("TKT-", "").replace("tkt-", "") + else: + timeline_id = id + + # Add navigation links for detailed information + ticket_data["_links"] = { + "timeline": f"devrev://tickets/{timeline_id}/timeline", + "artifacts": f"devrev://tickets/{timeline_id}/artifacts", + "self": resource_uri + } - ticket_data["artifacts"] = artifacts - + # Remove any large nested data that might be in the ticket response + # Keep only core ticket information + if "timeline_entries" in ticket_data: + del ticket_data["timeline_entries"] + if "artifacts" in ticket_data and isinstance(ticket_data["artifacts"], list) and len(ticket_data["artifacts"]) > 0: + # Keep just the count of artifacts, not the full data + artifacts_count = len(ticket_data["artifacts"]) + ticket_data["artifacts"] = f"{artifacts_count} artifacts available (use _links.artifacts to access)" + + await ctx.info(f"Returning core ticket data for {id} with navigation links") return json.dumps(ticket_data, indent=2) except Exception as e: diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py index f62d6db..6eaea61 100644 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -8,6 +8,7 @@ from fastmcp import Context from ..types import VisibilityInfo, TimelineEntryType from ..error_handler import tool_error_handler +from ..utils import read_resource_content 
@tool_error_handler("get_timeline_entries") @@ -17,10 +18,10 @@ async def get_timeline_entries( format: str = "summary" ) -> str: """ - Get timeline entries for a DevRev ticket with flexible formatting options. + Get timeline entries for a DevRev work item (ticket or issue) with flexible formatting options. Args: - id: The DevRev ticket ID - accepts TKT-12345, 12345, or full don:core format + id: The DevRev work ID - accepts TKT-12345, ISS-9031, numeric IDs, or full don:core format ctx: FastMCP context format: Output format - "summary" (key info), "detailed" (conversation focus), or "full" (complete data) @@ -28,59 +29,47 @@ async def get_timeline_entries( Formatted timeline entries based on the requested format """ try: - # Normalize the ticket ID to just the number - ticket_id = _normalize_ticket_id(id) - await ctx.info(f"Fetching timeline entries for ticket {ticket_id} in {format} format") + # Determine work item type and normalize ID + work_type, normalized_id, display_id = _normalize_work_id(id) + await ctx.info(f"Fetching timeline entries for {work_type} {normalized_id} in {format} format") + + # Use the appropriate resource URI based on work type + if work_type == "ticket": + resource_uri = f"devrev://tickets/{normalized_id}/timeline" + elif work_type == "issue": + resource_uri = f"devrev://issues/{normalized_id}/timeline" + else: + # Fallback - try ticket first, then issue + resource_uri = f"devrev://tickets/{normalized_id}/timeline" - # Use the resource URI to get the enriched timeline - resource_uri = f"devrev://tickets/{ticket_id}/timeline" try: - content = await ctx.read_resource(resource_uri) + # Use the utility function to handle resource reading consistently + timeline_data = await read_resource_content( + ctx, + resource_uri, + parse_json=True, + require_content=False + ) + + if not timeline_data: + return f"No timeline entries found for {work_type} {display_id}" + except Exception as resource_error: await ctx.error(f"Error reading resource 
{resource_uri}: {str(resource_error)}") - raise resource_error - - if not content: - return f"No timeline entries found for ticket {ticket_id}" - - # Handle the resource response - FastMCP can return different structures - # Extract the actual timeline data from the response - if isinstance(content, list) and len(content) > 0: - # It's a list, likely containing a ReadResourceContents object - first_item = content[0] - if hasattr(first_item, 'content'): - # ReadResourceContents object - try: - timeline_data = json.loads(first_item.content) - except (json.JSONDecodeError, AttributeError): - if format == "full": - return str(first_item.content) if hasattr(first_item, 'content') else str(first_item) - else: - return f"Error: Could not parse timeline data for ticket {ticket_id}" - else: - # Direct data in the list - timeline_data = first_item - elif hasattr(content, 'content'): - # It's a ReadResourceContents object, get the content - try: - timeline_data = json.loads(content.content) - except (json.JSONDecodeError, AttributeError): - if format == "full": - return str(content.content) if hasattr(content, 'content') else str(content) - else: - return f"Error: Could not parse timeline data for ticket {ticket_id}" - elif isinstance(content, str): + # If JSON parsing failed but we got content, try fallback with raw content try: - timeline_data = json.loads(content) - except json.JSONDecodeError: - # If it's already a string, return as-is for full format - if format == "full": - return content + timeline_data = await read_resource_content( + ctx, + resource_uri, + parse_json=False, + require_content=False + ) + if format == "full" and timeline_data: + return str(timeline_data) else: - return f"Error: Could not parse timeline data for ticket {ticket_id}" - else: - # Content is already parsed (dict, list, etc.) 
- timeline_data = content + return f"Error: Could not parse timeline data for {work_type} {display_id}" + except Exception: + raise resource_error # Debug: Check what we actually received await ctx.info(f"DEBUG: timeline_data type: {type(timeline_data)}") @@ -93,9 +82,9 @@ async def get_timeline_entries( # Format based on requested type if format == "summary": - return _format_summary(timeline_data, ticket_id) + return _format_summary(timeline_data, normalized_id, display_id, work_type) elif format == "detailed": - return _format_detailed(timeline_data, ticket_id) + return _format_detailed(timeline_data, normalized_id, display_id, work_type) else: # format == "full" try: return json.dumps(timeline_data, indent=2, default=str) @@ -105,31 +94,43 @@ async def get_timeline_entries( except Exception as e: await ctx.error(f"Failed to get timeline entries for {id}: {str(e)}") - return f"Failed to get timeline entries for ticket {id}: {str(e)}" + return f"Failed to get timeline entries for work item {id}: {str(e)}" -def _normalize_ticket_id(id: str) -> str: +def _normalize_work_id(id: str) -> tuple[str, str, str]: """ - Normalize various ticket ID formats to just the numeric ID. + Normalize various work ID formats and determine the work type. 
+ + Returns: (work_type, numeric_id, display_id) Accepts: - - TKT-12345 -> 12345 - - tkt-12345 -> 12345 - - don:core:dvrv-us-1:devo/118WAPdKBc:ticket/12345 -> 12345 - - 12345 -> 12345 + - TKT-12345 -> ("ticket", "12345", "TKT-12345") + - ISS-9031 -> ("issue", "9031", "ISS-9031") + - don:core:dvrv-us-1:devo/118WAPdKBc:ticket/12345 -> ("ticket", "12345", "TKT-12345") + - don:core:dvrv-us-1:devo/118WAPdKBc:issue/9031 -> ("issue", "9031", "ISS-9031") + - 12345 -> ("unknown", "12345", "12345") """ - if id.startswith("don:core:") and ":ticket/" in id: - # Extract from full DevRev ID - return id.split(":ticket/")[1] - elif id.upper().startswith("TKT-"): - # Extract from TKT- format (case insensitive) - return id[4:] # Remove first 4 characters (TKT- or tkt-) + id_upper = id.upper() + + if id.startswith("don:core:"): + if ":ticket/" in id: + numeric_id = id.split(":ticket/")[1] + return ("ticket", numeric_id, f"TKT-{numeric_id}") + elif ":issue/" in id: + numeric_id = id.split(":issue/")[1] + return ("issue", numeric_id, f"ISS-{numeric_id}") + elif id_upper.startswith("TKT-"): + numeric_id = id[4:] # Remove TKT- prefix + return ("ticket", numeric_id, id.upper()) + elif id_upper.startswith("ISS-"): + numeric_id = id[4:] # Remove ISS- prefix + return ("issue", numeric_id, id.upper()) else: - # Assume it's already just the ticket number - return id + # Assume it's just a numeric ID - we can't determine the type + return ("unknown", id, id) -def _format_summary(timeline_data, ticket_id: str) -> str: +def _format_summary(timeline_data, numeric_id: str, display_id: str, work_type: str) -> str: """ Format timeline data as a concise summary focusing on key metrics and latest activity. 
""" @@ -145,7 +146,7 @@ def _format_summary(timeline_data, ticket_id: str) -> str: # Build summary text lines = [ - f"**TKT-{ticket_id} Timeline Summary:**", + f"**{display_id} Timeline Summary:**", "", f"**Subject:** {summary.get('subject', 'Unknown')}", f"**Status:** {summary.get('current_stage', 'Unknown')}", @@ -235,7 +236,7 @@ def _format_summary(timeline_data, ticket_id: str) -> str: return "\n".join(lines) -def _format_detailed(timeline_data, ticket_id: str) -> str: +def _format_detailed(timeline_data, numeric_id: str, display_id: str, work_type: str) -> str: """ Format timeline data with focus on conversation flow and key events. """ @@ -250,7 +251,7 @@ def _format_detailed(timeline_data, ticket_id: str) -> str: conversation = timeline_data.get("conversation_thread", []) lines = [ - f"**TKT-{ticket_id} Detailed Timeline:**", + f"**{display_id} Detailed Timeline:**", "", f"**Subject:** {summary.get('subject', 'Unknown')}", f"**Status:** {summary.get('current_stage', 'Unknown')}", diff --git a/src/devrev_mcp/tools/get_work.py b/src/devrev_mcp/tools/get_work.py new file mode 100644 index 0000000..4a603cc --- /dev/null +++ b/src/devrev_mcp/tools/get_work.py @@ -0,0 +1,58 @@ +""" +DevRev Get Work Tool + +Provides a tool for fetching any DevRev work item (tickets, issues, etc.) by ID. +""" + +import json +from fastmcp import Context +from ..utils import make_devrev_request +from ..endpoints import WORKS_GET +from ..error_handler import tool_error_handler + + +@tool_error_handler("get_work") +async def get_work(id: str, ctx: Context) -> str: + """ + Get a DevRev work item (ticket, issue, etc.) by ID. 
+ + Args: + id: The DevRev work ID - accepts TKT-12345, ISS-9031, or any work item format + ctx: FastMCP context + + Returns: + JSON string containing the work item data + """ + try: + await ctx.info(f"Fetching work item {id}") + + # Make API request to get work item details using works.get + payload = { + "id": id + } + + response = make_devrev_request(WORKS_GET, payload) + + if response.status_code != 200: + await ctx.error(f"DevRev API returned status {response.status_code}") + return json.dumps({ + "error": f"Failed to fetch work item {id}", + "status_code": response.status_code, + "message": response.text + }) + + data = response.json() + work_item = data.get("work") + + if not work_item: + return json.dumps({ + "error": f"Work item {id} not found", + "message": "No work item found with the provided ID" + }) + + # Return the work item data directly + return json.dumps(work_item, indent=2, default=str) + + except Exception as e: + await ctx.error(f"Failed to get work item {id}: {str(e)}") + return f"Failed to get work item {id}: {str(e)}" \ No newline at end of file diff --git a/src/devrev_mcp/utils.py b/src/devrev_mcp/utils.py index 09b983c..5e39bf1 100644 --- a/src/devrev_mcp/utils.py +++ b/src/devrev_mcp/utils.py @@ -5,9 +5,11 @@ This module provides utility functions for making authenticated requests to the DevRev API. 
""" +import json import os import requests -from typing import Any, Dict +from typing import Any, Dict, List, Union +from fastmcp import Context def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Response: """ @@ -45,52 +47,260 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp raise requests.RequestException(f"DevRev API request failed for endpoint '{endpoint}': {e}") from e -def normalize_ticket_id(ticket_id: str) -> str: + +async def read_resource_content( + ctx: Context, + resource_uri: str, + parse_json: bool = True, + require_content: bool = True +) -> Union[Dict[str, Any], str, None]: """ - Normalize ticket ID to TKT-XXXXX format for API calls. + Read content from a DevRev resource URI with consistent error handling. + + This utility handles the common pattern of reading from ctx.read_resource, + extracting content from ReadResourceContents objects, and optionally parsing JSON. Args: - ticket_id: The input ticket ID (e.g., "12345", "TKT-12345", "tkt-12345") + ctx: FastMCP context + resource_uri: The resource URI to read (e.g., "devrev://works/12345") + parse_json: If True, parse the content as JSON. If False, return raw string. + require_content: If True, raise an error if no content is found. 
Returns: - Normalized ticket ID in TKT-XXXXX format + - Dict if parse_json=True and content is valid JSON + - str if parse_json=False or JSON parsing fails + - None if require_content=False and no content found + + Raises: + ValueError: If require_content=True and no content is found + json.JSONDecodeError: If parse_json=True but content is not valid JSON + Exception: If reading the resource fails + """ + try: + await ctx.info(f"Reading resource: {resource_uri}") + resource_result = await ctx.read_resource(resource_uri) + + # Extract content following the established pattern + content_data = None + + if isinstance(resource_result, list) and len(resource_result) > 0: + # It's a list of ReadResourceContents objects + for i, content_item in enumerate(resource_result): + if hasattr(content_item, 'content'): + try: + content_data = content_item.content + if i > 0: + await ctx.info(f"Successfully got content from item {i}") + break + except Exception as e: + await ctx.warning(f"Content item {i} could not be accessed: {e}") + continue + elif hasattr(resource_result, 'content'): + # Single ReadResourceContents object + content_data = resource_result.content + elif isinstance(resource_result, str): + # Direct string content + content_data = resource_result + else: + # Fallback to string conversion + content_data = str(resource_result) + + # Check if we got content + if not content_data: + if require_content: + raise ValueError(f"No content found in resource {resource_uri}") + else: + await ctx.warning(f"No content found in resource {resource_uri}") + return None + + # Parse JSON if requested + if parse_json: + try: + parsed_data = json.loads(content_data) + await ctx.info(f"Successfully parsed JSON from resource {resource_uri}") + return parsed_data + except json.JSONDecodeError as e: + await ctx.error(f"Failed to parse JSON from resource {resource_uri}: {e}") + if require_content: + raise + else: + return content_data + else: + return content_data + + except Exception as 
e: + await ctx.error(f"Failed to read resource {resource_uri}: {str(e)}") + raise + + +async def get_link_types(ctx: Context, cache: dict) -> Dict[str, Dict[str, str]]: + """ + Fetch and cache link types from DevRev API. - Examples: - normalize_ticket_id("12345") -> "TKT-12345" - normalize_ticket_id("TKT-12345") -> "TKT-12345" - normalize_ticket_id("tkt-12345") -> "TKT-12345" + Returns: + Dictionary mapping link type IDs to their forward_name and backward_name """ - if not ticket_id: - return ticket_id + cache_key = "devrev://link_types" + cached_value = cache.get(cache_key) - # Remove any existing TKT- prefix (case insensitive) - if ticket_id.upper().startswith("TKT-"): - numeric_id = ticket_id[4:] # Remove TKT- or tkt- - else: - numeric_id = ticket_id + if cached_value is not None: + return cached_value - # Return normalized format - return f"TKT-{numeric_id}" + from .endpoints import LINK_TYPES_LIST # Import here to avoid circular imports + + try: + response = make_devrev_request(LINK_TYPES_LIST, {}) + + if response.status_code != 200: + await ctx.warning(f"Could not fetch link types: HTTP {response.status_code}") + return {} + + data = response.json() + link_types = data.get("link_types", []) + + # Build lookup dictionary + link_type_map = {} + for link_type in link_types: + link_type_id = link_type.get("id", "") + forward_name = link_type.get("forward_name", "") + backward_name = link_type.get("backward_name", "") + + if link_type_id: + link_type_map[link_type_id] = { + "forward_name": forward_name, + "backward_name": backward_name + } + + cache.set(cache_key, link_type_map) + await ctx.info(f"Cached {len(link_type_map)} link types") + return link_type_map + + except Exception as e: + await ctx.warning(f"Error fetching link types: {str(e)}") + return {} -def extract_ticket_id_from_object(object_id: str) -> str: +async def fetch_linked_work_items( + work_item_id: str, + work_item_display_id: str, + work_item_type: str, + ctx: Context, + cache: dict = None +) -> 
List[Dict[str, Any]]: """ - Extract numeric ticket ID from object ID containing TKT- prefix. + Fetch and process linked work items for any DevRev work item. + + This utility extracts the linked work items logic from the ticket resource + to make it reusable across different resource types (tickets, issues, etc.). Args: - object_id: Object ID that may contain TKT- prefix + work_item_id: The full don:core ID of the work item (e.g., "don:core:dvrv-us-1:devo/123:ticket/456") + work_item_display_id: The display ID of the work item (e.g., "TKT-12345", "ISS-9031") + work_item_type: The type of work item ("ticket", "issue", etc.) + ctx: FastMCP context for logging + cache: Cache dictionary for storing link types Returns: - Numeric part of ticket ID - - Examples: - extract_ticket_id_from_object("TKT-12345") -> "12345" - extract_ticket_id_from_object("12345") -> "12345" + List of linked work items with navigation links and metadata """ - if not object_id: - return object_id + from .endpoints import LINKS_LIST # Import here to avoid circular imports - if "TKT-" in object_id: - return object_id.replace("TKT-", "") + # Get link types for better relationship descriptions + link_types_map = {} + if cache is not None: + link_types_map = await get_link_types(ctx, cache) - return object_id + try: + links_response = make_devrev_request( + LINKS_LIST, + {"object": work_item_id} + ) + + if links_response.status_code != 200: + await ctx.warning(f"Could not fetch links for {work_item_type} {work_item_display_id}") + return [] + + links_data = links_response.json() + links = links_data.get("links", []) + + # Process links to extract linked work items + linked_work_items = [] + current_work_item_id = work_item_id # The current work item's don:core ID + + for link in links: + # Each link has source, target, link_type, etc. 
+ target = link.get("target", {}) + source = link.get("source", {}) + link_type = link.get("link_type", "unknown") + + # Process both target and source, but exclude the current work item itself + for linked_item, relationship_direction in [(target, "outbound"), (source, "inbound")]: + if linked_item and linked_item.get("id") and linked_item.get("id") != current_work_item_id: + linked_item_id = linked_item.get("id", "") + linked_item_type = linked_item.get("type", "unknown") + linked_item_display_id = linked_item.get("display_id", "") + linked_item_title = linked_item.get("title", "") + + # Get proper relationship description using link types + relationship_description = "" + link_type_info = link_types_map.get(link_type, {}) + + if relationship_direction == "outbound": + # Current item -> linked item (use forward_name) + forward_name = link_type_info.get("forward_name", link_type) + relationship_description = f"{work_item_display_id} {forward_name} {linked_item_display_id}" + else: + # linked item -> Current item (use backward_name) + backward_name = link_type_info.get("backward_name", link_type) + relationship_description = f"{linked_item_display_id} {backward_name} {work_item_display_id}" + + processed_item = { + "id": linked_item_id, + "type": linked_item_type, + "display_id": linked_item_display_id, + "title": linked_item_title, + "link_type": link_type, + "relationship_direction": relationship_direction, + "relationship_description": relationship_description, + "stage": linked_item.get("stage", {}).get("name", "unknown"), + "priority": linked_item.get("priority", "unknown"), + "owned_by": linked_item.get("owned_by", []), + "links": {} + } + + # Add external reference if available (e.g., Jira link) + sync_metadata = linked_item.get("sync_metadata", {}) + if sync_metadata.get("external_reference"): + processed_item["external_reference"] = sync_metadata["external_reference"] + processed_item["origin_system"] = sync_metadata.get("origin_system", "unknown") + + # 
Add appropriate navigation links based on linked work item type + if linked_item_display_id: + processed_item["links"]["work_item"] = f"devrev://works/{linked_item_display_id}" + + if linked_item_type == "ticket" and linked_item_display_id.startswith("TKT-"): + ticket_num = linked_item_display_id.replace("TKT-", "") + processed_item["links"].update({ + "ticket": f"devrev://tickets/{ticket_num}", + "timeline": f"devrev://tickets/{ticket_num}/timeline", + "artifacts": f"devrev://tickets/{ticket_num}/artifacts" + }) + elif linked_item_type == "issue" and linked_item_display_id.startswith("ISS-"): + issue_num = linked_item_display_id.replace("ISS-", "") + processed_item["links"].update({ + "issue": f"devrev://issues/{issue_num}", + "timeline": f"devrev://issues/{issue_num}/timeline", + "artifacts": f"devrev://issues/{issue_num}/artifacts" + }) + + # Check if we already have this work item (avoid duplicates) + existing_item = next((item for item in linked_work_items if item["id"] == linked_item_id), None) + if not existing_item: + linked_work_items.append(processed_item) + + await ctx.info(f"Added {len(linked_work_items)} linked work items to {work_item_type} {work_item_display_id}") + return linked_work_items + + except Exception as e: + await ctx.warning(f"Error fetching links for {work_item_type} {work_item_display_id}: {str(e)}") + return [] From e652b9ef72c9710ee7b9f8994f5de56323049965 Mon Sep 17 00:00:00 2001 From: Sara Date: Tue, 3 Jun 2025 12:17:46 -0400 Subject: [PATCH 16/17] cleaning up --- src/devrev_mcp/cache.py | 7 +- src/devrev_mcp/server.py | 336 ++++++++++++++---- src/devrev_mcp/tools/get_ticket.py | 4 + src/devrev_mcp/tools/get_timeline_entries.py | 351 ++++++++++--------- src/devrev_mcp/utils.py | 23 +- 5 files changed, 483 insertions(+), 238 deletions(-) diff --git a/src/devrev_mcp/cache.py b/src/devrev_mcp/cache.py index e162184..a617274 100644 --- a/src/devrev_mcp/cache.py +++ b/src/devrev_mcp/cache.py @@ -8,11 +8,14 @@ from typing import Any, Dict, 
Optional, Union import json +# Cache configuration constants +DEFAULT_CACHE_SIZE = 500 + class SimpleCache: """Simple LRU cache with size limit to prevent memory leaks.""" - def __init__(self, max_size: int = 500): + def __init__(self, max_size: int = DEFAULT_CACHE_SIZE): """Initialize cache with maximum size limit.""" self.max_size = max_size self._cache: OrderedDict[str, str] = OrderedDict() @@ -62,4 +65,4 @@ def __contains__(self, key: str) -> bool: # Global cache instance - replaces devrev_cache = {} -devrev_cache = SimpleCache(max_size=500) \ No newline at end of file +devrev_cache = SimpleCache(max_size=DEFAULT_CACHE_SIZE) \ No newline at end of file diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 016574b..89785b1 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -137,51 +137,31 @@ async def update_object( # Specialized resource handlers for different DevRev object types -# Resource metadata constants -TICKET_RESOURCE_DESCRIPTION = "Access comprehensive DevRev ticket information with enriched timeline and artifacts. Supports multiple URI formats: numeric (12345), TKT format (TKT-12345), and full don:core IDs." -TICKET_RESOURCE_TAGS = ["ticket", "devrev", "customer-support", "enriched", "navigation"] - -TIMELINE_RESOURCE_DESCRIPTION = "Access enriched ticket timeline with conversation flow, artifacts, and detailed visibility information. Includes customer context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail." -TIMELINE_RESOURCE_TAGS = ["timeline", "devrev", "customer-support", "enriched", "conversation", "visibility", "audit"] - -TIMELINE_ENTRY_RESOURCE_DESCRIPTION = "Access individual timeline entry with detailed conversation data and navigation links. Provides specific entry context within ticket timeline." 
+# Resource tag constants +TICKET_RESOURCE_TAGS = ["ticket", "devrev", "customer-support", "navigation"] +TIMELINE_RESOURCE_TAGS = ["timeline", "devrev", "customer-support", "conversation", "visibility", "audit"] TIMELINE_ENTRY_RESOURCE_TAGS = ["timeline-entry", "devrev", "customer-support", "conversation", "navigation"] - -TICKET_ARTIFACTS_RESOURCE_DESCRIPTION = "Access all artifacts associated with a specific ticket. Returns collection of files, screenshots, and documents with download links and metadata." TICKET_ARTIFACTS_RESOURCE_TAGS = ["artifacts", "devrev", "customer-support", "collection", "files", "navigation"] - -ARTIFACT_RESOURCE_DESCRIPTION = "Access DevRev artifact metadata with temporary download URLs. Provides file information, content type, and secure download links." ARTIFACT_RESOURCE_TAGS = ["artifact", "devrev", "files", "metadata", "download"] - -WORK_RESOURCE_DESCRIPTION = "Access any DevRev work item with unified interface for tickets, issues, and other work types. Supports display ID formats (TKT-12345, ISS-9031) with navigation links." WORK_RESOURCE_TAGS = ["work", "devrev", "unified", "tickets", "issues", "navigation"] - -ISSUE_RESOURCE_DESCRIPTION = "Access comprehensive DevRev issue information with enriched timeline and artifacts. Supports multiple URI formats: numeric (9031), ISS format (ISS-9031), and full don:core IDs." -ISSUE_RESOURCE_TAGS = ["issue", "devrev", "internal-work", "enriched", "navigation"] - -# Additional resource patterns for increased exposure -TIMELINE_ALT_RESOURCE_DESCRIPTION = "Access ticket timeline with alternative URI formats. Supports TKT- format and numeric IDs for flexible timeline access." 
-TIMELINE_ALT_RESOURCE_TAGS = ["timeline", "devrev", "customer-support", "enriched", "alternative-access"] +ISSUE_RESOURCE_TAGS = ["issue", "devrev", "internal-work", "navigation"] @mcp.resource( uri="devrev://tickets/{ticket_id}", - description=TICKET_RESOURCE_DESCRIPTION, tags=TICKET_RESOURCE_TAGS ) @mcp.resource( uri="devrev://tickets/TKT-{ticket_number}", - description=TICKET_RESOURCE_DESCRIPTION, tags=TICKET_RESOURCE_TAGS ) @mcp.resource( uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}", - description=TICKET_RESOURCE_DESCRIPTION, tags=TICKET_RESOURCE_TAGS ) async def ticket(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: """ - Access DevRev ticket details with navigation links. - Supports multiple URI formats - all normalize to numeric ticket ID. + Access comprehensive DevRev ticket information with timeline and artifacts. + Supports multiple URI formats: numeric (12345), TKT format (TKT-12345), and full don:core IDs. Args: ticket_id: The DevRev ticket ID (numeric, e.g., 12345) @@ -198,23 +178,20 @@ async def ticket(ticket_id: str = None, ticket_number: str = None, dev_org_id: s @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline", - description=TIMELINE_RESOURCE_DESCRIPTION, tags=TIMELINE_RESOURCE_TAGS ) @mcp.resource( uri="devrev://timeline/{ticket_id}", - description=TIMELINE_ALT_RESOURCE_DESCRIPTION, - tags=TIMELINE_ALT_RESOURCE_TAGS + tags=TIMELINE_RESOURCE_TAGS ) @mcp.resource( uri="devrev://timeline/TKT-{ticket_number}", - description=TIMELINE_ALT_RESOURCE_DESCRIPTION, - tags=TIMELINE_ALT_RESOURCE_TAGS + tags=TIMELINE_RESOURCE_TAGS ) async def ticket_timeline(ticket_id: str = None, ticket_number: str = None, ctx: Context = None) -> str: """ - Access enriched timeline for a ticket with structured conversation format. - Supports multiple URI formats for flexible access. 
+ Access ticket timeline with conversation flow, artifacts, and detailed visibility information. + Includes customer context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail. Args: ticket_id: The DevRev ticket ID (numeric, e.g., 12345) @@ -230,23 +207,20 @@ async def ticket_timeline(ticket_id: str = None, ticket_number: str = None, ctx: @mcp.resource( uri="devrev://tickets/{ticket_id}/timeline/{entry_id}", - description=TIMELINE_ENTRY_RESOURCE_DESCRIPTION, tags=TIMELINE_ENTRY_RESOURCE_TAGS ) @mcp.resource( uri="devrev://tickets/TKT-{ticket_number}/timeline/{entry_id}", - description=TIMELINE_ENTRY_RESOURCE_DESCRIPTION, tags=TIMELINE_ENTRY_RESOURCE_TAGS ) @mcp.resource( uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}/timeline/{entry_id}", - description=TIMELINE_ENTRY_RESOURCE_DESCRIPTION, tags=TIMELINE_ENTRY_RESOURCE_TAGS ) async def timeline_entry(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, entry_id: str = None, ctx: Context = None) -> str: """ - Access specific timeline entry details. - Supports multiple URI formats for flexible access. + Access individual timeline entry with detailed conversation data and navigation links. + Provides specific entry context within ticket timeline. 
Args: ticket_id: The DevRev ticket ID (numeric, e.g., 12345) @@ -280,23 +254,19 @@ async def timeline_entry(ticket_id: str = None, ticket_number: str = None, dev_o @mcp.resource( uri="devrev://tickets/{ticket_id}/artifacts", - description=TICKET_ARTIFACTS_RESOURCE_DESCRIPTION, tags=TICKET_ARTIFACTS_RESOURCE_TAGS ) @mcp.resource( uri="devrev://tickets/TKT-{ticket_number}/artifacts", - description=TICKET_ARTIFACTS_RESOURCE_DESCRIPTION, tags=TICKET_ARTIFACTS_RESOURCE_TAGS ) @mcp.resource( uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}/artifacts", - description=TICKET_ARTIFACTS_RESOURCE_DESCRIPTION, tags=TICKET_ARTIFACTS_RESOURCE_TAGS ) async def ticket_artifacts(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: """ - Access all artifacts for a ticket. - Supports multiple URI formats for flexible access. + Access all artifacts associated with a specific ticket. Returns collection of files, screenshots, and documents with download links and metadata. Args: ticket_id: The DevRev ticket ID (numeric, e.g., 12345) @@ -313,12 +283,11 @@ async def ticket_artifacts(ticket_id: str = None, ticket_number: str = None, dev @mcp.resource( uri="devrev://artifacts/{artifact_id}", - description=ARTIFACT_RESOURCE_DESCRIPTION, tags=ARTIFACT_RESOURCE_TAGS ) async def artifact(artifact_id: str, ctx: Context) -> str: """ - Access DevRev artifact metadata. + Access DevRev artifact metadata with temporary download URLs. Provides file information, content type, and secure download links. 
Args: artifact_id: The DevRev artifact ID @@ -333,17 +302,15 @@ async def artifact(artifact_id: str, ctx: Context) -> str: @mcp.resource( uri="devrev://works/don:core:dvrv-us-1:devo/{dev_org_id}:{work_type}/{work_number}", - description=WORK_RESOURCE_DESCRIPTION, tags=WORK_RESOURCE_TAGS ) @mcp.resource( uri="devrev://works/{work_id}", - description=WORK_RESOURCE_DESCRIPTION, tags=WORK_RESOURCE_TAGS ) async def works(ctx: Context, work_id: str | None = None, work_type: str | None = None, work_number: str | None = None, dev_org_id: str | None = None) -> str: """ - Access DevRev work item details using unified work ID format. + Access any DevRev work item with unified interface for tickets, issues, and other work types. Supports display ID formats (TKT-12345, ISS-9031) with navigation links. Args: work_id: The DevRev work ID (e.g., TKT-12345, ISS-9031) @@ -362,23 +329,19 @@ async def works(ctx: Context, work_id: str | None = None, work_type: str | None @mcp.resource( uri="devrev://issues/{issue_number}", - description=ISSUE_RESOURCE_DESCRIPTION, tags=ISSUE_RESOURCE_TAGS ) @mcp.resource( uri="devrev://issues/ISS-{issue_number}", - description=ISSUE_RESOURCE_DESCRIPTION, tags=ISSUE_RESOURCE_TAGS ) @mcp.resource( uri="devrev://issues/don:core:dvrv-us-1:devo/{dev_org_id}:issue/{issue_number}", - description=ISSUE_RESOURCE_DESCRIPTION, tags=ISSUE_RESOURCE_TAGS ) async def issue(issue_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: """ - Access DevRev issue details with navigation links. - Supports multiple URI formats - all normalize to numeric issue number. + Access comprehensive DevRev issue information with timeline and artifacts. Supports multiple URI formats: numeric (9031), ISS format (ISS-9031), and full don:core IDs. 
Args: issue_id: The DevRev issue ID (numeric, e.g., 9031) @@ -395,18 +358,15 @@ async def issue(issue_number: str = None, dev_org_id: str = None, ctx: Context = @mcp.resource( uri="devrev://issues/{issue_id}/timeline", - description="Access enriched issue timeline with conversation flow, artifacts, and detailed visibility information. Includes internal context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail.", - tags=["issue-timeline", "devrev", "internal-work", "enriched", "conversation", "visibility", "audit"] + tags=["issue-timeline", "devrev", "internal-work", "conversation", "visibility", "audit"] ) @mcp.resource( uri="devrev://issues/ISS-{issue_number}/timeline", - description="Access enriched issue timeline with conversation flow, artifacts, and detailed visibility information. Includes internal context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail.", - tags=["issue-timeline", "devrev", "internal-work", "enriched", "conversation", "visibility", "audit"] + tags=["issue-timeline", "devrev", "internal-work", "conversation", "visibility", "audit"] ) async def issue_timeline(issue_id: str = None, issue_number: str = None, ctx: Context = None) -> str: """ - Access enriched timeline for an issue with structured conversation format. - Supports multiple URI formats for flexible access. + Access issue timeline with conversation flow, artifacts, and detailed visibility information. Includes internal context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail. Args: issue_id: The DevRev issue ID (numeric, e.g., 9031) @@ -440,18 +400,15 @@ async def issue_timeline(issue_id: str = None, issue_number: str = None, ctx: Co @mcp.resource( uri="devrev://issues/{issue_id}/artifacts", - description="Access all artifacts associated with a specific issue. 
Returns collection of files, screenshots, and documents with download links and metadata.", tags=["issue-artifacts", "devrev", "internal-work", "collection", "files", "navigation"] ) @mcp.resource( uri="devrev://issues/ISS-{issue_number}/artifacts", - description="Access all artifacts associated with a specific issue. Returns collection of files, screenshots, and documents with download links and metadata.", tags=["issue-artifacts", "devrev", "internal-work", "collection", "files", "navigation"] ) async def issue_artifacts(issue_id: str = None, issue_number: str = None, ctx: Context = None) -> str: """ - Access all artifacts for an issue. - Supports multiple URI formats for flexible access. + Access all artifacts associated with a specific issue. Returns collection of files, screenshots, and documents with download links and metadata. Args: issue_id: The DevRev issue ID (numeric, e.g., 9031) @@ -612,6 +569,259 @@ async def create_timeline_comment(work_id: str, body: str, ctx: Context) -> str: """ return await create_timeline_comment_tool(work_id, body, ctx) +@mcp.prompt( + name="investigate_ticket", + description="""Systematic DevRev ticket investigation following the established support playbook. + +This prompt guides you through a comprehensive 6-step investigation process designed to: +- Thoroughly understand customer issues +- Identify root causes systematically +- Research similar patterns and solutions +- Document findings with proper timestamps +- Provide actionable resolution paths + +The investigation follows verification checkpoints at each step to ensure completeness and accuracy. 
All findings are documented chronologically for future reference and pattern analysis.""", + tags=["investigation", "support", "ticket", "playbook", "systematic", "devrev"] +) +async def investigate_ticket( + ticket_id: str, + customer_context: str = "", + priority_level: str = "normal", + special_notes: str = "", + ctx: Context = None +) -> str: + """ + Generate a systematic ticket investigation prompt following the support playbook. + + Args: + ticket_id: The DevRev ticket ID to investigate (e.g., TKT-12345, DR-67890) + customer_context: Additional customer context or background (optional) + priority_level: Investigation priority (low/normal/high/critical, default: normal) + special_notes: Any special considerations or constraints (optional) + + Returns: + Formatted investigation prompt with systematic workflow + """ + from datetime import datetime + + current_timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") + + # Build the investigation prompt + prompt_content = f"""# DevRev Ticket Investigation: {ticket_id} + +**Investigation Started:** {current_timestamp} +**Priority Level:** {priority_level.upper()} +**Investigator:** Claude MCP Agent + +--- + +## ๐ŸŽฏ **Investigation Objectives** + +- [ ] **STEP 1**: Fetch ticket data and establish investigation structure +- [ ] **STEP 2**: Analyze customer issue and gather complete context +- [ ] **STEP 3**: Identify affected repository/component +- [ ] **STEP 4**: Research similar issues and patterns +- [ ] **STEP 5**: Review documentation and known solutions +- [ ] **STEP 6**: Set up test environment if reproduction needed + +--- + +## ๐Ÿ“‹ **STEP 1: Investigation Setup & Data Gathering** + +### **Initial Data Collection** +```bash +# Get current investigation timestamp +date '+%Y-%m-%d %H:%M:%S' + +# Fetch primary ticket information +get_ticket(id="{ticket_id}") + +# Get complete timeline with all communications +get_timeline_entries(id="{ticket_id}", format="detailed") +``` + +### **Investigation Structure 
Setup** +- [ ] Create timestamped todo list for systematic tracking +- [ ] Begin investigation log with chronological documentation +- [ ] Note any artifacts or attachments for download + +**Customer Context:** {customer_context if customer_context else "To be determined from ticket data"} + +**Special Considerations:** {special_notes if special_notes else "None specified"} + +--- + +## ๐Ÿ” **STEP 2: Customer Issue Analysis** + +### **Required Analysis Points:** +- [ ] **Exact Issue Description**: What specifically is the customer experiencing? +- [ ] **Customer Environment**: What are their technical configuration details? +- [ ] **Impact Assessment**: How is this affecting their workflow? +- [ ] **Timeline Review**: What communications have occurred? +- [ ] **Artifacts Analysis**: Are there logs, screenshots, or files attached? + +### **Verification Checkpoint #2:** +- โœ… Have you fully understood the EXACT customer issue? +- โœ… Do you have complete context on their environment/configuration? +- โœ… Have you reviewed ALL timeline entries for full context? + +--- + +## ๐Ÿ—‚๏ธ **STEP 3: Repository & Component Identification** + +### **Component Mapping:** +- **UI/Web Application Issues** โ†’ `/Users/sara/work/fossa/FOSSA/` +- **CLI/Scanning Issues** โ†’ `/Users/sara/work/fossa/fossa-cli/` +- **API/Authentication Issues** โ†’ `/Users/sara/work/fossa/FOSSA/` +- **Documentation Issues** โ†’ `/Users/sara/work/docs/` + +### **Repository Investigation:** +```bash +# Search for related code in identified repository +# Use Glob/Grep tools for targeted searches +# Document specific file paths and line numbers +``` + +**Verification Checkpoint #3:** Have you correctly identified the repository for investigation? 
+ +--- + +## ๐Ÿ” **STEP 4: Similar Issue Research** + +### **Pattern Analysis:** +```bash +# Search for similar tickets +search(query="[key issue terms]", namespace="ticket") + +# Look for related issues +search(query="[key issue terms]", namespace="issue") +``` + +### **Analysis Requirements:** +- [ ] Identify tickets with similar symptoms +- [ ] Distinguish surface similarities from root cause similarities +- [ ] Note key differences between seemingly similar cases +- [ ] Check resolution patterns for similar issues + +**Verification Checkpoint #4:** Are you distinguishing between surface and root cause similarities? + +--- + +## ๐Ÿ“š **STEP 5: Documentation & Knowledge Base** + +### **Documentation Review:** +```bash +# Check common issues documentation +Read(/Users/sara/work/docs/common-issues.md) + +# Search for issue-specific documentation +Grep(pattern="[issue-specific-terms]", path="/Users/sara/work/docs/", include="*.md") +``` + +### **Knowledge Assembly:** +- [ ] Review known workarounds and solutions +- [ ] Check architectural documentation if relevant +- [ ] Identify any edge cases or special configurations +- [ ] Gather relevant technical background + +**Verification Checkpoint #5:** Have you checked ALL relevant documentation and considered edge cases? + +--- + +## ๐Ÿงช **STEP 6: Test Environment Setup (If Needed)** + +### **Environment Preparation:** +If reproduction is required: + +```bash +# Create ticket-specific directory +mkdir -p tickets/{ticket_id} +cd tickets/{ticket_id} + +# Configure .fossa.yml with correct project ID +cat > .fossa.yml << EOF +version: 3 + +project: + id: {ticket_id} +EOF +``` + +### **Setup Verification:** +- [ ] Directory created with exact ticket ID +- [ ] Configuration file properly set up +- [ ] Test isolation properly established +- [ ] All setup steps documented + +**Verification Checkpoint #6:** Is your test directory correct, config accurate, and setup documented? 
+ +--- + +## ๐Ÿ“ **Investigation Documentation Requirements** + +### **Continuous Logging:** +- Update investigation file at EVERY step with timestamps +- Document all tool usage and results +- Record verification checkpoint completions +- Note any deviations from standard workflow + +### **Final Documentation:** +- [ ] Complete investigation summary +- [ ] Root cause identification (if found) +- [ ] Resolution recommendations +- [ ] Escalation path (if needed) +- [ ] Pattern analysis for future reference + +--- + +## โšก **Priority-Specific Guidelines** + +**Current Priority: {priority_level.upper()}** + +""" + + # Add priority-specific guidance + if priority_level.lower() == "critical": + prompt_content += """ +### **CRITICAL PRIORITY ACTIONS:** +- Immediately notify relevant teams after initial analysis +- Document customer impact in business terms +- Prepare interim updates for customer communication +- Consider immediate workaround identification +- Escalate to engineering if root cause requires code changes + +""" + elif priority_level.lower() == "high": + prompt_content += """ +### **HIGH PRIORITY ACTIONS:** +- Complete investigation within current session +- Prepare detailed customer communication +- Identify workarounds if available +- Document for pattern tracking + +""" + + prompt_content += """ +--- + +## ๐Ÿš€ **Next Steps** + +1. **Begin with STEP 1** - Execute the data gathering commands above +2. **Follow each verification checkpoint** systematically +3. **Document all findings** with timestamps in investigation file +4. **Complete all steps** before providing recommendations +5. **Prepare final summary** with actionable next steps + +**Remember:** This investigation follows the established support playbook for consistency and thoroughness. Each step builds on the previous to ensure no critical information is missed. 
+ +--- + +*Generated by DevRev MCP Investigation Prompt v1.0* +""" + + return prompt_content + + def main(): """Main entry point for the DevRev MCP server.""" # Run the server diff --git a/src/devrev_mcp/tools/get_ticket.py b/src/devrev_mcp/tools/get_ticket.py index 3756fe1..dbd15a6 100644 --- a/src/devrev_mcp/tools/get_ticket.py +++ b/src/devrev_mcp/tools/get_ticket.py @@ -25,6 +25,10 @@ async def get_ticket( Returns: JSON string containing the ticket data with timeline entries and artifacts """ + # Input validation + if not id or not id.strip(): + raise ValueError("ID parameter is required and cannot be empty") + try: await ctx.info(f"Fetching work item {id} with timeline entries and artifacts") diff --git a/src/devrev_mcp/tools/get_timeline_entries.py b/src/devrev_mcp/tools/get_timeline_entries.py index 6eaea61..9e5aadf 100644 --- a/src/devrev_mcp/tools/get_timeline_entries.py +++ b/src/devrev_mcp/tools/get_timeline_entries.py @@ -28,63 +28,42 @@ async def get_timeline_entries( Returns: Formatted timeline entries based on the requested format """ + # Input validation + if not id or not id.strip(): + raise ValueError("ID parameter is required and cannot be empty") + + if format not in ["summary", "detailed", "full"]: + raise ValueError(f"Invalid format '{format}'. 
Must be one of: summary, detailed, full") + try: - # Determine work item type and normalize ID - work_type, normalized_id, display_id = _normalize_work_id(id) - await ctx.info(f"Fetching timeline entries for {work_type} {normalized_id} in {format} format") + await ctx.info(f"Fetching timeline entries for {id} in {format} format") - # Use the appropriate resource URI based on work type - if work_type == "ticket": - resource_uri = f"devrev://tickets/{normalized_id}/timeline" - elif work_type == "issue": - resource_uri = f"devrev://issues/{normalized_id}/timeline" - else: - # Fallback - try ticket first, then issue - resource_uri = f"devrev://tickets/{normalized_id}/timeline" + # Try different resource URIs and let pattern matching handle the ID format + resource_uris = [ + f"devrev://tickets/{id}/timeline", + f"devrev://issues/{id}/timeline" + ] - try: - # Use the utility function to handle resource reading consistently - timeline_data = await read_resource_content( - ctx, - resource_uri, - parse_json=True, - require_content=False - ) - - if not timeline_data: - return f"No timeline entries found for {work_type} {display_id}" - - except Exception as resource_error: - await ctx.error(f"Error reading resource {resource_uri}: {str(resource_error)}") - # If JSON parsing failed but we got content, try fallback with raw content + timeline_data = None + for resource_uri in resource_uris: try: - timeline_data = await read_resource_content( - ctx, - resource_uri, - parse_json=False, - require_content=False - ) - if format == "full" and timeline_data: - return str(timeline_data) - else: - return f"Error: Could not parse timeline data for {work_type} {display_id}" + timeline_data = await _read_timeline_data_with_fallback(ctx, resource_uri, id, format) + if timeline_data and not isinstance(timeline_data, str): # Found valid data + break except Exception: - raise resource_error + continue # Try next URI + + if not timeline_data: + return f"No timeline entries found for {id}" + if 
isinstance(timeline_data, str): # Error message returned + return timeline_data - # Debug: Check what we actually received - await ctx.info(f"DEBUG: timeline_data type: {type(timeline_data)}") - if isinstance(timeline_data, dict): - await ctx.info(f"DEBUG: timeline_data keys: {list(timeline_data.keys())}") - elif isinstance(timeline_data, list): - await ctx.info(f"DEBUG: timeline_data length: {len(timeline_data)}") - if timeline_data: - await ctx.info(f"DEBUG: first item type: {type(timeline_data[0])}") # Format based on requested type if format == "summary": - return _format_summary(timeline_data, normalized_id, display_id, work_type) + return _format_summary(timeline_data, id) elif format == "detailed": - return _format_detailed(timeline_data, normalized_id, display_id, work_type) + return _format_detailed(timeline_data, id) else: # format == "full" try: return json.dumps(timeline_data, indent=2, default=str) @@ -97,146 +76,32 @@ async def get_timeline_entries( return f"Failed to get timeline entries for work item {id}: {str(e)}" -def _normalize_work_id(id: str) -> tuple[str, str, str]: - """ - Normalize various work ID formats and determine the work type. 
- - Returns: (work_type, numeric_id, display_id) - - Accepts: - - TKT-12345 -> ("ticket", "12345", "TKT-12345") - - ISS-9031 -> ("issue", "9031", "ISS-9031") - - don:core:dvrv-us-1:devo/118WAPdKBc:ticket/12345 -> ("ticket", "12345", "TKT-12345") - - don:core:dvrv-us-1:devo/118WAPdKBc:issue/9031 -> ("issue", "9031", "ISS-9031") - - 12345 -> ("unknown", "12345", "12345") - """ - id_upper = id.upper() - - if id.startswith("don:core:"): - if ":ticket/" in id: - numeric_id = id.split(":ticket/")[1] - return ("ticket", numeric_id, f"TKT-{numeric_id}") - elif ":issue/" in id: - numeric_id = id.split(":issue/")[1] - return ("issue", numeric_id, f"ISS-{numeric_id}") - elif id_upper.startswith("TKT-"): - numeric_id = id[4:] # Remove TKT- prefix - return ("ticket", numeric_id, id.upper()) - elif id_upper.startswith("ISS-"): - numeric_id = id[4:] # Remove ISS- prefix - return ("issue", numeric_id, id.upper()) - else: - # Assume it's just a numeric ID - we can't determine the type - return ("unknown", id, id) -def _format_summary(timeline_data, numeric_id: str, display_id: str, work_type: str) -> str: +def _format_summary(timeline_data, display_id: str) -> str: """ Format timeline data as a concise summary focusing on key metrics and latest activity. 
""" # Handle both dict and list formats if isinstance(timeline_data, list): - # If it's a list, treat it as the conversation thread conversation = timeline_data summary = {} else: - # If it's a dict, extract the expected fields summary = timeline_data.get("summary", {}) conversation = timeline_data.get("conversation_thread", []) - # Build summary text - lines = [ - f"**{display_id} Timeline Summary:**", - "", - f"**Subject:** {summary.get('subject', 'Unknown')}", - f"**Status:** {summary.get('current_stage', 'Unknown')}", - f"**Customer:** {summary.get('customer', 'Unknown')}", - f"**Created:** {summary.get('created_date', 'Unknown')}", - ] - - # Add message counts with visibility breakdown - customer_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "customer"] - support_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "support"] - - lines.extend([ - "", - (f"**Activity:** {len(customer_messages)} customer messages, " - f"{len(support_messages)} support responses"), - ]) - - # Add visibility summary if available - if isinstance(timeline_data, dict) and "visibility_summary" in timeline_data: - vis_summary = timeline_data["visibility_summary"] - lines.extend([ - "", - "**Visibility Summary:**", - (f"- Customer-visible entries: {vis_summary.get('customer_visible_entries', 0)} " - f"({vis_summary.get('customer_visible_percentage', 0)}%)"), - (f"- Internal-only entries: {vis_summary.get('internal_only_entries', 0)} " - f"({vis_summary.get('internal_only_percentage', 0)}%)"), - ]) - - # Show breakdown by visibility level - breakdown = vis_summary.get("visibility_breakdown", {}) - if breakdown: - lines.append("- Visibility levels:") - for level, count in breakdown.items(): - description = VisibilityInfo.from_visibility(level).description - lines.append(f" โ€ข {level}: {count} entries ({description})") - - # Add last activity timestamps - if summary.get("last_customer_message"): - lines.append(f"**Last 
customer message:** {summary['last_customer_message']}") - if summary.get("last_support_response"): - lines.append(f"**Last support response:** {summary['last_support_response']}") - - # Add latest messages preview with visibility indicators - if conversation: - lines.extend([ - "", - "**Recent Activity:**" - ]) - - # Show last 3 messages - recent_messages = conversation[-3:] if len(conversation) > 3 else conversation - for msg in recent_messages: - speaker = msg.get("speaker", {}) - timestamp = msg.get("timestamp", "")[:10] # Just date part - message_preview = (msg.get("message", "")[:100] + - ("..." if len(msg.get("message", "")) > 100 else "")) - - # Add visibility indicator - visibility_info = msg.get("visibility_info", {}) - visibility_indicator = "" - if visibility_info: - level = visibility_info.get("level", "external") - if level == "private": - visibility_indicator = "๐Ÿ”’ " - elif level == "internal": - visibility_indicator = "๐Ÿข " - elif level == "external": - visibility_indicator = "๐Ÿ‘ฅ " - elif level == "public": - visibility_indicator = "๐ŸŒ " - - lines.append( - f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): " - f"{visibility_indicator}{message_preview}" - ) - - # Add artifacts info - if isinstance(timeline_data, dict): - artifacts = timeline_data.get("all_artifacts", []) - if artifacts: - lines.extend([ - "", - f"**Attachments:** {len(artifacts)} file(s) attached" - ]) + lines = [] + lines.extend(_build_summary_header(display_id, summary)) + lines.extend(_build_activity_counts(conversation)) + lines.extend(_build_visibility_summary(timeline_data)) + lines.extend(_build_last_activity(summary)) + lines.extend(_build_recent_messages(conversation)) + lines.extend(_build_artifacts_info(timeline_data)) return "\n".join(lines) -def _format_detailed(timeline_data, numeric_id: str, display_id: str, work_type: str) -> str: +def _format_detailed(timeline_data, display_id: str) -> str: """ Format timeline data with focus on conversation flow and 
key events. """ @@ -354,4 +219,148 @@ def _format_detailed(timeline_data, numeric_id: str, display_id: str, work_type: f"({vis_summary.get('internal_only_percentage', 0)}%)") ]) - return "\n".join(lines) \ No newline at end of file + return "\n".join(lines) + + +async def _read_timeline_data_with_fallback(ctx, resource_uri: str, display_id: str, format: str): + """ + Read timeline data with fallback to raw content if JSON parsing fails. + + Returns: + Timeline data dict/list or error message string + """ + try: + # Try to read as JSON first + timeline_data = await read_resource_content( + ctx, + resource_uri, + parse_json=True, + require_content=False + ) + + if not timeline_data: + return f"No timeline entries found for {display_id}" + + return timeline_data + + except Exception as resource_error: + await ctx.error(f"Error reading resource {resource_uri}: {str(resource_error)}") + + # Fallback: try reading as raw content + try: + timeline_data = await read_resource_content( + ctx, + resource_uri, + parse_json=False, + require_content=False + ) + if format == "full" and timeline_data: + return str(timeline_data) + else: + return f"Error: Could not parse timeline data for {display_id}" + except Exception: + raise resource_error + + +def _build_summary_header(display_id: str, summary: dict) -> list: + """Build the header section of the summary.""" + return [ + f"**{display_id} Timeline Summary:**", + "", + f"**Subject:** {summary.get('subject', 'Unknown')}", + f"**Status:** {summary.get('current_stage', 'Unknown')}", + f"**Customer:** {summary.get('customer', 'Unknown')}", + f"**Created:** {summary.get('created_date', 'Unknown')}", + ] + + +def _build_activity_counts(conversation: list) -> list: + """Build activity counts section.""" + customer_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "customer"] + support_messages = [msg for msg in conversation if msg.get("speaker", {}).get("type") == "support"] + + return [ + "", + 
(f"**Activity:** {len(customer_messages)} customer messages, " + f"{len(support_messages)} support responses"), + ] + + +def _build_visibility_summary(timeline_data) -> list: + """Build visibility summary section.""" + lines = [] + if isinstance(timeline_data, dict) and "visibility_summary" in timeline_data: + vis_summary = timeline_data["visibility_summary"] + lines.extend([ + "", + "**Visibility Summary:**", + (f"- Customer-visible entries: {vis_summary.get('customer_visible_entries', 0)} " + f"({vis_summary.get('customer_visible_percentage', 0)}%)"), + (f"- Internal-only entries: {vis_summary.get('internal_only_entries', 0)} " + f"({vis_summary.get('internal_only_percentage', 0)}%)"), + ]) + return lines + + +def _build_last_activity(summary: dict) -> list: + """Build last activity timestamps section.""" + lines = [] + if summary.get("last_customer_message"): + lines.append(f"**Last customer message:** {summary['last_customer_message']}") + if summary.get("last_support_response"): + lines.append(f"**Last support response:** {summary['last_support_response']}") + return lines + + +def _build_recent_messages(conversation: list) -> list: + """Build recent messages section.""" + lines = [] + if conversation: + lines.extend([ + "", + "**Recent Activity:**" + ]) + + recent_messages = conversation[-3:] if len(conversation) > 3 else conversation + for msg in recent_messages: + speaker = msg.get("speaker", {}) + timestamp = msg.get("timestamp", "") + timestamp = timestamp[:10] if len(timestamp) >= 10 else timestamp + message_preview = (msg.get("message", "")[:100] + + ("..." 
if len(msg.get("message", "")) > 100 else "")) + + visibility_indicator = _get_visibility_indicator(msg.get("visibility_info", {})) + + lines.append( + f"- **{speaker.get('name', 'Unknown')}** ({timestamp}): " + f"{visibility_indicator}{message_preview}" + ) + return lines + + +def _build_artifacts_info(timeline_data) -> list: + """Build artifacts information section.""" + lines = [] + if isinstance(timeline_data, dict): + artifacts = timeline_data.get("all_artifacts", []) + if artifacts: + lines.extend([ + "", + f"**Attachments:** {len(artifacts)} file(s) attached" + ]) + return lines + + +def _get_visibility_indicator(visibility_info: dict) -> str: + """Get visibility indicator emoji for a message.""" + if not visibility_info: + return "" + + level = visibility_info.get("level", "external") + indicators = { + "private": "๐Ÿ”’ ", + "internal": "๐Ÿข ", + "external": "๐Ÿ‘ฅ ", + "public": "๐ŸŒ " + } + return indicators.get(level, "") \ No newline at end of file diff --git a/src/devrev_mcp/utils.py b/src/devrev_mcp/utils.py index 5e39bf1..bca3caa 100644 --- a/src/devrev_mcp/utils.py +++ b/src/devrev_mcp/utils.py @@ -11,6 +11,24 @@ from typing import Any, Dict, List, Union from fastmcp import Context +# Global session for connection pooling +_session: requests.Session = None + +def _get_session() -> requests.Session: + """Get or create a shared requests session for connection pooling.""" + global _session + if _session is None: + _session = requests.Session() + # Configure session for optimal performance + adapter = requests.adapters.HTTPAdapter( + pool_connections=10, + pool_maxsize=20, + max_retries=3 + ) + _session.mount('https://', adapter) + _session.mount('http://', adapter) + return _session + def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Response: """ Make an authenticated request to the DevRev API. 
@@ -28,7 +46,7 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp """ api_key = os.environ.get("DEVREV_API_KEY") if not api_key: - raise ValueError("DEVREV_API_KEY environment variable is not set") + raise ValueError("API authentication not configured") headers = { "Authorization": f"{api_key}", @@ -36,7 +54,8 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp } try: - response = requests.post( + session = _get_session() + response = session.post( f"https://api.devrev.ai/{endpoint}", headers=headers, json=payload, From c8c0af4ea795f023d67f3f1789fe0bb1b5097e95 Mon Sep 17 00:00:00 2001 From: Sara Date: Tue, 3 Jun 2025 12:36:25 -0400 Subject: [PATCH 17/17] more cleanup :) --- src/devrev_mcp/error_handler.py | 3 +- src/devrev_mcp/server.py | 34 +++++++-------- src/devrev_mcp/tools/create_object.py | 4 +- src/devrev_mcp/tools/update_object.py | 8 ++-- src/devrev_mcp/utils.py | 60 ++++++++++++++++++++------- 5 files changed, 69 insertions(+), 40 deletions(-) diff --git a/src/devrev_mcp/error_handler.py b/src/devrev_mcp/error_handler.py index 40837a7..a476a4b 100644 --- a/src/devrev_mcp/error_handler.py +++ b/src/devrev_mcp/error_handler.py @@ -119,7 +119,8 @@ async def wrapper(*args, **kwargs): # Convert to standardized error mcp_error = DevRevMCPError( f"Unexpected error: {str(e)}", - "INTERNAL_ERROR" + "INTERNAL_ERROR", + details={"original_exception": type(e).__name__, "cause": str(e)} ) return create_error_response(mcp_error, resource_type, resource_id) diff --git a/src/devrev_mcp/server.py b/src/devrev_mcp/server.py index 89785b1..6f57f99 100644 --- a/src/devrev_mcp/server.py +++ b/src/devrev_mcp/server.py @@ -88,9 +88,9 @@ async def create_object( type: str, title: str, applies_to_part: str, + ctx: Context, body: str = "", - owned_by: list[str] = None, - ctx: Context = None + owned_by: list[str] | None = None ) -> str: """ Create a new issue or ticket in DevRev. 
@@ -105,7 +105,7 @@ async def create_object( Returns: JSON string containing the created object information """ - return await create_object_tool(type, title, applies_to_part, body, owned_by, ctx) + return await create_object_tool(type, title, applies_to_part, ctx, body, owned_by) @mcp.tool( name="update_object", @@ -115,9 +115,9 @@ async def create_object( async def update_object( id: str, type: str, - title: str = None, - body: str = None, - ctx: Context = None + ctx: Context, + title: str | None = None, + body: str | None = None ) -> str: """ Update an existing issue or ticket in DevRev. @@ -132,7 +132,7 @@ async def update_object( Returns: JSON string containing the updated object information """ - return await update_object_tool(id, type, title, body, ctx, devrev_cache) + return await update_object_tool(id, type, ctx, devrev_cache, title, body) # Specialized resource handlers for different DevRev object types @@ -158,7 +158,7 @@ async def update_object( uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}", tags=TICKET_RESOURCE_TAGS ) -async def ticket(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: +async def ticket(ctx: Context, ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None) -> str: """ Access comprehensive DevRev ticket information with timeline and artifacts. Supports multiple URI formats: numeric (12345), TKT format (TKT-12345), and full don:core IDs. @@ -188,7 +188,7 @@ async def ticket(ticket_id: str = None, ticket_number: str = None, dev_org_id: s uri="devrev://timeline/TKT-{ticket_number}", tags=TIMELINE_RESOURCE_TAGS ) -async def ticket_timeline(ticket_id: str = None, ticket_number: str = None, ctx: Context = None) -> str: +async def ticket_timeline(ctx: Context, ticket_id: str = None, ticket_number: str = None) -> str: """ Access ticket timeline with conversation flow, artifacts, and detailed visibility information. 
Includes customer context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail. @@ -217,7 +217,7 @@ async def ticket_timeline(ticket_id: str = None, ticket_number: str = None, ctx: uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}/timeline/{entry_id}", tags=TIMELINE_ENTRY_RESOURCE_TAGS ) -async def timeline_entry(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, entry_id: str = None, ctx: Context = None) -> str: +async def timeline_entry(ctx: Context, ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, entry_id: str = None) -> str: """ Access individual timeline entry with detailed conversation data and navigation links. Provides specific entry context within ticket timeline. @@ -264,7 +264,7 @@ async def timeline_entry(ticket_id: str = None, ticket_number: str = None, dev_o uri="devrev://tickets/don:core:dvrv-us-1:devo/{dev_org_id}:ticket/{ticket_number}/artifacts", tags=TICKET_ARTIFACTS_RESOURCE_TAGS ) -async def ticket_artifacts(ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: +async def ticket_artifacts(ctx: Context, ticket_id: str = None, ticket_number: str = None, dev_org_id: str = None) -> str: """ Access all artifacts associated with a specific ticket. Returns collection of files, screenshots, and documents with download links and metadata. @@ -339,7 +339,7 @@ async def works(ctx: Context, work_id: str | None = None, work_type: str | None uri="devrev://issues/don:core:dvrv-us-1:devo/{dev_org_id}:issue/{issue_number}", tags=ISSUE_RESOURCE_TAGS ) -async def issue(issue_number: str = None, dev_org_id: str = None, ctx: Context = None) -> str: +async def issue(ctx: Context, issue_number: str = None, dev_org_id: str = None) -> str: """ Access comprehensive DevRev issue information with timeline and artifacts. Supports multiple URI formats: numeric (9031), ISS format (ISS-9031), and full don:core IDs. 
@@ -364,7 +364,7 @@ async def issue(issue_number: str = None, dev_org_id: str = None, ctx: Context = uri="devrev://issues/ISS-{issue_number}/timeline", tags=["issue-timeline", "devrev", "internal-work", "conversation", "visibility", "audit"] ) -async def issue_timeline(issue_id: str = None, issue_number: str = None, ctx: Context = None) -> str: +async def issue_timeline(ctx: Context, issue_id: str = None, issue_number: str = None) -> str: """ Access issue timeline with conversation flow, artifacts, and detailed visibility information. Includes internal context, visual visibility indicators (๐Ÿ”’๐Ÿข๐Ÿ‘ฅ๐ŸŒ), and comprehensive audit trail. @@ -406,7 +406,7 @@ async def issue_timeline(issue_id: str = None, issue_number: str = None, ctx: Co uri="devrev://issues/ISS-{issue_number}/artifacts", tags=["issue-artifacts", "devrev", "internal-work", "collection", "files", "navigation"] ) -async def issue_artifacts(issue_id: str = None, issue_number: str = None, ctx: Context = None) -> str: +async def issue_artifacts(ctx: Context, issue_id: str = None, issue_number: str = None) -> str: """ Access all artifacts associated with a specific issue. Returns collection of files, screenshots, and documents with download links and metadata. @@ -465,7 +465,7 @@ async def issue_artifacts(issue_id: str = None, issue_number: str = None, ctx: C """, tags=["timeline", "devrev", "tickets", "history", "conversations", "audit", "visibility"] ) -async def get_timeline_entries(id: str, format: str = "summary", ctx: Context = None) -> str: +async def get_timeline_entries(id: str, ctx: Context, format: str = "summary") -> str: """ Get all timeline entries for a DevRev ticket using its ID with flexible formatting. 
@@ -585,10 +585,10 @@ async def create_timeline_comment(work_id: str, body: str, ctx: Context) -> str: ) async def investigate_ticket( ticket_id: str, + ctx: Context, customer_context: str = "", priority_level: str = "normal", - special_notes: str = "", - ctx: Context = None + special_notes: str = "" ) -> str: """ Generate a systematic ticket investigation prompt following the support playbook. diff --git a/src/devrev_mcp/tools/create_object.py b/src/devrev_mcp/tools/create_object.py index 6904b37..13e9932 100644 --- a/src/devrev_mcp/tools/create_object.py +++ b/src/devrev_mcp/tools/create_object.py @@ -16,9 +16,9 @@ async def create_object( type: str, title: str, applies_to_part: str, + ctx: Context, body: str = "", - owned_by: list[str] = None, - ctx: Context = None + owned_by: list[str] | None = None ) -> str: """ Create a new issue or ticket in DevRev. diff --git a/src/devrev_mcp/tools/update_object.py b/src/devrev_mcp/tools/update_object.py index a6cb10b..efbc36a 100644 --- a/src/devrev_mcp/tools/update_object.py +++ b/src/devrev_mcp/tools/update_object.py @@ -15,10 +15,10 @@ async def update_object( id: str, type: str, - title: str = None, - body: str = None, - ctx: Context = None, - devrev_cache: dict = None + ctx: Context, + devrev_cache: dict | None = None, + title: str | None = None, + body: str | None = None ) -> str: """ Update an existing issue or ticket in DevRev. 
diff --git a/src/devrev_mcp/utils.py b/src/devrev_mcp/utils.py index bca3caa..bca1994 100644 --- a/src/devrev_mcp/utils.py +++ b/src/devrev_mcp/utils.py @@ -11,23 +11,41 @@ from typing import Any, Dict, List, Union from fastmcp import Context -# Global session for connection pooling -_session: requests.Session = None +class SessionManager: + """Singleton session manager for connection pooling and lifecycle management.""" + + _instance: 'SessionManager' = None + _session: requests.Session = None + + def __new__(cls) -> 'SessionManager': + if cls._instance is None: + cls._instance = super().__new__(cls) + return cls._instance + + def get_session(self) -> requests.Session: + """Get or create a shared requests session for connection pooling.""" + if self._session is None: + self._session = requests.Session() + # Configure session for optimal performance + adapter = requests.adapters.HTTPAdapter( + pool_connections=10, + pool_maxsize=20, + max_retries=3 + ) + self._session.mount('https://', adapter) + self._session.mount('http://', adapter) + return self._session + + def close_session(self) -> None: + """Close the session and clean up resources.""" + if self._session is not None: + self._session.close() + self._session = None +# Module-level convenience function def _get_session() -> requests.Session: - """Get or create a shared requests session for connection pooling.""" - global _session - if _session is None: - _session = requests.Session() - # Configure session for optimal performance - adapter = requests.adapters.HTTPAdapter( - pool_connections=10, - pool_maxsize=20, - max_retries=3 - ) - _session.mount('https://', adapter) - _session.mount('http://', adapter) - return _session + """Get the shared session instance.""" + return SessionManager().get_session() def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Response: """ @@ -62,8 +80,18 @@ def make_devrev_request(endpoint: str, payload: Dict[str, Any]) -> requests.Resp timeout=30 # Add 
timeout for better error handling ) return response + except requests.Timeout as e: + from .error_handler import APIError + raise APIError(endpoint, 408, "Request timeout after 30 seconds") from e + except requests.ConnectionError as e: + from .error_handler import APIError + raise APIError(endpoint, 503, f"Connection failed: {str(e)}") from e + except requests.HTTPError as e: + from .error_handler import APIError + raise APIError(endpoint, getattr(e.response, 'status_code', 500), f"HTTP error: {str(e)}") from e except requests.RequestException as e: - raise requests.RequestException(f"DevRev API request failed for endpoint '{endpoint}': {e}") from e + from .error_handler import APIError + raise APIError(endpoint, 500, f"Request failed: {str(e)}") from e