feat(usage): aggregated tag daily activity endpoint + UI fallback; MCP optional; tests included #13446

Status: Open · wants to merge 9 commits into main
3 changes: 2 additions & 1 deletion .gitignore
@@ -93,4 +93,5 @@ test.py
 
 litellm_config.yaml
 .cursor
-.vscode/launch.json
+.vscode/launch.json
+scripts/seed_mock_data.py
26 changes: 11 additions & 15 deletions litellm/proxy/management_endpoints/common_daily_activity.py
@@ -118,24 +118,24 @@ def update_breakdown_metrics(
         record,
     )
 
-    if record.mcp_namespaced_tool_name:
-        if record.mcp_namespaced_tool_name not in breakdown.mcp_servers:
-            breakdown.mcp_servers[record.mcp_namespaced_tool_name] = MetricWithMetadata(
+    # MCP breakdown (schema may not have this column on older deployments)
+    mcp_tool_name = getattr(record, "mcp_namespaced_tool_name", None)
+    if mcp_tool_name:
+        if mcp_tool_name not in breakdown.mcp_servers:
+            breakdown.mcp_servers[mcp_tool_name] = MetricWithMetadata(
                 metrics=SpendMetrics(),
                 metadata={},
             )
-        breakdown.mcp_servers[record.mcp_namespaced_tool_name].metrics = update_metrics(
-            breakdown.mcp_servers[record.mcp_namespaced_tool_name].metrics, record
+        breakdown.mcp_servers[mcp_tool_name].metrics = update_metrics(
+            breakdown.mcp_servers[mcp_tool_name].metrics, record
         )
 
         # Update API key breakdown for this MCP server
         if (
             record.api_key
-            not in breakdown.mcp_servers[
-                record.mcp_namespaced_tool_name
-            ].api_key_breakdown
+            not in breakdown.mcp_servers[mcp_tool_name].api_key_breakdown
         ):
-            breakdown.mcp_servers[record.mcp_namespaced_tool_name].api_key_breakdown[
+            breakdown.mcp_servers[mcp_tool_name].api_key_breakdown[
                 record.api_key
             ] = KeyMetricWithMetadata(
                 metrics=SpendMetrics(),
@@ -149,12 +149,8 @@
             ),
         )
 
-        breakdown.mcp_servers[record.mcp_namespaced_tool_name].api_key_breakdown[
-            record.api_key
-        ].metrics = update_metrics(
-            breakdown.mcp_servers[record.mcp_namespaced_tool_name]
-            .api_key_breakdown[record.api_key]
-            .metrics,
+        breakdown.mcp_servers[mcp_tool_name].api_key_breakdown[record.api_key].metrics = update_metrics(
+            breakdown.mcp_servers[mcp_tool_name].api_key_breakdown[record.api_key].metrics,
             record,
         )
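Note for reviewers: the core of this change is replacing direct attribute access with `getattr(record, "mcp_namespaced_tool_name", None)`, so rows coming from deployments whose schema predates the MCP column no longer raise `AttributeError`. A minimal sketch of the pattern, using a hypothetical `LegacyRecord` stand-in (not a real LiteLLM type):

```python
# Hypothetical stand-in for a row from an older schema that lacks
# the mcp_namespaced_tool_name column.
class LegacyRecord:
    api_key = "vk_test"


record = LegacyRecord()

# Direct access would raise AttributeError here:
#     record.mcp_namespaced_tool_name
# getattr with a default degrades gracefully instead:
mcp_tool_name = getattr(record, "mcp_namespaced_tool_name", None)
if mcp_tool_name:
    ...  # update the MCP breakdown only when the field is present
else:
    print("no MCP field; breakdown skipped")  # this branch runs for the legacy record
```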
42 changes: 42 additions & 0 deletions litellm/proxy/management_endpoints/tag_management_endpoints.py
@@ -24,6 +24,7 @@
 from litellm.proxy.management_endpoints.common_daily_activity import (
     SpendAnalyticsPaginatedResponse,
     get_daily_activity,
+    get_daily_activity_aggregated,
 )
 from litellm.types.tag_management import (
     LiteLLM_DailyTagSpendTable,
@@ -480,3 +481,44 @@ async def get_tag_daily_activity(
         page=page,
         page_size=page_size,
     )
+
+
+# Aggregated variant to avoid heavy pagination queries on large datasets
+from litellm.proxy.management_helpers.utils import management_endpoint_wrapper
+
+
+@router.get(
+    "/tag/daily/activity/aggregated",
+    response_model=SpendAnalyticsPaginatedResponse,
+    tags=["tag management"],
+    dependencies=[Depends(user_api_key_auth)],
+)
+@management_endpoint_wrapper
+async def get_tag_daily_activity_aggregated(
+    tags: Optional[str] = None,
+    start_date: Optional[str] = None,
+    end_date: Optional[str] = None,
+    model: Optional[str] = None,
+    api_key: Optional[str] = None,
+    user_api_key_dict: UserAPIKeyAuth = Depends(user_api_key_auth),
+):
+    """
+    Aggregated analytics for tag daily activity without pagination.
+    Returns the same response shape as the paginated endpoint with single-page metadata.
+    """
+    from litellm.proxy.proxy_server import prisma_client
+
+    # Convert comma-separated tags string to list if provided
+    tag_list = tags.split(",") if tags else None
+
+    return await get_daily_activity_aggregated(
+        prisma_client=prisma_client,
+        table_name="litellm_dailytagspend",
+        entity_id_field="tag",
+        entity_id=tag_list,
+        entity_metadata_field=None,
+        start_date=start_date,
+        end_date=end_date,
+        model=model,
+        api_key=api_key,
+    )
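For context, here is roughly how a client would exercise the new route once the proxy is running. This is a sketch, not part of the PR: the base URL and key are placeholders, and it assumes the standard proxy bearer-token auth that `user_api_key_auth` enforces. The assertions mirror what the smoke test below checks.

```python
# Sketch: calling the new aggregated endpoint. Base URL and key are
# placeholders; dates and tags are illustrative.
import requests

resp = requests.get(
    "http://localhost:4000/tag/daily/activity/aggregated",
    headers={"Authorization": "Bearer sk-1234"},
    params={
        "start_date": "2024-01-01",
        "end_date": "2024-01-07",
        "tags": "prod,staging",  # comma-separated; split into a list server-side
    },
)
resp.raise_for_status()
data = resp.json()
# Same shape as the paginated endpoint, collapsed to a single page:
assert data["metadata"]["page"] == 1
assert data["metadata"]["has_more"] is False
```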
60 changes: 60 additions & 0 deletions tests/test_litellm/test_common_daily_activity_mcp_optional.py
@@ -0,0 +1,60 @@
+import sys
+import os
+
+sys.path.insert(0, os.path.abspath("../../"))
+
+from litellm.types.proxy.management_endpoints.common_daily_activity import (
+    BreakdownMetrics,
+    SpendMetrics,
+)
+from litellm.proxy.management_endpoints.common_daily_activity import (
+    update_breakdown_metrics,
+)
+
+
+class _RecordWithoutMCP:
+    """Minimal record shape required by update_breakdown_metrics.
+
+    Intentionally does NOT provide mcp_namespaced_tool_name, to ensure the
+    function handles the missing field gracefully.
+    """
+
+    def __init__(self) -> None:
+        self.model = "gpt-4o"
+        self.model_group = "gpt-4o"
+        self.api_key = "vk_test"
+        self.custom_llm_provider = "openai"
+        self.spend = 1.23
+        self.prompt_tokens = 100
+        self.completion_tokens = 50
+        self.cache_read_input_tokens = 10
+        self.cache_creation_input_tokens = 5
+        self.api_requests = 3
+        self.successful_requests = 3
+        self.failed_requests = 0
+
+
+def test_update_breakdown_metrics_with_missing_mcp_field():
+    record = _RecordWithoutMCP()
+    breakdown = BreakdownMetrics()
+
+    model_metadata = {}
+    provider_metadata = {}
+    api_key_metadata = {}
+
+    # Should not raise even though the record lacks `mcp_namespaced_tool_name`
+    updated = update_breakdown_metrics(
+        breakdown=breakdown,
+        record=record,
+        model_metadata=model_metadata,
+        provider_metadata=provider_metadata,
+        api_key_metadata=api_key_metadata,
+    )
+
+    # Assert: model and provider metrics were updated
+    assert "gpt-4o" in updated.models
+    assert "openai" in updated.providers
+    # MCP breakdown remains empty without the field
+    assert updated.mcp_servers == {}

63 changes: 63 additions & 0 deletions tests/test_litellm/test_tag_aggregated_endpoint.py
@@ -0,0 +1,63 @@
+import os
+import sys
+from datetime import datetime, timedelta
+
+sys.path.insert(0, os.path.abspath("../../"))
+
+import pytest
+
+import litellm
+from litellm.caching.caching import DualCache
+from litellm.proxy.utils import PrismaClient, ProxyLogging
+from litellm.proxy.management_endpoints.common_daily_activity import (
+    get_daily_activity_aggregated,
+)
+
+proxy_logging_obj = ProxyLogging(user_api_key_cache=DualCache())
+
+
+@pytest.fixture
+def prisma_client():
+    from litellm.proxy.proxy_cli import append_query_params
+
+    params = {"connection_limit": 100, "pool_timeout": 60}
+    database_url = os.getenv("DATABASE_URL")
+    modified_url = append_query_params(database_url, params)
+    os.environ["DATABASE_URL"] = modified_url
+
+    prisma_client = PrismaClient(
+        database_url=os.environ["DATABASE_URL"], proxy_logging_obj=proxy_logging_obj
+    )
+    return prisma_client
+
+
+@pytest.mark.asyncio
+async def test_tag_daily_activity_aggregated_smoke(prisma_client):
+    await prisma_client.connect()
+
+    # use the last 7 days
+    end = datetime.utcnow().date()
+    start = end - timedelta(days=7)
+
+    # call the shared aggregated function directly (as the endpoint does)
+    resp = await get_daily_activity_aggregated(
+        prisma_client=prisma_client,
+        table_name="litellm_dailytagspend",
+        entity_id_field="tag",
+        entity_id=None,
+        entity_metadata_field=None,
+        start_date=start.strftime("%Y-%m-%d"),
+        end_date=end.strftime("%Y-%m-%d"),
+        model=None,
+        api_key=None,
+    )
+
+    assert resp is not None
+    assert hasattr(resp, "results")
+    assert hasattr(resp, "metadata")
+    # results can be empty in a fresh DB, but the shape must be correct
+    assert resp.metadata.page == 1
+    assert resp.metadata.total_pages == 1
+    assert resp.metadata.has_more is False

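One caveat on this fixture: it reads `DATABASE_URL` unconditionally, so on a machine without a database the test fails at connect time rather than skipping. A possible guard, not included in this PR, would be:

```python
# Hypothetical guard, not part of this PR: skip the module cleanly
# when no database is configured instead of failing in the fixture.
import os

import pytest

pytestmark = pytest.mark.skipif(
    os.getenv("DATABASE_URL") is None,
    reason="requires a live database (DATABASE_URL not set)",
)
```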
36 changes: 22 additions & 14 deletions ui/litellm-dashboard/src/components/networking.tsx
@@ -1636,46 +1636,54 @@ export const tagDailyActivityCall = async (
   tags: string[] | null = null
 ) => {
   /**
-   * Get daily user activity on proxy
+   * Prefer aggregated endpoint (no pagination). Fall back to paginated if unavailable
    */
-  try {
+  const buildQuery = (useAggregated: boolean) => {
     let url = proxyBaseUrl
-      ? `${proxyBaseUrl}/tag/daily/activity`
-      : `/tag/daily/activity`;
+      ? `${proxyBaseUrl}${useAggregated ? "/tag/daily/activity/aggregated" : "/tag/daily/activity"}`
+      : `${useAggregated ? "/tag/daily/activity/aggregated" : "/tag/daily/activity"}`;
     const queryParams = new URLSearchParams();
     queryParams.append("start_date", formatDate(startTime));
     queryParams.append("end_date", formatDate(endTime));
-    queryParams.append("page_size", "1000");
-    queryParams.append("page", page.toString());
-    if (tags) {
+    if (!useAggregated) {
+      queryParams.append("page_size", "1000");
+      queryParams.append("page", page.toString());
+    }
+    if (tags && tags.length > 0) {
       queryParams.append("tags", tags.join(","));
     }
     const queryString = queryParams.toString();
     if (queryString) {
       url += `?${queryString}`;
     }
+    return url;
+  };
+
+  const fetchOnce = async (useAggregated: boolean) => {
+    const url = buildQuery(useAggregated);
     const response = await fetch(url, {
       method: "GET",
       headers: {
         [globalLitellmHeaderName]: `Bearer ${accessToken}`,
         "Content-Type": "application/json",
       },
     });
 
     if (!response.ok) {
       const errorData = await response.json();
       const errorMessage = deriveErrorMessage(errorData);
       handleError(errorMessage);
       throw new Error(errorMessage);
     }
+    return response.json();
+  };
 
-    const data = await response.json();
-    return data;
-  } catch (error) {
-    console.error("Failed to create key:", error);
-    throw error;
+  try {
+    // Try aggregated first
+    return await fetchOnce(true);
+  } catch (err) {
+    console.warn("/tag/daily/activity/aggregated failed, falling back to paginated", err);
+    // Fallback to paginated API
+    return await fetchOnce(false);
   }
 };

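The same prefer-aggregated-then-fall-back behavior, sketched in Python for non-browser clients. The endpoint paths are the real ones added above; `fetch_tag_activity`, the base URL, and the key are illustrative names, not part of the PR.

```python
# Sketch: prefer the aggregated route, fall back to the paginated one
# when talking to an older proxy that does not serve it yet.
import requests


def fetch_tag_activity(base_url: str, key: str, params: dict) -> dict:
    headers = {"Authorization": f"Bearer {key}"}
    try:
        resp = requests.get(
            f"{base_url}/tag/daily/activity/aggregated",
            headers=headers,
            params=params,
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json()
    except requests.RequestException:
        # Older proxies 404 on the aggregated route; retry paginated,
        # mirroring the page_size the dashboard uses.
        resp = requests.get(
            f"{base_url}/tag/daily/activity",
            headers=headers,
            params={**params, "page": 1, "page_size": 1000},
            timeout=30,
        )
        resp.raise_for_status()
        return resp.json()
```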