Added LiteLLM to the stack
This commit is contained in:
209
Development/litellm/db_scripts/create_views.py
Normal file
209
Development/litellm/db_scripts/create_views.py
Normal file
@@ -0,0 +1,209 @@
|
||||
"""
|
||||
python script to pre-create all views required by LiteLLM Proxy Server
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
|
||||
# Enter your DATABASE_URL here
|
||||
|
||||
from prisma import Prisma
|
||||
|
||||
# Module-level Prisma client shared by every view check below.
# The raised HTTP timeout (60000 — units per prisma-client-py's http
# config; confirm) gives slow DDL statements room to finish.
db = Prisma(http={"timeout": 60000})
|
||||
|
||||
|
||||
async def _ensure_view(view_name: str, create_sql: str) -> None:
    """Create the named view if it is not already queryable.

    Probes the view with a cheap ``SELECT 1 ... LIMIT 1``; any failure is
    taken to mean the view is missing, in which case ``create_sql`` is
    executed to create it.

    Args:
        view_name: Quoted-identifier name of the view to probe.
        create_sql: Full DDL statement that creates the view.
    """
    try:
        # Try to select one row from the view
        await db.query_raw(f'SELECT 1 FROM "{view_name}" LIMIT 1')
        print(f"{view_name} Exists!")  # noqa
    except Exception:
        # If an error occurs, the view does not exist, so create it
        await db.execute_raw(query=create_sql)
        print(f"{view_name} Created!")  # noqa


async def check_view_exists():  # noqa: PLR0915
    """
    Checks if the LiteLLM_VerificationTokenView and MonthlyGlobalSpend exists in the user's db.

    LiteLLM_VerificationTokenView: This view is used for getting the token + team data in user_api_key_auth

    MonthlyGlobalSpend: This view is used for the admin view to see global spend for this month

    If the view doesn't exist, one will be created.
    """
    # connect to dB
    await db.connect()

    # Token + team join consumed by user_api_key_auth.
    await _ensure_view(
        "LiteLLM_VerificationTokenView",
        """
        CREATE VIEW "LiteLLM_VerificationTokenView" AS
        SELECT
            v.*,
            t.spend AS team_spend,
            t.max_budget AS team_max_budget,
            t.tpm_limit AS team_tpm_limit,
            t.rpm_limit AS team_rpm_limit
        FROM "LiteLLM_VerificationToken" v
        LEFT JOIN "LiteLLM_TeamTable" t ON v.team_id = t.team_id;
        """,
    )

    # Daily spend totals for the last 30 days (admin global-spend view).
    await _ensure_view(
        "MonthlyGlobalSpend",
        """
        CREATE OR REPLACE VIEW "MonthlyGlobalSpend" AS
        SELECT
            DATE("startTime") AS date,
            SUM("spend") AS spend
        FROM
            "LiteLLM_SpendLogs"
        WHERE
            "startTime" >= (CURRENT_DATE - INTERVAL '30 days')
        GROUP BY
            DATE("startTime");
        """,
    )

    # Top keys by spend over the last 30 days, with key alias/name.
    await _ensure_view(
        "Last30dKeysBySpend",
        """
        CREATE OR REPLACE VIEW "Last30dKeysBySpend" AS
        SELECT
            L."api_key",
            V."key_alias",
            V."key_name",
            SUM(L."spend") AS total_spend
        FROM
            "LiteLLM_SpendLogs" L
        LEFT JOIN
            "LiteLLM_VerificationToken" V
        ON
            L."api_key" = V."token"
        WHERE
            L."startTime" >= (CURRENT_DATE - INTERVAL '30 days')
        GROUP BY
            L."api_key", V."key_alias", V."key_name"
        ORDER BY
            total_spend DESC;
        """,
    )

    # Top models by spend over the last 30 days (empty model names excluded).
    await _ensure_view(
        "Last30dModelsBySpend",
        """
        CREATE OR REPLACE VIEW "Last30dModelsBySpend" AS
        SELECT
            "model",
            SUM("spend") AS total_spend
        FROM
            "LiteLLM_SpendLogs"
        WHERE
            "startTime" >= (CURRENT_DATE - INTERVAL '30 days')
            AND "model" != ''
        GROUP BY
            "model"
        ORDER BY
            total_spend DESC;
        """,
    )

    # Daily spend per API key for the last 30 days.
    await _ensure_view(
        "MonthlyGlobalSpendPerKey",
        """
        CREATE OR REPLACE VIEW "MonthlyGlobalSpendPerKey" AS
        SELECT
            DATE("startTime") AS date,
            SUM("spend") AS spend,
            api_key as api_key
        FROM
            "LiteLLM_SpendLogs"
        WHERE
            "startTime" >= (CURRENT_DATE - INTERVAL '30 days')
        GROUP BY
            DATE("startTime"),
            api_key;
        """,
    )

    # Daily spend per (user, api_key) pair for the last 30 days.
    await _ensure_view(
        "MonthlyGlobalSpendPerUserPerKey",
        """
        CREATE OR REPLACE VIEW "MonthlyGlobalSpendPerUserPerKey" AS
        SELECT
            DATE("startTime") AS date,
            SUM("spend") AS spend,
            api_key as api_key,
            "user" as "user"
        FROM
            "LiteLLM_SpendLogs"
        WHERE
            "startTime" >= (CURRENT_DATE - INTERVAL '30 days')
        GROUP BY
            DATE("startTime"),
            "user",
            api_key;
        """,
    )

    # Per-tag daily spend; request_tags is unnested so each tag gets a row.
    await _ensure_view(
        "DailyTagSpend",
        """
        CREATE OR REPLACE VIEW "DailyTagSpend" AS
        SELECT
            jsonb_array_elements_text(request_tags) AS individual_request_tag,
            DATE(s."startTime") AS spend_date,
            COUNT(*) AS log_count,
            SUM(spend) AS total_spend
        FROM "LiteLLM_SpendLogs" s
        GROUP BY individual_request_tag, DATE(s."startTime");
        """,
    )

    # Top 100 end users by spend over the last 30 days.
    await _ensure_view(
        "Last30dTopEndUsersSpend",
        """
        CREATE VIEW "Last30dTopEndUsersSpend" AS
        SELECT end_user, COUNT(*) AS total_events, SUM(spend) AS total_spend
        FROM "LiteLLM_SpendLogs"
        WHERE end_user <> '' AND end_user <> user
        AND "startTime" >= CURRENT_DATE - INTERVAL '30 days'
        GROUP BY end_user
        ORDER BY total_spend DESC
        LIMIT 100;
        """,
    )

    return


if __name__ == "__main__":
    # Guarded so importing this module no longer triggers the DB work.
    asyncio.run(check_view_exists())
|
187
Development/litellm/db_scripts/migrate_keys.py
Normal file
187
Development/litellm/db_scripts/migrate_keys.py
Normal file
@@ -0,0 +1,187 @@
|
||||
from prisma import Prisma
|
||||
import csv
|
||||
import json
|
||||
import asyncio
|
||||
from datetime import datetime
|
||||
from typing import Optional, List, Dict, Any
|
||||
|
||||
import os
|
||||
|
||||
## VARIABLES
|
||||
DATABASE_URL = "postgresql://postgres:postgres@localhost:5432/litellm"
|
||||
CSV_FILE_PATH = "./path_to_csv.csv"
|
||||
|
||||
os.environ["DATABASE_URL"] = DATABASE_URL
|
||||
|
||||
|
||||
async def parse_csv_value(value: str, field_type: str) -> Any:
    """Parse a raw CSV cell into the Python value expected by the DB schema.

    Args:
        value: Raw cell text from the CSV export; "NULL", "" or None mean
            the value is missing.
        field_type: One of "boolean", "float", "int", "bigint", "datetime",
            "json", "string_array"; any other type is returned unchanged.

    Returns:
        The coerced value, or None when the cell is missing or unparseable
        (for int/bigint/datetime fields).
    """
    if value is None or value in ("NULL", ""):
        return None

    if field_type == "boolean":
        return value.lower() == "true"
    if field_type == "float":
        return float(value)
    if field_type in ("int", "bigint"):
        # int() accepts negative numbers, which the previous str.isdigit()
        # check silently rejected; unparseable cells still become None.
        try:
            return int(value)
        except ValueError:
            return None
    if field_type == "datetime":
        try:
            # CSV exports use a trailing "Z" for UTC, which
            # datetime.fromisoformat (pre-3.11) cannot parse directly.
            return datetime.fromisoformat(value.replace("Z", "+00:00"))
        except ValueError:
            return None
    if field_type == "json":
        # Value is already JSON text; empty cells were handled above.
        return value
    if field_type == "string_array":
        # Handle Postgres-style string arrays like {default-models}.
        if value.startswith("{") and value.endswith("}"):
            content = value[1:-1]  # Remove braces
            if content:
                return [item.strip() for item in content.split(",")]
            return []
        return []
    return value
|
||||
|
||||
|
||||
# Maps each CSV column to the parse type understood by parse_csv_value.
# "token" and "team_id" are handled separately in _build_token_data.
_TOKEN_FIELD_TYPES: Dict[str, str] = {
    "key_name": "string",
    "key_alias": "string",
    "soft_budget_cooldown": "boolean",
    "spend": "float",
    "expires": "datetime",
    "models": "string_array",
    "aliases": "json",
    "config": "json",
    "user_id": "string",
    "permissions": "json",
    "max_parallel_requests": "int",
    "metadata": "json",
    "tpm_limit": "bigint",
    "rpm_limit": "bigint",
    "max_budget": "float",
    "budget_duration": "string",
    "budget_reset_at": "datetime",
    "allowed_cache_controls": "string_array",
    "model_spend": "json",
    "model_max_budget": "json",
    "budget_id": "string",
    "blocked": "boolean",
    "created_at": "datetime",
    "updated_at": "datetime",
    "allowed_routes": "string_array",
    "object_permission_id": "string",
    "created_by": "string",
    "updated_by": "string",
    "organization_id": "string",
}


async def _build_token_data(row: Dict[str, str]) -> Dict[str, Any]:
    """Convert one CSV row into the create() payload for a verification token.

    Missing or unparseable cells come back as None from parse_csv_value and
    are dropped so the database defaults apply instead.
    """
    # Normalize a missing team_id to None.
    team_id: Optional[str] = row.get("team_id")
    if team_id in ("NULL", ""):
        team_id = None

    data: Dict[str, Any] = {"token": row["token"], "team_id": team_id}
    for column, field_type in _TOKEN_FIELD_TYPES.items():
        data[column] = await parse_csv_value(row[column], field_type)

    # Remove None values to use database defaults
    return {k: v for k, v in data.items() if v is not None}


async def migrate_verification_tokens():
    """Main migration function.

    Reads CSV_FILE_PATH and inserts each row into LiteLLM_VerificationToken,
    skipping tokens that already exist. Per-row failures are counted and
    logged without aborting the run; the Prisma client is always disconnected.
    """
    prisma = Prisma()
    await prisma.connect()

    try:
        processed_count = 0
        error_count = 0

        # Read CSV file
        with open(CSV_FILE_PATH, "r", encoding="utf-8") as file:
            for row in csv.DictReader(file):
                try:
                    verification_token_data = await _build_token_data(row)

                    # Check if token already exists
                    existing_token = await prisma.litellm_verificationtoken.find_unique(
                        where={"token": verification_token_data["token"]}
                    )
                    if existing_token:
                        print(
                            f"Token {verification_token_data['token']} already exists, skipping..."
                        )
                        continue

                    # Insert the record
                    await prisma.litellm_verificationtoken.create(
                        data=verification_token_data
                    )

                    processed_count += 1
                    print(
                        f"Successfully migrated token: {verification_token_data['token']}"
                    )

                except Exception as e:
                    # Keep going: one bad row must not abort the migration.
                    error_count += 1
                    print(
                        f"Error processing row with token {row.get('token', 'unknown')}: {str(e)}"
                    )
                    continue

        print("\nMigration completed!")
        print(f"Successfully processed: {processed_count} records")
        print(f"Errors encountered: {error_count} records")

    except Exception as e:
        print(f"Migration failed: {str(e)}")

    finally:
        await prisma.disconnect()


if __name__ == "__main__":
    asyncio.run(migrate_verification_tokens())
|
34
Development/litellm/db_scripts/update_unassigned_teams.py
Normal file
34
Development/litellm/db_scripts/update_unassigned_teams.py
Normal file
@@ -0,0 +1,34 @@
|
||||
from prisma import Prisma
|
||||
from litellm._logging import verbose_logger
|
||||
|
||||
|
||||
async def apply_db_fixes(db: Prisma):
    """
    Backfill LiteLLM_SpendLogs.team_id from the owning verification token.

    Do Not Run this in production, only use it as a one-time fix.

    Args:
        db: Connected Prisma client.

    Raises:
        Exception: wrapping (and chaining) any error raised by the UPDATE.
    """
    verbose_logger.warning(
        "DO NOT run this in Production....Running update_unassigned_teams"
    )
    try:
        # Set team_id only where it is currently NULL AND the api_key maps
        # to a known verification token; the EXISTS guard keeps unmatched
        # rows untouched instead of overwriting them with NULL.
        sql_query = """
        UPDATE "LiteLLM_SpendLogs"
        SET team_id = (
            SELECT vt.team_id
            FROM "LiteLLM_VerificationToken" vt
            WHERE vt.token = "LiteLLM_SpendLogs".api_key
        )
        WHERE team_id IS NULL
        AND EXISTS (
            SELECT 1
            FROM "LiteLLM_VerificationToken" vt
            WHERE vt.token = "LiteLLM_SpendLogs".api_key
        );
        """
        response = await db.query_raw(sql_query)
        # print() does not do logger-style "%s" interpolation (the old call
        # printed the literal "%s"); format the message explicitly.
        print(f"Updated unassigned teams, Response={response}")
    except Exception as e:
        # Chain the original exception so the root cause stays visible.
        raise Exception(f"Error apply_db_fixes: {str(e)}") from e
    return
|
Reference in New Issue
Block a user