chore: initial snapshot for gitea/github upload

This commit is contained in:
Your Name
2026-03-26 16:04:46 +08:00
commit a699a1ac98
3497 changed files with 1586237 additions and 0 deletions

View File

@@ -0,0 +1,85 @@
"""
Streaming utilities for ChatGPT provider.
Normalizes non-spec-compliant tool_call chunks from the ChatGPT backend API.
"""
from typing import Any, Dict, Optional
class ChatGPTToolCallNormalizer:
    """
    Stream wrapper that rewrites tool_call deltas from the ChatGPT backend
    API into OpenAI-spec-compliant form.

    The backend at chatgpt.com/backend-api deviates from the spec in two
    ways:

    1. every tool call delta carries ``index == 0``, even when several
       parallel tool calls are in flight;
    2. redundant "closing" chunks repeat the ``id`` and ``name`` of a tool
       call that was already announced.

    This wrapper assigns monotonically increasing indices as new tool-call
    ids appear, attributes id-less continuation deltas to the most recently
    seen call, and drops the redundant closing chunks entirely. Both sync
    and async iteration are supported; every other attribute access is
    proxied to the wrapped stream.
    """
    def __init__(self, stream: Any):
        self._stream = stream
        # tool_call id -> the spec-correct index we assigned to it
        self._seen_ids: Dict[str, int] = {}
        # next index to hand out when a brand-new tool call appears
        self._next_index: int = 0
        # id of the most recently opened tool call; continuation deltas
        # (which arrive with id=None) are attributed to this call
        self._last_id: Optional[str] = None
    def __getattr__(self, name: str) -> Any:
        # Transparently proxy everything else to the underlying stream.
        return getattr(self._stream, name)
    def __iter__(self):
        return self
    def __aiter__(self):
        return self
    def __next__(self):
        # Keep pulling chunks until one survives normalization
        # (pure-duplicate chunks normalize to None and are swallowed).
        while True:
            fixed = self._normalize(next(self._stream))
            if fixed is not None:
                return fixed
    async def __anext__(self):
        # Async twin of __next__.
        while True:
            fixed = self._normalize(await self._stream.__anext__())
            if fixed is not None:
                return fixed
    def _normalize(self, chunk: Any) -> Any:
        """Rewrite tool_calls in *chunk*; return None when nothing remains."""
        if not chunk.choices:
            return chunk
        delta = chunk.choices[0].delta
        if delta is None or not delta.tool_calls:
            return chunk
        kept = [tc for tc in delta.tool_calls if self._assign_index(tc)]
        if not kept:
            # Every entry was a redundant closing chunk — drop the chunk.
            return None
        delta.tool_calls = kept
        return chunk
    def _assign_index(self, tc: Any) -> bool:
        """Fix ``tc.index`` in place; return False for duplicates to drop."""
        if not tc.id:
            # Continuation delta — route it to the call it belongs to.
            if self._last_id:
                tc.index = self._seen_ids[self._last_id]
            return True
        if tc.id in self._seen_ids:
            # Redundant "closing" chunk repeating id/name — discard it.
            return False
        # First sighting of this tool call: assign the next free index.
        self._seen_ids[tc.id] = self._next_index
        tc.index = self._next_index
        self._last_id = tc.id
        self._next_index += 1
        return True

View File

@@ -0,0 +1,79 @@
from typing import Any, List, Optional, Tuple
from litellm.exceptions import AuthenticationError
from litellm.llms.openai.openai import OpenAIConfig
from litellm.types.llms.openai import AllMessageValues
from ..authenticator import Authenticator
from ..common_utils import (
GetAccessTokenError,
ensure_chatgpt_session_id,
get_chatgpt_default_headers,
)
from .streaming_utils import ChatGPTToolCallNormalizer
class ChatGPTConfig(OpenAIConfig):
    """
    litellm provider config for the ChatGPT backend API.

    Extends ``OpenAIConfig`` with:

    - dynamic api_base / access-token resolution via ``Authenticator``,
    - ChatGPT-specific default headers (account id + per-call session id),
    - stream post-processing that normalizes non-spec tool_call chunks,
    - a ``stream`` default of ``False`` in mapped params.
    """
    def __init__(
        self,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
        custom_llm_provider: str = "openai",
    ) -> None:
        # NOTE(review): api_key/api_base/custom_llm_provider are accepted for
        # signature compatibility but intentionally ignored here — credentials
        # and the base URL are resolved dynamically through the Authenticator.
        super().__init__()
        self.authenticator = Authenticator()
    def _get_openai_compatible_provider_info(
        self,
        model: str,
        api_base: Optional[str],
        api_key: Optional[str],
        custom_llm_provider: str,
    ) -> Tuple[Optional[str], Optional[str], str]:
        """Resolve (api_base, api_key, provider) from the authenticator.

        The ``api_base``/``api_key`` arguments are ignored; both values come
        from the authenticator so tokens stay fresh per request.

        Raises:
            AuthenticationError: when an access token cannot be obtained.
        """
        dynamic_api_base = self.authenticator.get_api_base()
        try:
            dynamic_api_key = self.authenticator.get_access_token()
        except GetAccessTokenError as e:
            # Chain the original failure so the root cause stays visible
            # in tracebacks instead of being reported as "during handling".
            raise AuthenticationError(
                model=model,
                llm_provider=custom_llm_provider,
                message=str(e),
            ) from e
        return dynamic_api_base, dynamic_api_key, custom_llm_provider
    def validate_environment(
        self,
        headers: dict,
        model: str,
        messages: List[AllMessageValues],
        optional_params: dict,
        litellm_params: dict,
        api_key: Optional[str] = None,
        api_base: Optional[str] = None,
    ) -> dict:
        """Build request headers: OpenAI defaults + ChatGPT-specific ones.

        Headers validated by the parent class take precedence over the
        ChatGPT defaults (account id, session id, etc.).
        """
        validated_headers = super().validate_environment(
            headers, model, messages, optional_params, litellm_params, api_key, api_base
        )
        account_id = self.authenticator.get_account_id()
        # Ensure a stable session id exists for this call's litellm_params.
        session_id = ensure_chatgpt_session_id(litellm_params)
        default_headers = get_chatgpt_default_headers(
            api_key or "", account_id, session_id
        )
        # validated_headers wins on key collisions.
        return {**default_headers, **validated_headers}
    def post_stream_processing(self, stream: Any) -> Any:
        """Wrap the raw stream to fix non-spec tool_call chunks."""
        return ChatGPTToolCallNormalizer(stream)
    def map_openai_params(
        self,
        non_default_params: dict,
        optional_params: dict,
        model: str,
        drop_params: bool,
    ) -> dict:
        """Map params via the parent, defaulting ``stream`` to False."""
        optional_params = super().map_openai_params(
            non_default_params, optional_params, model, drop_params
        )
        # Only set stream when the caller didn't specify it.
        optional_params.setdefault("stream", False)
        return optional_params