"""
Configuration constants for AnyCoder application.
"""
import os
from datetime import datetime
from typing import Optional
# Gradio supported languages for syntax highlighting
GRADIO_SUPPORTED_LANGUAGES = [
"python", "json", "html", "javascript"
]
# Search/Replace Constants
SEARCH_START = "<<<<<<< SEARCH"
DIVIDER = "======="
REPLACE_END = ">>>>>>> REPLACE"
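# Illustrative sketch (not part of the original module): the three markers
# above are assumed to delimit LLM-emitted edit blocks of the form
#   <<<<<<< SEARCH / old code / ======= / new code / >>>>>>> REPLACE
# The helper below is hypothetical; the application may parse these blocks
# differently.
def _apply_search_replace_block(source: str, block: str) -> str:
    """Apply a single SEARCH/REPLACE block to `source` (illustrative only)."""
    _, _, rest = block.partition(SEARCH_START)
    search_text, _, remainder = rest.partition(DIVIDER)
    replace_text, _, _ = remainder.partition(REPLACE_END)
    return source.replace(search_text.strip("\n"), replace_text.strip("\n"), 1)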
# Gradio Documentation Auto-Update System
GRADIO_LLMS_TXT_URL = "https://www.gradio.app/llms.txt"
GRADIO_DOCS_CACHE_FILE = ".gradio_docs_cache.txt"
GRADIO_DOCS_LAST_UPDATE_FILE = ".gradio_docs_last_update.txt"
GRADIO_DOCS_UPDATE_ON_APP_UPDATE = True # Only update when app is updated, not on a timer
# Global variables to store the current Gradio documentation
_gradio_docs_content: Optional[str] = None
_gradio_docs_last_fetched: Optional[datetime] = None
# ComfyUI Documentation Auto-Update System
COMFYUI_LLMS_TXT_URL = "https://docs.comfy.org/llms.txt"
COMFYUI_DOCS_CACHE_FILE = ".comfyui_docs_cache.txt"
COMFYUI_DOCS_LAST_UPDATE_FILE = ".comfyui_docs_last_update.txt"
COMFYUI_DOCS_UPDATE_ON_APP_UPDATE = True # Only update when app is updated, not on a timer
# Global variables to store the current ComfyUI documentation
_comfyui_docs_content: Optional[str] = None
_comfyui_docs_last_fetched: Optional[datetime] = None
# FastRTC Documentation Auto-Update System
FASTRTC_LLMS_TXT_URL = "https://fastrtc.org/llms.txt"
FASTRTC_DOCS_CACHE_FILE = ".fastrtc_docs_cache.txt"
FASTRTC_DOCS_LAST_UPDATE_FILE = ".fastrtc_docs_last_update.txt"
FASTRTC_DOCS_UPDATE_ON_APP_UPDATE = True # Only update when app is updated, not on a timer
# Global variables to store the current FastRTC documentation
_fastrtc_docs_content: Optional[str] = None
_fastrtc_docs_last_fetched: Optional[datetime] = None
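# Illustrative sketch (not part of the original module): one plausible way the
# llms.txt URLs and cache files above could be consumed. The helper name and
# behaviour are assumptions; the real fetch/update logic lives elsewhere.
def _fetch_docs_with_cache(url: str, cache_file: str) -> Optional[str]:
    """Fetch llms.txt from `url`, caching to disk and falling back to the cache."""
    import urllib.request
    try:
        with urllib.request.urlopen(url, timeout=10) as resp:
            content = resp.read().decode("utf-8", errors="replace")
        with open(cache_file, "w", encoding="utf-8") as f:
            f.write(content)
        return content
    except Exception:
        if os.path.exists(cache_file):
            with open(cache_file, "r", encoding="utf-8") as f:
                return f.read()
        return None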
# Available Models Configuration
AVAILABLE_MODELS = [
    {
        "name": "Gemini 3.0 Pro",
        "id": "gemini-3.0-pro",
        "description": "Google Gemini 3.0 Pro via Poe with advanced reasoning"
    },
    {
        "name": "DeepSeek V3.2-Exp",
        "id": "deepseek-ai/DeepSeek-V3.2-Exp",
        "description": "DeepSeek V3.2 Experimental model for cutting-edge code generation and reasoning"
    },
    {
        "name": "DeepSeek R1",
        "id": "deepseek-ai/DeepSeek-R1-0528",
        "description": "DeepSeek R1 model for code generation"
    },
    {
        "name": "GLM-4.6",
        "id": "zai-org/GLM-4.6",
        "description": "GLM-4.6 model for advanced code generation and general tasks"
    },
    {
        "name": "Gemini Flash Latest",
        "id": "gemini-flash-latest",
        "description": "Google Gemini Flash Latest model via native Gemini API"
    },
    {
        "name": "Gemini Flash Lite Latest",
        "id": "gemini-flash-lite-latest",
        "description": "Google Gemini Flash Lite Latest model via OpenAI-compatible API"
    },
    {
        "name": "GPT-5",
        "id": "gpt-5",
        "description": "OpenAI GPT-5 model for advanced code generation and general tasks"
    },
    {
        "name": "GPT-5.1",
        "id": "gpt-5.1",
        "description": "OpenAI GPT-5.1 model via Poe for advanced code generation and general tasks"
    },
    {
        "name": "GPT-5.1 Instant",
        "id": "gpt-5.1-instant",
        "description": "OpenAI GPT-5.1 Instant model via Poe for fast responses"
    },
    {
        "name": "GPT-5.1 Codex",
        "id": "gpt-5.1-codex",
        "description": "OpenAI GPT-5.1 Codex model via Poe optimized for code generation"
    },
    {
        "name": "GPT-5.1 Codex Mini",
        "id": "gpt-5.1-codex-mini",
        "description": "OpenAI GPT-5.1 Codex Mini model via Poe for lightweight code generation"
    },
    {
        "name": "Grok-4",
        "id": "grok-4",
        "description": "Grok-4 model via Poe (OpenAI-compatible) for advanced tasks"
    },
    {
        "name": "Grok-Code-Fast-1",
        "id": "Grok-Code-Fast-1",
        "description": "Grok-Code-Fast-1 model via Poe (OpenAI-compatible) for fast code generation"
    },
    {
        "name": "Claude-Opus-4.1",
        "id": "claude-opus-4.1",
        "description": "Anthropic Claude Opus 4.1 via Poe (OpenAI-compatible)"
    },
    {
        "name": "Claude-Sonnet-4.5",
        "id": "claude-sonnet-4.5",
        "description": "Anthropic Claude Sonnet 4.5 via Poe (OpenAI-compatible)"
    },
    {
        "name": "Claude-Haiku-4.5",
        "id": "claude-haiku-4.5",
        "description": "Anthropic Claude Haiku 4.5 via Poe (OpenAI-compatible)"
    },
    {
        "name": "Qwen3 Max Preview",
        "id": "qwen3-max-preview",
        "description": "Qwen3 Max Preview model via DashScope International API"
    },
    {
        "name": "MiniMax M2",
        "id": "MiniMaxAI/MiniMax-M2",
        "description": "MiniMax M2 model via HuggingFace InferenceClient with Novita provider"
    },
    {
        "name": "Kimi K2 Thinking",
        "id": "moonshotai/Kimi-K2-Thinking",
        "description": "Moonshot Kimi K2 Thinking model for advanced reasoning and code generation"
    },
    {
        "name": "Sherlock Dash Alpha",
        "id": "openrouter/sherlock-dash-alpha",
        "description": "Sherlock Dash Alpha model via OpenRouter for advanced code generation and reasoning"
    },
    {
        "name": "Sherlock Think Alpha",
        "id": "openrouter/sherlock-think-alpha",
        "description": "Sherlock Think Alpha model via OpenRouter for advanced reasoning with thinking capabilities"
    }
]
# Convenience alias for the Kimi K2 Thinking model id
k2_model_name_tag = "moonshotai/Kimi-K2-Thinking"
# Default model selection
DEFAULT_MODEL_NAME = "Gemini 3.0 Pro"
DEFAULT_MODEL = None
for _m in AVAILABLE_MODELS:
    if _m.get("name") == DEFAULT_MODEL_NAME:
        DEFAULT_MODEL = _m
        break
if DEFAULT_MODEL is None and AVAILABLE_MODELS:
    DEFAULT_MODEL = AVAILABLE_MODELS[0]
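# Illustrative helper (an assumption, not part of the original module): resolve
# a model entry by its `id`, mirroring the name-based lookup above.
def _get_model_by_id(model_id: str) -> Optional[dict]:
    for _model in AVAILABLE_MODELS:
        if _model.get("id") == model_id:
            return _model
    return None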
# HF Inference Client
HF_TOKEN = os.getenv('HF_TOKEN')
# Note: HF_TOKEN is checked at runtime when needed, not at import time
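# Illustrative sketch (assumption, not part of the original module): deferring
# the token check to client construction keeps import-time failures out of this
# config file. Uses huggingface_hub's InferenceClient, imported lazily.
def _make_hf_client(model_id: str):
    from huggingface_hub import InferenceClient
    if not HF_TOKEN:
        raise RuntimeError("HF_TOKEN environment variable is not set")
    return InferenceClient(model=model_id, token=HF_TOKEN)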
# Language choices for code generation
LANGUAGE_CHOICES = [
"html", "gradio", "transformers.js", "streamlit", "comfyui", "react"
]
def get_gradio_language(language):
    """Map composite framework choices to a syntax-highlighting language Gradio supports."""
    if language == "streamlit":
        return "python"
    if language == "gradio":
        return "python"
    if language == "comfyui":
        return "json"
    if language == "react":
        return "javascript"
    return language if language in GRADIO_SUPPORTED_LANGUAGES else None
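# Example mappings (derived from the function above):
#   get_gradio_language("gradio")          -> "python"
#   get_gradio_language("streamlit")       -> "python"
#   get_gradio_language("comfyui")         -> "json"
#   get_gradio_language("react")           -> "javascript"
#   get_gradio_language("transformers.js") -> None (no direct highlighting)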