File size: 8,213 Bytes
872296d 7e77368 872296d 7e77368 872296d 774ea65 872296d 7e77368 872296d 7e77368 872296d |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 184 185 186 187 188 189 190 191 192 193 194 195 196 197 198 199 200 201 202 203 204 205 206 |
"""
SuperCoder - Hugging Face Spaces Frontend (FIXED)
Connects to your local API server via tunnel
"""
import gradio as gr
import requests
from typing import List, Tuple
import os
# ============================================================================
# Configuration - YOUR TUNNEL URL
# ============================================================================
# The API_URL is pulled from a Space Secret for better security; the fallback
# placeholder makes the misconfiguration obvious in error messages.
# Trailing slashes are stripped so endpoint paths can be appended safely.
API_URL = os.getenv("API_URL", "https://your-tunnel-url.ngrok-free.dev").rstrip('/')
# ============================================================================
# API Client Functions
# ============================================================================
def call_api(message: str, temperature: float = 0.1, max_tokens: int = 512) -> str:
    """Send a single-turn chat request to the remote backend.

    Returns the model's reply on success, or a human-readable error string
    for every failure mode (empty input, HTTP errors, timeout, connection
    failure, anything unexpected). Never raises.
    """
    # Guard clause: reject empty / whitespace-only input before touching the network.
    if not message or not message.strip():
        return "β οΈ Please enter a message"

    payload = {
        "messages": [{"role": "user", "content": message}],
        "temperature": temperature,
        "max_tokens": max_tokens,
    }
    request_headers = {
        "Content-Type": "application/json",
        # Bypass ngrok's browser interstitial page for programmatic requests.
        "ngrok-skip-browser-warning": "true",
    }

    try:
        resp = requests.post(
            f"{API_URL}/api/chat",
            json=payload,
            timeout=90,
            headers=request_headers,
        )
        # Early returns per status; kept inside the try so a malformed JSON
        # body on a 200 still falls through to the generic handler below.
        if resp.status_code == 200:
            return resp.json().get("response", "No response from API")
        if resp.status_code == 503:
            return "π§ Backend service unavailable. Please ensure your local server is running."
        return f"β API Error ({resp.status_code}): {resp.text[:200]}"
    except requests.exceptions.Timeout:
        return "β±οΈ Request timed out. The model might be processing a complex request or the server is down."
    except requests.exceptions.ConnectionError:
        return f"π Cannot connect to API at {API_URL}. Please verify:\n1. Local server is running\n2. Tunnel (ngrok/cloudflare) is active\n3. API_URL is correct"
    except Exception as e:
        return f"β οΈ Unexpected error: {str(e)}"
def check_api_status() -> str:
    """Probe the backend's /health endpoint and return a status summary.

    Returns a short human-readable status line for display in the UI;
    never raises. FIX: the success-path string literal was broken across
    two source lines (unterminated string -> SyntaxError); reconstructed
    as a single literal.
    """
    try:
        response = requests.get(
            f"{API_URL}/health",
            timeout=10,  # Increased timeout slightly for cold starts
            headers={"ngrok-skip-browser-warning": "true"},
        )
        if response.status_code != 200:
            return f"β API returned status {response.status_code}"
        data = response.json()
        status = data.get("status", "unknown")
        if status == "ok":
            # NOTE(review): literal reconstructed from a line-split original;
            # the leading glyph is mojibake of an emoji — confirm against the
            # deployed file.
            return "β Connected - Backend Ready"
        return f"β οΈ Connected but status: {status}"
    except requests.exceptions.ConnectionError:
        return f"π΄ Cannot reach {API_URL} - Check tunnel status"
    except requests.exceptions.Timeout:
        return "β±οΈ Health check timed out"
    except Exception as e:
        return f"β Error: {str(e)}"
# ============================================================================
# Gradio Interface
# ============================================================================
def respond(message: str, history: List[Tuple[str, str]], temperature: float, max_tokens: int):
    """Generator handler for chat turns.

    Yields twice: first the history with the user's message and a placeholder
    reply (so the UI updates immediately), then again once the backend reply
    has been filled in. Mutates *history* in place, as Gradio expects.
    """
    # Echo the user's turn right away with no answer yet.
    history.append((message, None))
    yield history

    # Replace the placeholder pair with the completed exchange.
    reply = call_api(message, temperature, max_tokens)
    history[-1] = (message, reply)
    yield history
def apply_template(template: str, history: List[Tuple[str, str]]) -> tuple:
    """Return the canned prompt for *template* plus the unchanged history.

    Unknown template names map to an empty prompt string, so the input box
    is simply cleared rather than erroring.
    """
    prompt_library = {
        "Explain Code": "Please explain the following code in detail:\n```python\n# Paste your code here\n```",
        "Debug Code": "I have a bug in my code. Can you help me debug it?\n```python\n# Paste your buggy code here\n```",
        "Write Function": "Please write a Python function that: [describe what you need]",
        "Optimize Code": "Can you optimize this code for better performance?\n```python\n# Paste your code here\n```",
        "Add Comments": "Please add clear comments to this code:\n```python\n# Paste your code here\n```",
    }
    prompt = prompt_library.get(template, "")
    return prompt, history
# Create the Gradio interface
with gr.Blocks(
title="SuperCoder Pro",
theme=gr.themes.Soft(primary_hue="indigo"),
css=".gradio-container {max-width: 1200px !important}"
) as demo:
gr.Markdown(
"""
# π€ SuperCoder Pro
### AI-Powered Coding Assistant
> **Note:** This interface connects to a local backend via a secure tunnel.
> Ensure your local server and tunnel are running.
"""
)
# Status bar
with gr.Row():
with gr.Column(scale=4):
# --- FIX ---: Set a static default value here
status_display = gr.Textbox(
value="β³ Initializing...",
label="π Backend Status",
interactive=False,
show_copy_button=True
)
with gr.Column(scale=1):
refresh_btn = gr.Button("π Refresh Status", size="sm")
# Main chat interface
with gr.Row():
with gr.Column(scale=3):
chatbot = gr.Chatbot(
label="π¬ Conversation",
height=500,
show_copy_button=True,
avatar_images=(None, "π€"),
bubble_full_width=False
)
with gr.Row():
msg_input = gr.Textbox(
placeholder="Ask me to write, explain, debug, or review code...",
scale=5,
lines=2,
show_label=False,
autofocus=True,
container=False
)
send_btn = gr.Button("Send π", scale=1, variant="primary")
# Settings sidebar
with gr.Column(scale=1):
gr.Markdown("### βοΈ Model Settings")
temperature = gr.Slider(0.0, 1.0, value=0.1, step=0.05, label="π‘οΈ Temperature")
max_tokens = gr.Slider(128, 4096, value=1024, step=128, label="π Max Tokens")
gr.Markdown("---")
gr.Markdown("### π― Quick Templates")
template_dropdown = gr.Dropdown(
choices=["Explain Code", "Debug Code", "Write Function", "Optimize Code", "Add Comments"],
label="Select Template",
value="Explain Code"
)
use_template_btn = gr.Button("π Use Template", size="sm")
clear_btn = gr.Button("ποΈ Clear Chat", variant="stop", size="sm")
gr.Markdown("---")
gr.Markdown(f"""### π‘ Connection Info\n**API Endpoint:**\n`{API_URL}`""")
# Event handlers
# --- FIX ---: Use the demo.load() event to check status after UI is ready
demo.load(check_api_status, outputs=[status_display])
refresh_btn.click(check_api_status, outputs=[status_display])
msg_submit_event = msg_input.submit(
respond,
inputs=[msg_input, chatbot, temperature, max_tokens],
outputs=[chatbot]
)
msg_submit_event.then(lambda: gr.update(value=""), outputs=[msg_input])
send_btn.click(
respond,
inputs=[msg_input, chatbot, temperature, max_tokens],
outputs=[chatbot]
).then(lambda: gr.update(value=""), outputs=[msg_input])
use_template_btn.click(apply_template, inputs=[template_dropdown, chatbot], outputs=[msg_input, chatbot])
clear_btn.click(lambda: [], outputs=[chatbot])
# ============================================================================
# Launch Configuration for HF Spaces
# ============================================================================
if __name__ == "__main__":
    # Hugging Face Spaces routes traffic to 0.0.0.0:7860 by convention.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True,  # surface tracebacks in the UI for easier debugging
    )