|
|
""" |
|
|
SuperCoder - Hugging Face Spaces Frontend (FIXED) |
|
|
Connects to your local API server via tunnel |
|
|
""" |
|
|
import gradio as gr |
|
|
import requests |
|
|
from typing import List, Tuple |
|
|
import os |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Base URL of the tunnelled local API server (ngrok / cloudflare).
# Trailing slashes are stripped so endpoint paths join with exactly one "/".
API_URL: str = os.getenv("API_URL", "https://your-tunnel-url.ngrok-free.dev").rstrip('/')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def call_api(message: str, temperature: float = 0.1, max_tokens: int = 512) -> str:
    """Send *message* to the remote chat endpoint and return the reply text.

    Never raises: blank input, HTTP errors, timeouts, and connection
    failures are all mapped to human-readable error strings for the UI.
    """
    # Guard clause: reject empty/whitespace-only input before any network work.
    if not (message and message.strip()):
        return "β οΈ Please enter a message"

    payload = {
        "messages": [{"role": "user", "content": message}],
        "temperature": temperature,
        "max_tokens": max_tokens
    }
    # The extra header skips ngrok's free-tier browser interstitial page.
    request_headers = {
        "Content-Type": "application/json",
        "ngrok-skip-browser-warning": "true"
    }

    try:
        resp = requests.post(
            f"{API_URL}/api/chat",
            json=payload,
            timeout=90,
            headers=request_headers
        )

        if resp.status_code == 200:
            return resp.json().get("response", "No response from API")
        if resp.status_code == 503:
            return "π§ Backend service unavailable. Please ensure your local server is running."
        return f"β API Error ({resp.status_code}): {resp.text[:200]}"

    except requests.exceptions.Timeout:
        return "β±οΈ Request timed out. The model might be processing a complex request or the server is down."
    except requests.exceptions.ConnectionError:
        return f"π Cannot connect to API at {API_URL}. Please verify:\n1. Local server is running\n2. Tunnel (ngrok/cloudflare) is active\n3. API_URL is correct"
    except Exception as e:
        # Catch-all boundary: JSON decode errors etc. also land here.
        return f"β οΈ Unexpected error: {str(e)}"
|
|
|
|
|
def check_api_status() -> str:
    """Check if the API is reachable and healthy.

    Sends GET {API_URL}/health and maps the outcome to a short status
    string for the UI's status textbox. Never raises.
    """
    try:
        response = requests.get(
            f"{API_URL}/health",
            timeout=10,
            # Skip ngrok's free-tier browser interstitial page.
            headers={"ngrok-skip-browser-warning": "true"}
        )

        if response.status_code == 200:
            data = response.json()
            status = data.get("status", "unknown")
            if status == "ok":
                # BUGFIX: this literal was split across two physical lines in
                # the original (unterminated string -> SyntaxError); rejoined.
                return "β Connected - Backend Ready"
            else:
                return f"β οΈ Connected but status: {status}"
        else:
            return f"β API returned status {response.status_code}"

    except requests.exceptions.ConnectionError:
        return f"π΄ Cannot reach {API_URL} - Check tunnel status"
    except requests.exceptions.Timeout:
        return "β±οΈ Health check timed out"
    except Exception as e:
        # Catch-all boundary: JSON decode or other unexpected failures.
        return f"β Error: {str(e)}"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def respond(message: str, history: List[Tuple[str, str]], temperature: float, max_tokens: int):
    """Generator handler for chat events.

    Yields twice: first the history with the user's turn and a pending
    (None) bot slot so the UI updates immediately, then again with the
    backend's reply filled in. Mutates *history* in place.
    """
    # Echo the user's message right away with a placeholder reply.
    history.append((message, None))
    yield history

    # Replace the placeholder once the backend answers.
    reply = call_api(message, temperature, max_tokens)
    history[-1] = (message, reply)
    yield history
|
|
|
|
|
def apply_template(template: str, history: List[Tuple[str, str]]) -> tuple:
    """Return (prompt_text, history) for the chosen quick-start template.

    Unknown template names produce an empty prompt; *history* is passed
    through unchanged in either case.
    """
    # Most templates share the same fenced-code placeholder suffix.
    code_stub = "\n```python\n# Paste your code here\n```"
    prompts = {
        "Explain Code": "Please explain the following code in detail:" + code_stub,
        "Debug Code": "I have a bug in my code. Can you help me debug it?\n```python\n# Paste your buggy code here\n```",
        "Write Function": "Please write a Python function that: [describe what you need]",
        "Optimize Code": "Can you optimize this code for better performance?" + code_stub,
        "Add Comments": "Please add clear comments to this code:" + code_stub
    }
    return prompts.get(template, ""), history
|
|
|
|
|
|
|
|
# ---------------------------------------------------------------------------
# Gradio UI: a Blocks app with a backend-status bar, a chat column, and a
# settings sidebar (sampling sliders, quick templates, connection info).
# NOTE(review): emoji in labels/strings appear mojibake'd (e.g. "π€") —
# likely an encoding issue upstream; left as-is since they are runtime text.
# ---------------------------------------------------------------------------
with gr.Blocks(
    title="SuperCoder Pro",
    theme=gr.themes.Soft(primary_hue="indigo"),
    # Cap the overall width so the chat stays readable on wide screens.
    css=".gradio-container {max-width: 1200px !important}"
) as demo:

    # Header and usage note shown above everything else.
    gr.Markdown(
        """
        # π€ SuperCoder Pro
        ### AI-Powered Coding Assistant
        > **Note:** This interface connects to a local backend via a secure tunnel.
        > Ensure your local server and tunnel are running.
        """
    )

    # Top row: backend health readout plus a manual refresh button.
    with gr.Row():
        with gr.Column(scale=4):
            status_display = gr.Textbox(
                value="β³ Initializing...",
                label="π Backend Status",
                interactive=False,
                show_copy_button=True
            )
        with gr.Column(scale=1):
            refresh_btn = gr.Button("π Refresh Status", size="sm")

    # Main row: chat area (left, wider) and settings sidebar (right).
    with gr.Row():
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(
                label="π¬ Conversation",
                height=500,
                show_copy_button=True,
                avatar_images=(None, "π€"),
                bubble_full_width=False
            )

            with gr.Row():
                msg_input = gr.Textbox(
                    placeholder="Ask me to write, explain, debug, or review code...",
                    scale=5,
                    lines=2,
                    show_label=False,
                    autofocus=True,
                    container=False
                )
                send_btn = gr.Button("Send π", scale=1, variant="primary")

        with gr.Column(scale=1):
            gr.Markdown("### βοΈ Model Settings")
            # Sampling controls are forwarded verbatim to call_api().
            temperature = gr.Slider(0.0, 1.0, value=0.1, step=0.05, label="π‘οΈ Temperature")
            max_tokens = gr.Slider(128, 4096, value=1024, step=128, label="π Max Tokens")

            gr.Markdown("---")
            gr.Markdown("### π― Quick Templates")
            template_dropdown = gr.Dropdown(
                choices=["Explain Code", "Debug Code", "Write Function", "Optimize Code", "Add Comments"],
                label="Select Template",
                value="Explain Code"
            )
            use_template_btn = gr.Button("π Use Template", size="sm")
            clear_btn = gr.Button("ποΈ Clear Chat", variant="stop", size="sm")

            gr.Markdown("---")
            gr.Markdown(f"""### π‘ Connection Info\n**API Endpoint:**\n`{API_URL}`""")

    # Run a health check when the page loads, and again on demand.
    demo.load(check_api_status, outputs=[status_display])

    refresh_btn.click(check_api_status, outputs=[status_display])

    # Enter key: stream the bot response, then clear the input box.
    msg_submit_event = msg_input.submit(
        respond,
        inputs=[msg_input, chatbot, temperature, max_tokens],
        outputs=[chatbot]
    )
    msg_submit_event.then(lambda: gr.update(value=""), outputs=[msg_input])

    # Send button mirrors the Enter-key behavior.
    send_btn.click(
        respond,
        inputs=[msg_input, chatbot, temperature, max_tokens],
        outputs=[chatbot]
    ).then(lambda: gr.update(value=""), outputs=[msg_input])

    # Copy the selected template text into the input box; history untouched.
    use_template_btn.click(apply_template, inputs=[template_dropdown, chatbot], outputs=[msg_input, chatbot])

    # Reset the conversation pane to an empty history.
    clear_btn.click(lambda: [], outputs=[chatbot])
|
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Bind on all interfaces on port 7860 (the Hugging Face Spaces default);
    # show_error surfaces Python tracebacks in the UI for easier debugging.
    demo.launch(
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True
    )