import gradio as gr
import google.generativeai as genai
import os
from datetime import datetime
from typing import List, Optional, Dict, Any, Tuple
import asyncio
import httpx  # async HTTP client (NOTE(review): appears unused in this file — confirm before removing)

# --- 1. API and model configuration ---
# Fail fast at import time if the API key is missing or misconfigured.
GOOGLE_API_KEY = os.environ.get('GOOGLE_API_KEY')
if not GOOGLE_API_KEY:
    raise gr.Error("Переменная окружения GOOGLE_API_KEY не установлена.")
try:
    genai.configure(api_key=GOOGLE_API_KEY)
except Exception as e:
    raise gr.Error(f"Ошибка при настройке Google Gemini API: {e}.")


def get_available_models() -> List[str]:
    """Return a sorted, de-duplicated list of usable text-generation model names.

    Keeps only models that advertise 'generateContent' support and excludes
    vision/tts/audio models by name substring.  On API failure, prints a
    warning and returns an empty list instead of raising.
    """
    available_models = []
    try:
        for m in genai.list_models():
            if 'generateContent' in m.supported_generation_methods:
                if 'vision' not in m.name.lower() and 'tts' not in m.name.lower() and 'audio' not in m.name.lower():
                    available_models.append(m.name)
    except Exception as e:
        print(f"Предупреждение: Не удалось получить список моделей: {e}.")
    return sorted(list(set(available_models)))


AVAILABLE_MODELS = get_available_models()
if not AVAILABLE_MODELS:
    raise gr.Error("Не найдено моделей, совместимых с 'generateContent'.")

# --- Tuning constants ---
DEFAULT_NUM_AI_VARIANTS = 1  # default number of answer variants generated per prompt

# --- 2.
# --- 2. Response generation logic ---

def format_history_for_gemini(history: List[List[Optional[str]]]) -> List[Dict[str, Any]]:
    """Convert Gradio chat history into the Gemini API message-dict format.

    The last entry of ``history`` is expected to be the pending turn
    ``[user_message, None]``; it is emitted exactly once as the final
    'user' message.  Earlier bot replies are replaced by a short
    placeholder string (NOTE(review): the stored bot text is rendered
    HTML, so the real reply is deliberately not re-sent — confirm this
    context strategy is intended).
    """
    gemini_history = []
    # Only the turns *before* the pending one are context.  Iterating the
    # full history here used to duplicate the final user prompt, because it
    # is appended again below (bug fix).
    for turn in history[:-1]:
        if isinstance(turn, (list, tuple)) and len(turn) >= 2:
            user_message, bot_message = turn[0], turn[1]
            if user_message:
                gemini_history.append({'role': 'user', 'parts': [{'text': str(user_message)}]})
            if bot_message and bot_message != "...":
                # Simplified placeholder instead of the real (HTML) bot reply.
                gemini_history.append({'role': 'model', 'parts': [{'text': "Ответ сгенерирован."}]})
    # Append the pending user prompt exactly once.
    if history and history[-1][0]:
        gemini_history.append({'role': 'user', 'parts': [{'text': str(history[-1][0])}]})
    return gemini_history


async def generate_single_variant_async(history: List, model_name: str, temperature: float):
    """Generate one answer variant for the last message in *history*.

    *history* is a Gemini-format message list (see format_history_for_gemini).
    Returns the stripped response text, or an error string on any failure —
    errors are returned rather than raised so asyncio.gather callers never
    see an exception from this coroutine.
    """
    try:
        model = genai.GenerativeModel(model_name=model_name)
        chat_history_for_context = history[:-1]
        last_user_prompt = history[-1]['parts'][0]['text']
        chat = model.start_chat(history=chat_history_for_context)
        response = await chat.send_message_async(
            last_user_prompt,
            generation_config=genai.types.GenerationConfig(temperature=temperature)
        )
        return response.text.strip()
    except Exception as e:
        return f'Ошибка генерации: {e}'


def format_variants_html(variants: List[str]) -> str:
    """Render generated answer variants as a block of HTML cards.

    Variants whose text contains "Ошибка" are rendered as error cards;
    others get a header (numbered only when there is more than one
    variant) and a copy-to-clipboard button.

    NOTE(review): the markup below was reconstructed from the CSS classes
    defined in ``custom_css`` (variant-container, variant-header,
    copy-button, error-message); the original markup was lost in the
    source — verify the visual result.
    """
    if not variants:
        return ""
    html_outputs = []
    for i, variant_text in enumerate(variants):
        # Escape characters that would break the inline JS string literal.
        js_safe_text = variant_text.replace('`', '\\`').replace('\n', '\\n').replace("'", "\\'")
        copy_button_html = (
            '<button class="copy-button" '
            f"onclick=\"navigator.clipboard.writeText('{js_safe_text}')\">"
            '<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" '
            'viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2">'
            '<rect x="9" y="9" width="13" height="13" rx="2" ry="2"></rect>'
            '<path d="M5 15H4a2 2 0 0 1-2-2V4a2 2 0 0 1 2-2h9a2 2 0 0 1 2 2v1"></path>'
            '</svg></button>'
        )
        if "Ошибка" in variant_text:
            html_outputs.append(f'<div class="error-message">{variant_text}</div>')
        else:
            header = f"Вариант {i + 1}" if len(variants) > 1 else ""
            html_outputs.append(
                f'<div class="variant-container">'
                f'<div class="variant-header"><span>{header}</span>{copy_button_html}</div>'
                f'<div>{variant_text}</div>'
                f'</div>'
            )
    return "".join(html_outputs)


async def respond(history, model_name, temperature, num_variants):
    """Generate *num_variants* answers concurrently and emit the final chat state.

    Async generator used as a Gradio event handler: it yields exactly once,
    with the last turn's bot slot filled with rendered HTML.  There is no
    intermediate "..." / timer yield by design.
    """
    # Guard: nothing to answer.  A bare return ends the generator cleanly.
    if not history or not history[-1][0]:
        return
    api_history = format_history_for_gemini(history)
    try:
        tasks = [
            generate_single_variant_async(api_history, model_name, temperature)
            for _ in range(int(num_variants))
        ]
        # return_exceptions=True keeps one failed task from cancelling the rest.
        results = await asyncio.gather(*tasks, return_exceptions=True)
        processed_results = []
        for res in results:
            if isinstance(res, Exception):
                processed_results.append(f"Ошибка генерации варианта: {res}")
            else:
                processed_results.append(res)
        history[-1][1] = format_variants_html(processed_results)
    except Exception as e:
        history[-1][1] = f"Произошла непредвиденная ошибка: {e}"
    # Only the final result is sent to the interface.
    yield history


# --- 3. UI helper functions ---

def add_user_message_to_history(message: str, history: List[List[Optional[str]]]):
    """Append the user's message as a new [message, None] turn and clear the textbox.

    Blank/whitespace-only messages leave the history unchanged.
    """
    if not message.strip():
        return "", history
    return "", history + [[message, None]]


def regenerate_last_response(history: List[List[Optional[str]]]):
    """Drop the last bot reply (set it to None) so `respond` regenerates it."""
    if history and history[-1][1] is not None:
        history[-1][1] = None
    return history


# --- 4. CSS ---
custom_css = """
/* Цветовая палитра "Глубокий Космос" */
:root { --primary-color: #3B82F6; --primary-color-hover: #60A5FA; --secondary-color: #9CA3AF; --secondary-color-hover: #E5E7EB; --danger-color: #F87171; --danger-color-hover: #EF4444; --app-bg-color: #111827; --input-bg-color: #1F2937; --border-color: #4B5563; --text-color-primary: #F3F4F6; --text-color-secondary: #9CA3AF; --label-color: #E5E7EB; }
.gradio-container { background-color: var(--app-bg-color) !important; color: var(--text-color-primary) !important; }
.custom-button button { border-radius: 8px !important; font-weight: 600 !important; transition: all 0.2s ease-in-out !important; padding: 10px !important; }
.submit-button button { background: var(--primary-color) !important; color: white !important; border: 1px solid var(--primary-color) !important; }
.submit-button button:hover { background: var(--primary-color-hover) !important; border-color: var(--primary-color-hover) !important; box-shadow: 0px 4px 10px rgba(0, 0, 0, 0.2) !important; transform: translateY(-2px); }
.regenerate-button button, .clear-button button { background: transparent !important; border: 1px solid var(--border-color) !important; }
.regenerate-button button { color: var(--secondary-color) !important; }
.clear-button button { color: var(--danger-color) !important; }
.regenerate-button button:hover { background: var(--secondary-color) !important; border-color: var(--secondary-color) !important; color: var(--app-bg-color) !important; }
.clear-button button:hover { background: var(--danger-color) !important; border-color: var(--danger-color) !important; color: white !important; }
.input-container, .input-container .wrap { background-color: var(--input-bg-color) !important; border: 1px solid var(--border-color) !important; border-radius: 8px !important; transition: border-color 0.2s, box-shadow 0.2s !important; box-shadow: 0px 1px 2px rgba(0, 0, 0, 0.1) !important; }
.input-container:focus-within, .input-container .wrap:focus-within { border-color: var(--primary-color) !important; box-shadow: 0 0 0 3px rgba(59, 130, 246, 0.2) !important; }
.input-container textarea, .input-container .gr-form-component { border: none !important; background: transparent !important; color: var(--text-color-primary) !important; }
.input-container textarea::placeholder { color: var(--text-color-secondary); }
.gradio-container .input-container .options { background-color: var(--input-bg-color) !important; border: 1px solid var(--border-color) !important; }
.gradio-container .input-container .option-item:hover, .gradio-container .input-container .option-item.selected { background-color: var(--primary-color) !important; color: white !important; }
.gradio-container .gradio-slider > input[type=range] { background-color: var(--primary-color) !important; }
.gradio-container label, .gradio-container .gr-info { font-weight: 600 !important; color: var(--label-color) !important; }
.gradio-container h1, .gradio-container .gr-markdown p { color: var(--text-color-primary); }
.variant-container { background: var(--input-bg-color); border: 1px solid var(--border-color); border-radius: 8px; margin-bottom: 10px; padding: 15px; color: var(--text-color-primary); }
.variant-header { display: flex; justify-content: space-between; align-items: center; margin-bottom: 10px; color: var(--label-color); }
.copy-button { background: transparent; border: 1px solid var(--border-color); border-radius: 5px; cursor: pointer; padding: 5px; }
.copy-button:hover { background: #374151; }
.copy-button svg { stroke: var(--secondary-color); }
.error-message { background-color: #450A0A; color: #F87171; border: 1px solid #7F1D1D; border-radius: 8px; padding: 15px; }
.gradio-container .chatbot { border: 1px solid var(--border-color) !important; border-radius: 8px !important; background-color: var(--input-bg-color) !important; }
.gradio-container .message.bot { background-color: #1F2937 !important; border-radius: 8px !important; }
.gradio-container .message.user { background-color: #374151 !important; border-radius: 8px !important; }
"""

# --- 5. Gradio interface ---
with gr.Blocks(theme=gr.themes.Soft(), css=custom_css) as interface:
    gr.Markdown("# Чат-Бот Gemini/Gemma AI")
    with gr.Row():
        # Left column: chat window and prompt input.
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(height=500, label="Чат", elem_classes="input-container")
            prompt_input = gr.Textbox(
                label="Введи свой запрос:",
                lines=2,
                max_lines=10,
                placeholder="Чё хошь, я жду...",
                elem_classes="input-container"
            )
        # Right column: settings and control buttons.
        with gr.Column(scale=1, min_width=200):
            model_selector = gr.Dropdown(
                choices=AVAILABLE_MODELS,
                value=AVAILABLE_MODELS[0] if AVAILABLE_MODELS else "",
                label="Выбирай Модель",
                interactive=True,
                elem_classes="input-container"
            )
            temperature_slider = gr.Slider(
                minimum=0.0, maximum=1.0, step=0.1, value=0.9,
                label="Температура"
            )
            num_variants_slider = gr.Slider(
                minimum=1, maximum=5, step=1, value=DEFAULT_NUM_AI_VARIANTS,
                label="Количество Вариантов Ответа"
            )
            gr.Markdown("---")  # visual separator
            submit_button = gr.Button("Спросить (Shift+Enter)", variant="primary", elem_classes=["custom-button", "submit-button"])
            regenerate_button = gr.Button("🔄 Переспросить", variant="secondary", elem_classes=["custom-button", "regenerate-button"])
            clear_button = gr.Button("🗑️ Сбросить", variant="stop", elem_classes=["custom-button", "clear-button"])

    # --- 6. Event handler wiring ---
    # Each generation flow is two steps: synchronously record the user turn
    # (queue=False), then run the async `respond` generator.
    generation_inputs = [chatbot, model_selector, temperature_slider, num_variants_slider]

    prompt_input.submit(
        fn=add_user_message_to_history,
        inputs=[prompt_input, chatbot],
        outputs=[prompt_input, chatbot],
        queue=False
    ).then(
        fn=respond,
        inputs=generation_inputs,
        outputs=[chatbot]
    )

    submit_button.click(
        fn=add_user_message_to_history,
        inputs=[prompt_input, chatbot],
        outputs=[prompt_input, chatbot],
        queue=False
    ).then(
        fn=respond,
        inputs=generation_inputs,
        outputs=[chatbot]
    )

    regenerate_button.click(
        fn=regenerate_last_response,
        inputs=[chatbot],
        outputs=[chatbot],
        queue=False
    ).then(
        fn=respond,
        inputs=generation_inputs,
        outputs=[chatbot]
    )

    clear_button.click(
        fn=lambda: ["", []],
        inputs=[],
        outputs=[prompt_input, chatbot],
        queue=False
    )

interface.queue().launch()