From 268e6290d0b7743f1388d69fd49a76c8fb669b52 Mon Sep 17 00:00:00 2001 From: Hongyu Shi Date: Sun, 28 Sep 2025 11:57:07 +0800 Subject: [PATCH 1/6] =?UTF-8?q?feat(deploy):=20=E6=9B=B4=E6=96=B0=20SSE=20?= =?UTF-8?q?Endpoint=20=E9=AA=8C=E8=AF=81=E9=80=BB=E8=BE=91=EF=BC=8C?= =?UTF-8?q?=E6=96=B0=E5=A2=9E=20MCP=20=E5=8D=8F=E8=AE=AE=E7=9A=84=20initia?= =?UTF-8?q?lize=20=E6=96=B9=E6=B3=95?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Hongyu Shi --- src/app/deployment/agent.py | 162 +++++++++++++++++++++++++++++------- 1 file changed, 132 insertions(+), 30 deletions(-) diff --git a/src/app/deployment/agent.py b/src/app/deployment/agent.py index 2b2c6ff..07137eb 100644 --- a/src/app/deployment/agent.py +++ b/src/app/deployment/agent.py @@ -233,7 +233,7 @@ class ApiClient: self, service_id: str, max_wait_time: int = 300, - check_interval: int = 10, + check_interval: int = 2, ) -> bool: """ 等待 MCP 服务安装完成 @@ -836,7 +836,7 @@ class AgentManager: retry_count: int = 0, ) -> tuple[bool, str]: """验证单个服务状态""" - await asyncio.sleep(2) + await asyncio.sleep(0.1) service_name = service_file.stem # 去掉 .service 后缀 @@ -1278,36 +1278,29 @@ class AgentManager: ) # 重试配置 - max_attempts = 6 # 30秒 / 5秒 = 6次 - retry_interval = 5 # 5秒重试间隔 + max_attempts = 5 # 10秒 / 2秒 = 5次 + retry_interval = 2 # 2秒重试间隔 for attempt in range(1, max_attempts + 1): - try: - # 使用流式请求,只读取响应头,避免 SSE 连接一直保持开放 - async with ( - httpx.AsyncClient(timeout=self.api_client.timeout) as client, - client.stream("GET", url, headers={"Accept": "text/event-stream"}) as response, - ): - if response.status_code == HTTP_OK: - # 验证成功 - self._report_progress( - state, - f" [green]{config.name} SSE Endpoint 验证通过[/green]", - callback, - ) - logger.info("SSE Endpoint 验证成功: %s (尝试 %d 次)", url, attempt) - return True - - logger.debug( - "SSE Endpoint 响应码非 200: %s, 状态码: %d, 尝试: %d/%d", - url, - response.status_code, - attempt, - max_attempts, - ) + # 方式1:先尝试原来的简单 GET 请求方式 + if await self._try_simple_sse_check(url, config.name, attempt, max_attempts): + self._report_progress( + state, + f" [green]{config.name} SSE Endpoint 验证通过[/green]", + callback, + ) + logger.info("SSE Endpoint 简单验证成功: %s (尝试 %d 次)", url, attempt) + return True - except (httpx.RequestError, httpx.HTTPStatusError) as e: - logger.debug("SSE Endpoint 连接失败: %s, 错误: %s, 尝试: %d/%d", url, e, attempt, max_attempts) + # 方式2:如果简单方式失败,尝试 MCP 协议 initialize 方法 + if await self._try_mcp_initialize_check(url, config.name, attempt, max_attempts): + self._report_progress( + state, + f" [green]{config.name} SSE Endpoint 验证通过[/green]", + callback, + ) + logger.info("SSE Endpoint MCP 协议验证成功: %s (尝试 %d 次)", url, attempt) + return True # 如果还有重试机会,等待后继续 if attempt < max_attempts: @@ -1316,7 +1309,7 @@ class AgentManager: # 所有尝试都失败了 self._report_progress( state, - f" [red]{config.name} SSE Endpoint 验证失败: 3分钟内无法连接[/red]", + f" [red]{config.name} SSE Endpoint 验证失败: 30秒内无法连接[/red]", callback, ) logger.error( @@ -1326,3 +1319,112 @@ class AgentManager: max_attempts * retry_interval, ) return False + + async def _try_simple_sse_check( + self, + url: str, + config_name: str, + attempt: int, + max_attempts: int, + ) -> bool: + """尝试简单的 SSE 检查(原来的方式)""" + try: + # 使用流式请求,只读取响应头,避免 SSE 连接一直保持开放 + async with ( + httpx.AsyncClient(timeout=self.api_client.timeout) as client, + client.stream("GET", url, headers={"Accept": "text/event-stream"}) as response, + ): + if response.status_code == HTTP_OK: + logger.debug("SSE Endpoint 简单检查成功: %s (尝试 %d 次)", url, attempt) + return True 
+ + logger.debug( + "SSE Endpoint 简单检查响应码非 200: %s, 状态码: %d, 尝试: %d/%d", + url, + response.status_code, + attempt, + max_attempts, + ) + + except (httpx.RequestError, httpx.HTTPStatusError) as e: + logger.debug("SSE Endpoint 简单检查连接失败: %s, 错误: %s, 尝试: %d/%d", url, e, attempt, max_attempts) + + return False + + async def _try_mcp_initialize_check( + self, + url: str, + config_name: str, + attempt: int, + max_attempts: int, + ) -> bool: + """尝试 MCP 协议的 initialize 检查""" + # MCP 协议初始化请求负载 + mcp_payload = { + "jsonrpc": "2.0", + "id": "health-check", + "method": "initialize", + "params": { + "protocolVersion": "2024-11-05", + "capabilities": {}, + "clientInfo": { + "name": "openEuler Intelligence", + "version": "1.0", + }, + }, + } + + headers = { + "Content-Type": "application/json", + "Accept": "application/json,text/event-stream", + "MCP-Protocol-Version": "2024-11-05", + } + + try: + async with httpx.AsyncClient(timeout=self.api_client.timeout) as client: + response = await client.post(url, json=mcp_payload, headers=headers) + + if response.status_code == HTTP_OK: + # 尝试解析 SSE 响应,确保是有效的 MCP JSON-RPC 响应 + try: + response_text = response.text + + # 检查是否是 SSE 格式的响应 + if "event: message" in response_text and "data: " in response_text: + logger.debug("SSE Endpoint MCP 协议检查成功: %s (尝试 %d 次)", url, attempt) + return True + + # 限制日志输出长度,避免过长的响应内容 + max_log_length = 100 + truncated_response = ( + response_text[:max_log_length] + "..." + if len(response_text) > max_log_length + else response_text + ) + logger.debug( + "SSE Endpoint MCP 响应格式异常: %s, 响应: %s, 尝试: %d/%d", + url, + truncated_response, + attempt, + max_attempts, + ) + except json.JSONDecodeError: + logger.debug( + "SSE Endpoint MCP 响应非 JSON 格式: %s, 尝试: %d/%d", + url, + attempt, + max_attempts, + ) + else: + logger.debug( + "SSE Endpoint MCP 响应码非 200: %s, 状态码: %d, 尝试: %d/%d", + url, + response.status_code, + attempt, + max_attempts, + ) + + except (httpx.RequestError, httpx.HTTPStatusError) as e: + logger.debug("SSE Endpoint MCP 连接失败: %s, 错误: %s, 尝试: %d/%d", url, e, attempt, max_attempts) + + return False -- Gitee From 9205cf15cd9c74e1238da0df6161a2f48736b5e2 Mon Sep 17 00:00:00 2001 From: Hongyu Shi Date: Mon, 29 Sep 2025 16:16:51 +0800 Subject: [PATCH 2/6] =?UTF-8?q?fix:=20=E7=A1=AE=E4=BF=9D=E5=9C=A8=E5=88=9D?= =?UTF-8?q?=E5=A7=8B=E5=8C=96=E5=92=8C=E5=88=B7=E6=96=B0=E6=97=B6=E6=AD=A3?= =?UTF-8?q?=E7=A1=AE=E4=BF=9D=E5=AD=98=E5=92=8C=E6=81=A2=E5=A4=8D=E6=99=BA?= =?UTF-8?q?=E8=83=BD=E4=BD=93=E7=8A=B6=E6=80=81?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Hongyu Shi --- src/app/settings.py | 8 ++++++++ src/app/tui.py | 27 +++++++++++++++++++++++++-- src/backend/hermes/client.py | 25 +++++++++++-------------- 3 files changed, 44 insertions(+), 16 deletions(-) diff --git a/src/app/settings.py b/src/app/settings.py index d20048c..6347f85 100644 --- a/src/app/settings.py +++ b/src/app/settings.py @@ -466,6 +466,11 @@ class SettingsScreen(ModalScreen): base_url_input = self.query_one("#base-url", Input) api_key_input = self.query_one("#api-key", Input) + # 保存当前智能体状态(如果是Hermes客户端) + current_agent_id = "" + if isinstance(self.llm_client, HermesChatClient): + current_agent_id = getattr(self.llm_client, "current_agent_id", "") + if self.backend == Backend.OPENAI: # 获取模型输入值,如果输入框不存在则使用当前选择的模型 try: @@ -484,6 +489,9 @@ class SettingsScreen(ModalScreen): base_url=base_url_input.value, auth_token=api_key_input.value, ) + # 恢复智能体状态 + if current_agent_id: + 
self.llm_client.set_current_agent(current_agent_id) async def _toggle_mcp_authorization_async(self) -> None: """异步切换 MCP 工具授权模式""" diff --git a/src/app/tui.py b/src/app/tui.py index 419a8eb..82832c0 100644 --- a/src/app/tui.py +++ b/src/app/tui.py @@ -399,17 +399,40 @@ class IntelligentTerminal(App): if self._llm_client is None: self._llm_client = BackendFactory.create_client(self.config_manager) + # 初始化时设置智能体状态 + if (self.current_agent and self.current_agent[0] and + isinstance(self._llm_client, HermesChatClient)): + self._llm_client.set_current_agent(self.current_agent[0]) + # 为 Hermes 客户端设置 MCP 事件处理器以支持 MCP 交互 if isinstance(self._llm_client, HermesChatClient): mcp_handler = TUIMCPEventHandler(self, self._llm_client) self._llm_client.set_mcp_handler(mcp_handler) + # 确保智能体状态同步 + if self.current_agent and self.current_agent[0]: + current_client_agent = getattr(self._llm_client, "current_agent_id", "") + if current_client_agent != self.current_agent[0]: + self._llm_client.set_current_agent(self.current_agent[0]) + return self._llm_client def refresh_llm_client(self) -> None: """刷新 LLM 客户端实例,用于配置更改后重新创建客户端""" + # 保存当前智能体状态 + current_agent_id = self.current_agent[0] if self.current_agent else "" + self._llm_client = BackendFactory.create_client(self.config_manager) + # 恢复智能体状态到新的客户端 + if current_agent_id and isinstance(self._llm_client, HermesChatClient): + self._llm_client.set_current_agent(current_agent_id) + + # 为 Hermes 客户端设置 MCP 事件处理器 + if isinstance(self._llm_client, HermesChatClient): + mcp_handler = TUIMCPEventHandler(self, self._llm_client) + self._llm_client.set_mcp_handler(mcp_handler) + # 后端切换时重新初始化智能体状态 self._reinitialize_agent_state() @@ -1057,8 +1080,8 @@ class IntelligentTerminal(App): app_id, name = selected_agent # 设置智能体到客户端 - if hasattr(llm_client, "set_current_agent"): - llm_client.set_current_agent(app_id) # type: ignore[attr-defined] + if isinstance(llm_client, HermesChatClient): + llm_client.set_current_agent(app_id) dialog = AgentSelectionDialog(agent_list, on_agent_selected, self.current_agent) self.push_screen(dialog) diff --git a/src/backend/hermes/client.py b/src/backend/hermes/client.py index fb8bfae..49196f6 100644 --- a/src/backend/hermes/client.py +++ b/src/backend/hermes/client.py @@ -40,6 +40,9 @@ class HermesChatClient(LLMClientBase): """初始化 Hermes Chat API 客户端""" self.logger = get_logger(__name__) + self.current_agent_id: str = "" # 当前选择的智能体 ID + self.current_task_id: str = "" # 当前正在运行的任务 ID + # HTTP 管理器 - 立即初始化 self.http_manager = HermesHttpManager(base_url, auth_token) @@ -50,12 +53,6 @@ class HermesChatClient(LLMClientBase): self._conversation_manager: HermesConversationManager | None = None self._stream_processor: HermesStreamProcessor | None = None - # 当前选择的智能体ID - self._current_agent_id: str = "" - - # 当前正在运行的任务ID(用于停止请求) - self._current_task_id: str = "" - # MCP 事件处理器(可选) self._mcp_handler: MCPEventHandler | None = None @@ -108,7 +105,7 @@ class HermesChatClient(LLMClientBase): agent_id: 智能体ID,空字符串表示不使用智能体 """ - self._current_agent_id = agent_id + self.current_agent_id = agent_id self.logger.info("设置当前智能体ID: %s", agent_id or "无智能体") def reset_conversation(self) -> None: @@ -148,7 +145,7 @@ class HermesChatClient(LLMClientBase): self.logger.info("使用会话ID: %s", conversation_id) # 创建聊天请求 - app = HermesApp(self._current_agent_id) + app = HermesApp(self.current_agent_id) request = HermesChatRequest( app=app, conversation_id=conversation_id, @@ -422,15 +419,15 @@ class HermesChatClient(LLMClientBase): def _handle_task_id(self, event: HermesStreamEvent) -> 
None: """处理事件中的任务ID""" task_id = event.get_task_id() - if task_id and not self._current_task_id: - self._current_task_id = task_id + if task_id and not self.current_task_id: + self.current_task_id = task_id self.logger.debug("设置当前任务ID: %s", task_id) def _cleanup_task_id(self, context: str) -> None: """清理任务ID""" - if self._current_task_id: - self.logger.debug("%s清理任务ID: %s", context, self._current_task_id) - self._current_task_id = "" + if self.current_task_id: + self.logger.debug("%s清理任务ID: %s", context, self.current_task_id) + self.current_task_id = "" async def _handle_event_content(self, event: HermesStreamEvent) -> AsyncGenerator[str, None]: """处理单个事件的内容""" @@ -459,7 +456,7 @@ class HermesChatClient(LLMClientBase): async def _stop(self) -> None: """停止当前会话""" if self._conversation_manager is not None: - await self._conversation_manager.stop_conversation(self._current_task_id) + await self._conversation_manager.stop_conversation(self.current_task_id) # 停止后清理任务ID self._cleanup_task_id("手动停止") -- Gitee From 162ba28bb4fbffb032045d35bb594dff880e6f16 Mon Sep 17 00:00:00 2001 From: Hongyu Shi Date: Mon, 29 Sep 2025 17:44:53 +0800 Subject: [PATCH 3/6] =?UTF-8?q?feat:=20=E5=A2=9E=E5=BC=BA=20SSL=20?= =?UTF-8?q?=E9=AA=8C=E8=AF=81=E6=94=AF=E6=8C=81=EF=BC=8C=E5=85=81=E8=AE=B8?= =?UTF-8?q?=E9=80=9A=E8=BF=87=E7=8E=AF=E5=A2=83=E5=8F=98=E9=87=8F=E9=85=8D?= =?UTF-8?q?=E7=BD=AE/=E8=B7=B3=E8=BF=87=20SSL=20=E6=A0=A1=E9=AA=8C?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Hongyu Shi --- src/backend/openai.py | 14 +++++++- src/tool/validators.py | 78 +++++++++++++++++++++++++++++++++++++++--- 2 files changed, 87 insertions(+), 5 deletions(-) diff --git a/src/backend/openai.py b/src/backend/openai.py index ac3b9dc..9bd80ec 100644 --- a/src/backend/openai.py +++ b/src/backend/openai.py @@ -5,10 +5,12 @@ import time from collections.abc import AsyncGenerator from typing import TYPE_CHECKING +import httpx from openai import AsyncOpenAI, OpenAIError from backend.base import LLMClientBase from log.manager import get_logger, log_api_request, log_exception +from tool.validators import should_verify_ssl if TYPE_CHECKING: from openai.types.chat import ChatCompletionMessageParam @@ -17,16 +19,26 @@ if TYPE_CHECKING: class OpenAIClient(LLMClientBase): """OpenAI 大模型客户端""" - def __init__(self, base_url: str, model: str, api_key: str = "") -> None: + def __init__( + self, + base_url: str, + model: str, + api_key: str = "", + *, + verify_ssl: bool | None = None, + ) -> None: """初始化 OpenAI 大模型客户端""" self.logger = get_logger(__name__) self.model = model self.base_url = base_url + self.verify_ssl = should_verify_ssl(verify_ssl=verify_ssl) self.client = AsyncOpenAI( api_key=api_key, base_url=base_url, + http_client=httpx.AsyncClient(verify=self.verify_ssl), ) + self.logger.debug("OpenAIClient SSL 验证状态: %s", self.verify_ssl) # 添加历史记录管理 self._conversation_history: list[ChatCompletionMessageParam] = [] diff --git a/src/tool/validators.py b/src/tool/validators.py index ae2f6c0..259bf53 100644 --- a/src/tool/validators.py +++ b/src/tool/validators.py @@ -2,11 +2,13 @@ 配置验证器 提供实际 API 调用验证配置的有效性。 +支持通过环境变量 OI_SKIP_SSL_VERIFY / OI_SSL_VERIFY 控制 SSL 校验。 """ from __future__ import annotations import json +import os from typing import Any import httpx @@ -21,13 +23,57 @@ HTTP_UNAUTHORIZED = 401 HTTP_FORBIDDEN = 403 HTTP_NOT_FOUND = 404 +TRUTHY_VALUES = {"1", "true", "yes", "on"} +FALSY_VALUES = {"0", "false", "no", "off"} +SSL_VERIFY_ENV_VAR = "OI_SSL_VERIFY" +SSL_SKIP_ENV_VAR = 
"OI_SKIP_SSL_VERIFY" + + +def _parse_env_flag(value: str | None) -> bool | None: + """解析环境变量中的布尔标志值""" + if value is None: + return None + + normalized = value.strip().lower() + if normalized in TRUTHY_VALUES: + return True + if normalized in FALSY_VALUES: + return False + + return None + + +def _resolve_verify_ssl(*, verify_ssl: bool | None = None) -> bool: + """根据参数和环境变量确定是否启用 SSL 校验""" + if verify_ssl is not None: + return verify_ssl + + skip_flag = _parse_env_flag(os.getenv(SSL_SKIP_ENV_VAR)) + if skip_flag is True: + return False + if skip_flag is False: + return True + + verify_flag = _parse_env_flag(os.getenv(SSL_VERIFY_ENV_VAR)) + if verify_flag is not None: + return verify_flag + + return True + + +def should_verify_ssl(*, verify_ssl: bool | None = None) -> bool: + """公开的 SSL 校验决策入口,供其他模块复用""" + return _resolve_verify_ssl(verify_ssl=verify_ssl) + class APIValidator: """API 配置验证器""" - def __init__(self) -> None: + def __init__(self, *, verify_ssl: bool | None = None) -> None: """初始化验证器""" self.logger = get_logger(__name__) + self.verify_ssl = should_verify_ssl(verify_ssl=verify_ssl) + self.logger.debug("SSL 验证状态: %s", self.verify_ssl) async def validate_llm_config( # noqa: PLR0913 self, @@ -56,7 +102,11 @@ class APIValidator: self.logger.info("开始验证 LLM 配置 - 端点: %s, 模型: %s", endpoint, model) try: - client = AsyncOpenAI(api_key=api_key, base_url=endpoint, timeout=timeout) + client = self._create_openai_client( + endpoint=endpoint, + api_key=api_key, + timeout=timeout, + ) # 测试基本对话功能 chat_valid, chat_msg = await self._test_basic_chat(client, model, max_tokens, temperature) @@ -140,6 +190,22 @@ class APIValidator: # 两种格式都失败 return False, "无法连接到 Embedding 模型服务。", {} + def _create_openai_client( + self, + *, + endpoint: str, + api_key: str, + timeout: int, + ) -> AsyncOpenAI: + """构造 AsyncOpenAI 客户端,应用统一的 SSL 校验设置""" + http_client = httpx.AsyncClient(timeout=timeout, verify=self.verify_ssl) + return AsyncOpenAI( + api_key=api_key, + base_url=endpoint, + timeout=timeout, + http_client=http_client, + ) + async def _test_basic_chat( self, client: AsyncOpenAI, @@ -493,7 +559,11 @@ FUNCTION_CALL: get_current_time() ) -> tuple[bool, str, dict[str, Any]]: """验证 OpenAI 格式的 embedding 配置""" try: - client = AsyncOpenAI(api_key=api_key, base_url=endpoint, timeout=timeout) + client = self._create_openai_client( + endpoint=endpoint, + api_key=api_key, + timeout=timeout, + ) # 测试 embedding 功能 test_text = "这是一个测试文本" @@ -537,7 +607,7 @@ FUNCTION_CALL: get_current_time() data = {"inputs": "这是一个测试文本", "normalize": True} - async with httpx.AsyncClient(timeout=timeout) as client: + async with httpx.AsyncClient(timeout=timeout, verify=self.verify_ssl) as client: response = await client.post(embed_endpoint, json=data, headers=headers) if response.status_code == HTTP_OK: -- Gitee From 99d1e6f2425ded07ce00eb92639394a0e6153047 Mon Sep 17 00:00:00 2001 From: Hongyu Shi Date: Tue, 30 Sep 2025 09:05:50 +0800 Subject: [PATCH 4/6] =?UTF-8?q?fix:=20=E5=BB=B6=E8=BF=9F=E5=AF=BC=E5=85=A5?= =?UTF-8?q?=E5=B7=A5=E5=85=B7=E6=A8=A1=E5=9D=97?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Hongyu Shi --- src/backend/openai.py | 10 ++++++++-- 1 file changed, 8 insertions(+), 2 deletions(-) diff --git a/src/backend/openai.py b/src/backend/openai.py index 9bd80ec..ba9a887 100644 --- a/src/backend/openai.py +++ b/src/backend/openai.py @@ -3,6 +3,7 @@ import asyncio import time from collections.abc import AsyncGenerator +from importlib import import_module from typing import 
TYPE_CHECKING import httpx @@ -10,12 +11,17 @@ from openai import AsyncOpenAI, OpenAIError from backend.base import LLMClientBase from log.manager import get_logger, log_api_request, log_exception -from tool.validators import should_verify_ssl if TYPE_CHECKING: from openai.types.chat import ChatCompletionMessageParam +def _should_verify_ssl(*, verify_ssl: bool | None = None) -> bool: + """延迟导入工具模块以决定 SSL 校验策略""" + module = import_module("tool.validators") + return module.should_verify_ssl(verify_ssl=verify_ssl) + + class OpenAIClient(LLMClientBase): """OpenAI 大模型客户端""" @@ -32,7 +38,7 @@ class OpenAIClient(LLMClientBase): self.model = model self.base_url = base_url - self.verify_ssl = should_verify_ssl(verify_ssl=verify_ssl) + self.verify_ssl = _should_verify_ssl(verify_ssl=verify_ssl) self.client = AsyncOpenAI( api_key=api_key, base_url=base_url, -- Gitee From 57bcb016d6de646cb152ba1aeebe4baaebc4b6f1 Mon Sep 17 00:00:00 2001 From: Hongyu Shi Date: Tue, 30 Sep 2025 09:14:28 +0800 Subject: [PATCH 5/6] =?UTF-8?q?chore:=20=E6=9B=B4=E6=96=B0=20RPM=20Spec?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Hongyu Shi --- distribution/linux/euler-copilot-shell.spec | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/distribution/linux/euler-copilot-shell.spec b/distribution/linux/euler-copilot-shell.spec index 37e7d90..a674dd9 100644 --- a/distribution/linux/euler-copilot-shell.spec +++ b/distribution/linux/euler-copilot-shell.spec @@ -4,7 +4,7 @@ Name: euler-copilot-shell Version: 0.10.1 -Release: 4%{?dist} +Release: 5%{?dist} Summary: openEuler Intelligence 智能命令行工具集 License: MulanPSL-2.0 URL: https://gitee.com/openeuler/euler-copilot-shell @@ -131,6 +131,9 @@ rm -f /usr/lib/openeuler-intelligence/scripts/5-resource/env.* fi %changelog +* Tue Sep 30 2025 openEuler - 0.10.1-5 +- 支持通过环境变量 OI_SKIP_SSL_VERIFY / OI_SSL_VERIFY 控制 OpenAI 客户端 SSL 验证 + * Wed Sep 17 2025 openEuler - 0.10.1-4 - 修复 Token 计算器中类型注解的兼容性问题 - 优化部署脚本中下载资源文件的逻辑 -- Gitee From 14831ef4f040dce3d827eb5694dcf3cf89f132f2 Mon Sep 17 00:00:00 2001 From: Hongyu Shi Date: Tue, 30 Sep 2025 09:31:32 +0800 Subject: [PATCH 6/6] =?UTF-8?q?fix:=20=E6=9B=B4=E6=96=B0=20OutputLine=20?= =?UTF-8?q?=E7=BB=84=E4=BB=B6=E7=9A=84=E5=86=85=E5=AE=B9=E6=9B=B4=E6=96=B0?= =?UTF-8?q?=E6=96=B9=E6=B3=95=EF=BC=8C=E9=BB=98=E8=AE=A4=E5=90=AF=E7=94=A8?= =?UTF-8?q?=E5=B8=83=E5=B1=80=E6=9B=B4=E6=96=B0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Hongyu Shi --- src/app/tui.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/app/tui.py b/src/app/tui.py index 82832c0..fcab291 100644 --- a/src/app/tui.py +++ b/src/app/tui.py @@ -106,7 +106,7 @@ class OutputLine(Static): self.add_class("command-line") self.text_content = text - def update(self, content: VisualType = "", *, layout: bool = False) -> None: + def update(self, content: VisualType = "", *, layout: bool = True) -> None: """更新组件内容,确保禁用富文本标记解析""" # 如果是字符串,更新内部存储的文本内容 if isinstance(content, str): -- Gitee
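
Usage note for the SSL toggle added in PATCH 3/6: verification is resolved from an explicit parameter first, then OI_SKIP_SSL_VERIFY, then OI_SSL_VERIFY, and defaults to on. Below is a minimal self-contained sketch of that precedence for quick local testing; it reuses the names and value sets from src/tool/validators.py but is illustrative only, not the shipped module.

    from __future__ import annotations

    import os

    TRUTHY_VALUES = {"1", "true", "yes", "on"}
    FALSY_VALUES = {"0", "false", "no", "off"}


    def _parse_env_flag(value: str | None) -> bool | None:
        # Returns True/False for recognized values, None for unset or unrecognized input.
        if value is None:
            return None
        normalized = value.strip().lower()
        if normalized in TRUTHY_VALUES:
            return True
        if normalized in FALSY_VALUES:
            return False
        return None


    def should_verify_ssl(*, verify_ssl: bool | None = None) -> bool:
        # An explicit argument always wins over the environment.
        if verify_ssl is not None:
            return verify_ssl
        # OI_SKIP_SSL_VERIFY=1 disables verification; OI_SKIP_SSL_VERIFY=0 forces it on.
        skip_flag = _parse_env_flag(os.getenv("OI_SKIP_SSL_VERIFY"))
        if skip_flag is not None:
            return not skip_flag
        # Otherwise fall back to OI_SSL_VERIFY, defaulting to verification enabled.
        verify_flag = _parse_env_flag(os.getenv("OI_SSL_VERIFY"))
        return True if verify_flag is None else verify_flag


    if __name__ == "__main__":
        os.environ["OI_SKIP_SSL_VERIFY"] = "yes"
        print(should_verify_ssl())                  # False: skip flag disables verification
        print(should_verify_ssl(verify_ssl=True))   # True: explicit argument overrides env

In practice the same effect is obtained by exporting, for example, OI_SKIP_SSL_VERIFY=1 before launching the tool, which makes OpenAIClient and APIValidator build their httpx clients with verify=False.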