Browse Source

refactor: Remove unused code in large_language_model.py (#5433)

-LAN- 10 months ago
parent
commit
142dc0afd7

+ 0 - 31
api/core/model_runtime/model_providers/__base/large_language_model.py

@@ -489,37 +489,6 @@ if you are not sure about the structure.
         """Cut off the text as soon as any stop words occur."""
         """Cut off the text as soon as any stop words occur."""
         return re.split("|".join(stop), text, maxsplit=1)[0]
         return re.split("|".join(stop), text, maxsplit=1)[0]
 
 
-    def _llm_result_to_stream(self, result: LLMResult) -> Generator:
-        """
-from typing_extensions import deprecated
-        Transform llm result to stream
-
-        :param result: llm result
-        :return: stream
-        """
-        index = 0
-
-        tool_calls = result.message.tool_calls
-
-        for word in result.message.content:
-            assistant_prompt_message = AssistantPromptMessage(
-                content=word,
-                tool_calls=tool_calls if index == (len(result.message.content) - 1) else []
-            )
-
-            yield LLMResultChunk(
-                model=result.model,
-                prompt_messages=result.prompt_messages,
-                system_fingerprint=result.system_fingerprint,
-                delta=LLMResultChunkDelta(
-                    index=index,
-                    message=assistant_prompt_message,
-                )
-            )
-
-            index += 1
-            time.sleep(0.01)
-
     def get_parameter_rules(self, model: str, credentials: dict) -> list[ParameterRule]:
         """
         Get parameter rules

+ 0 - 5
api/tests/integration_tests/model_runtime/azure_openai/test_llm.py

@@ -156,11 +156,6 @@ def test_invoke_chat_model(setup_openai_mock):
     assert isinstance(result, LLMResult)
     assert len(result.message.content) > 0
 
 
-    for chunk in model._llm_result_to_stream(result):
-        assert isinstance(chunk, LLMResultChunk)
-        assert isinstance(chunk.delta, LLMResultChunkDelta)
-        assert isinstance(chunk.delta.message, AssistantPromptMessage)
-        assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
 
 
 @pytest.mark.parametrize('setup_openai_mock', [['chat']], indirect=True)
 def test_invoke_stream_chat_model(setup_openai_mock):

+ 0 - 6
api/tests/integration_tests/model_runtime/cohere/test_llm.py

@@ -136,12 +136,6 @@ def test_invoke_chat_model():
     assert isinstance(result, LLMResult)
     assert len(result.message.content) > 0
 
 
-    for chunk in model._llm_result_to_stream(result):
-        assert isinstance(chunk, LLMResultChunk)
-        assert isinstance(chunk.delta, LLMResultChunkDelta)
-        assert isinstance(chunk.delta.message, AssistantPromptMessage)
-        assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
-
 
 
 def test_invoke_stream_chat_model():
     model = CohereLargeLanguageModel()

+ 0 - 6
api/tests/integration_tests/model_runtime/openai/test_llm.py

@@ -156,12 +156,6 @@ def test_invoke_chat_model(setup_openai_mock):
     assert isinstance(result, LLMResult)
     assert len(result.message.content) > 0
 
 
-    for chunk in model._llm_result_to_stream(result):
-        assert isinstance(chunk, LLMResultChunk)
-        assert isinstance(chunk.delta, LLMResultChunkDelta)
-        assert isinstance(chunk.delta.message, AssistantPromptMessage)
-        assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
-
 @pytest.mark.parametrize('setup_openai_mock', [['chat']], indirect=True)
 def test_invoke_chat_model_with_vision(setup_openai_mock):
     model = OpenAILargeLanguageModel()