test_llm.py

import os
from collections.abc import Generator
from time import sleep

import pytest

from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
from core.model_runtime.entities.message_entities import AssistantPromptMessage, SystemPromptMessage, UserPromptMessage
from core.model_runtime.entities.model_entities import AIModelEntity
from core.model_runtime.errors.validate import CredentialsValidateFailedError
from core.model_runtime.model_providers.baichuan.llm.llm import BaichuanLanguageModel
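
# NOTE: These are integration tests that call the live Baichuan API. They assume valid
# credentials in the BAICHUAN_API_KEY and BAICHUAN_SECRET_KEY environment variables,
# and each test starts with sleep(3) to space out requests (presumably to stay under
# the provider's rate limit).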


def test_predefined_models():
    model = BaichuanLanguageModel()
    model_schemas = model.predefined_models()

    assert len(model_schemas) >= 1
    assert isinstance(model_schemas[0], AIModelEntity)


def test_validate_credentials_for_chat_model():
    sleep(3)
    model = BaichuanLanguageModel()

    # Invalid credentials should be rejected.
    with pytest.raises(CredentialsValidateFailedError):
        model.validate_credentials(
            model="baichuan2-turbo", credentials={"api_key": "invalid_key", "secret_key": "invalid_key"}
        )

    # Valid credentials from the environment should pass validation.
    model.validate_credentials(
        model="baichuan2-turbo",
        credentials={
            "api_key": os.environ.get("BAICHUAN_API_KEY"),
            "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"),
        },
    )


def test_invoke_model():
    sleep(3)
    model = BaichuanLanguageModel()

    response = model.invoke(
        model="baichuan2-turbo",
        credentials={
            "api_key": os.environ.get("BAICHUAN_API_KEY"),
            "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"),
        },
        prompt_messages=[UserPromptMessage(content="Hello World!")],
        model_parameters={
            "temperature": 0.7,
            "top_p": 1.0,
            "top_k": 1,
        },
        stop=["you"],
        user="abc-123",
        stream=False,
    )

    assert isinstance(response, LLMResult)
    assert len(response.message.content) > 0
    assert response.usage.total_tokens > 0


def test_invoke_model_with_system_message():
    sleep(3)
    model = BaichuanLanguageModel()

    response = model.invoke(
        model="baichuan2-turbo",
        credentials={
            "api_key": os.environ.get("BAICHUAN_API_KEY"),
            "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"),
        },
        prompt_messages=[
            SystemPromptMessage(content="请记住你是Kasumi。"),  # "Remember that you are Kasumi."
            UserPromptMessage(content="现在告诉我你是谁?"),  # "Now tell me who you are."
        ],
        model_parameters={
            "temperature": 0.7,
            "top_p": 1.0,
            "top_k": 1,
        },
        stop=["you"],
        user="abc-123",
        stream=False,
    )

    assert isinstance(response, LLMResult)
    assert len(response.message.content) > 0
    assert response.usage.total_tokens > 0


def test_invoke_stream_model():
    sleep(3)
    model = BaichuanLanguageModel()

    response = model.invoke(
        model="baichuan2-turbo",
        credentials={
            "api_key": os.environ.get("BAICHUAN_API_KEY"),
            "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"),
        },
        prompt_messages=[UserPromptMessage(content="Hello World!")],
        model_parameters={
            "temperature": 0.7,
            "top_p": 1.0,
            "top_k": 1,
        },
        stop=["you"],
        stream=True,
        user="abc-123",
    )

    assert isinstance(response, Generator)

    for chunk in response:
        assert isinstance(chunk, LLMResultChunk)
        assert isinstance(chunk.delta, LLMResultChunkDelta)
        assert isinstance(chunk.delta.message, AssistantPromptMessage)
        # Every chunk except the final one (which carries a finish_reason) should have content.
        assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True


def test_invoke_with_search():
    sleep(3)
    model = BaichuanLanguageModel()

    response = model.invoke(
        model="baichuan2-turbo",
        credentials={
            "api_key": os.environ.get("BAICHUAN_API_KEY"),
            "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"),
        },
        prompt_messages=[UserPromptMessage(content="北京今天的天气怎么样")],  # "What is the weather like in Beijing today?"
        model_parameters={
            "temperature": 0.7,
            "top_p": 1.0,
            "top_k": 1,
            "with_search_enhance": True,
        },
        stop=["you"],
        stream=True,
        user="abc-123",
    )

    assert isinstance(response, Generator)

    total_message = ""
    for chunk in response:
        assert isinstance(chunk, LLMResultChunk)
        assert isinstance(chunk.delta, LLMResultChunkDelta)
        assert isinstance(chunk.delta.message, AssistantPromptMessage)
        assert len(chunk.delta.message.content) > 0 if not chunk.delta.finish_reason else True
        total_message += chunk.delta.message.content

    # The full answer should not contain "不" ("not"/"no"), presumably checking that the model
    # does not claim it cannot access today's weather when search enhancement is enabled.
    assert "不" not in total_message


def test_get_num_tokens():
    sleep(3)
    model = BaichuanLanguageModel()

    response = model.get_num_tokens(
        model="baichuan2-turbo",
        credentials={
            "api_key": os.environ.get("BAICHUAN_API_KEY"),
            "secret_key": os.environ.get("BAICHUAN_SECRET_KEY"),
        },
        prompt_messages=[UserPromptMessage(content="Hello World!")],
        tools=[],
    )

    assert isinstance(response, int)
    assert response == 9