"""Integration tests for the Cohere large-language-model provider (requires COHERE_API_KEY)."""
  1. import os
  2. from typing import Generator
  3. import pytest
  4. from core.model_runtime.entities.llm_entities import LLMResult, LLMResultChunk, LLMResultChunkDelta
  5. from core.model_runtime.entities.message_entities import (AssistantPromptMessage, SystemPromptMessage,
  6. UserPromptMessage)
  7. from core.model_runtime.errors.validate import CredentialsValidateFailedError
  8. from core.model_runtime.model_providers.cohere.llm.llm import CohereLargeLanguageModel
  9. def test_validate_credentials_for_chat_model():
  10. model = CohereLargeLanguageModel()
  11. with pytest.raises(CredentialsValidateFailedError):
  12. model.validate_credentials(
  13. model='command-light-chat',
  14. credentials={
  15. 'api_key': 'invalid_key'
  16. }
  17. )
  18. model.validate_credentials(
  19. model='command-light-chat',
  20. credentials={
  21. 'api_key': os.environ.get('COHERE_API_KEY')
  22. }
  23. )
  24. def test_validate_credentials_for_completion_model():
  25. model = CohereLargeLanguageModel()
  26. with pytest.raises(CredentialsValidateFailedError):
  27. model.validate_credentials(
  28. model='command-light',
  29. credentials={
  30. 'api_key': 'invalid_key'
  31. }
  32. )
  33. model.validate_credentials(
  34. model='command-light',
  35. credentials={
  36. 'api_key': os.environ.get('COHERE_API_KEY')
  37. }
  38. )
  39. def test_invoke_completion_model():
  40. model = CohereLargeLanguageModel()
  41. credentials = {
  42. 'api_key': os.environ.get('COHERE_API_KEY')
  43. }
  44. result = model.invoke(
  45. model='command-light',
  46. credentials=credentials,
  47. prompt_messages=[
  48. UserPromptMessage(
  49. content='Hello World!'
  50. )
  51. ],
  52. model_parameters={
  53. 'temperature': 0.0,
  54. 'max_tokens': 1
  55. },
  56. stream=False,
  57. user="abc-123"
  58. )
  59. assert isinstance(result, LLMResult)
  60. assert len(result.message.content) > 0
  61. assert model._num_tokens_from_string('command-light', credentials, result.message.content) == 1
  62. def test_invoke_stream_completion_model():
  63. model = CohereLargeLanguageModel()
  64. result = model.invoke(
  65. model='command-light',
  66. credentials={
  67. 'api_key': os.environ.get('COHERE_API_KEY')
  68. },
  69. prompt_messages=[
  70. UserPromptMessage(
  71. content='Hello World!'
  72. )
  73. ],
  74. model_parameters={
  75. 'temperature': 0.0,
  76. 'max_tokens': 100
  77. },
  78. stream=True,
  79. user="abc-123"
  80. )
  81. assert isinstance(result, Generator)
  82. for chunk in result:
  83. assert isinstance(chunk, LLMResultChunk)
  84. assert isinstance(chunk.delta, LLMResultChunkDelta)
  85. assert isinstance(chunk.delta.message, AssistantPromptMessage)
  86. assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
  87. def test_invoke_chat_model():
  88. model = CohereLargeLanguageModel()
  89. result = model.invoke(
  90. model='command-light-chat',
  91. credentials={
  92. 'api_key': os.environ.get('COHERE_API_KEY')
  93. },
  94. prompt_messages=[
  95. SystemPromptMessage(
  96. content='You are a helpful AI assistant.',
  97. ),
  98. UserPromptMessage(
  99. content='Hello World!'
  100. )
  101. ],
  102. model_parameters={
  103. 'temperature': 0.0,
  104. 'p': 0.99,
  105. 'presence_penalty': 0.0,
  106. 'frequency_penalty': 0.0,
  107. 'max_tokens': 10
  108. },
  109. stop=['How'],
  110. stream=False,
  111. user="abc-123"
  112. )
  113. assert isinstance(result, LLMResult)
  114. assert len(result.message.content) > 0
  115. for chunk in model._llm_result_to_stream(result):
  116. assert isinstance(chunk, LLMResultChunk)
  117. assert isinstance(chunk.delta, LLMResultChunkDelta)
  118. assert isinstance(chunk.delta.message, AssistantPromptMessage)
  119. assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
  120. def test_invoke_stream_chat_model():
  121. model = CohereLargeLanguageModel()
  122. result = model.invoke(
  123. model='command-light-chat',
  124. credentials={
  125. 'api_key': os.environ.get('COHERE_API_KEY')
  126. },
  127. prompt_messages=[
  128. SystemPromptMessage(
  129. content='You are a helpful AI assistant.',
  130. ),
  131. UserPromptMessage(
  132. content='Hello World!'
  133. )
  134. ],
  135. model_parameters={
  136. 'temperature': 0.0,
  137. 'max_tokens': 100
  138. },
  139. stream=True,
  140. user="abc-123"
  141. )
  142. assert isinstance(result, Generator)
  143. for chunk in result:
  144. assert isinstance(chunk, LLMResultChunk)
  145. assert isinstance(chunk.delta, LLMResultChunkDelta)
  146. assert isinstance(chunk.delta.message, AssistantPromptMessage)
  147. assert len(chunk.delta.message.content) > 0 if chunk.delta.finish_reason is None else True
  148. if chunk.delta.finish_reason is not None:
  149. assert chunk.delta.usage is not None
  150. assert chunk.delta.usage.completion_tokens > 0
  151. def test_get_num_tokens():
  152. model = CohereLargeLanguageModel()
  153. num_tokens = model.get_num_tokens(
  154. model='command-light',
  155. credentials={
  156. 'api_key': os.environ.get('COHERE_API_KEY')
  157. },
  158. prompt_messages=[
  159. UserPromptMessage(
  160. content='Hello World!'
  161. )
  162. ]
  163. )
  164. assert num_tokens == 3
  165. num_tokens = model.get_num_tokens(
  166. model='command-light-chat',
  167. credentials={
  168. 'api_key': os.environ.get('COHERE_API_KEY')
  169. },
  170. prompt_messages=[
  171. SystemPromptMessage(
  172. content='You are a helpful AI assistant.',
  173. ),
  174. UserPromptMessage(
  175. content='Hello World!'
  176. )
  177. ]
  178. )
  179. assert num_tokens == 15
  180. def test_fine_tuned_model():
  181. model = CohereLargeLanguageModel()
  182. # test invoke
  183. result = model.invoke(
  184. model='85ec47be-6139-4f75-a4be-0f0ec1ef115c-ft',
  185. credentials={
  186. 'api_key': os.environ.get('COHERE_API_KEY'),
  187. 'mode': 'completion'
  188. },
  189. prompt_messages=[
  190. SystemPromptMessage(
  191. content='You are a helpful AI assistant.',
  192. ),
  193. UserPromptMessage(
  194. content='Hello World!'
  195. )
  196. ],
  197. model_parameters={
  198. 'temperature': 0.0,
  199. 'max_tokens': 100
  200. },
  201. stream=False,
  202. user="abc-123"
  203. )
  204. assert isinstance(result, LLMResult)
  205. def test_fine_tuned_chat_model():
  206. model = CohereLargeLanguageModel()
  207. # test invoke
  208. result = model.invoke(
  209. model='94f2d55a-4c79-4c00-bde4-23962e74b170-ft',
  210. credentials={
  211. 'api_key': os.environ.get('COHERE_API_KEY'),
  212. 'mode': 'chat'
  213. },
  214. prompt_messages=[
  215. SystemPromptMessage(
  216. content='You are a helpful AI assistant.',
  217. ),
  218. UserPromptMessage(
  219. content='Hello World!'
  220. )
  221. ],
  222. model_parameters={
  223. 'temperature': 0.0,
  224. 'max_tokens': 100
  225. },
  226. stream=False,
  227. user="abc-123"
  228. )
  229. assert isinstance(result, LLMResult)