@@ -402,25 +402,25 @@ class BedrockLargeLanguageModel(LargeLanguageModel):
         :param credentials: model credentials
         :return:
         """
-
-        if "anthropic.claude-3" in model:
-            try:
-                self._invoke_claude(model=model,
-                                    credentials=credentials,
-                                    prompt_messages=[{"role": "user", "content": "ping"}],
-                                    model_parameters={},
-                                    stop=None,
-                                    stream=False)
-
-            except Exception as ex:
-                raise CredentialsValidateFailedError(str(ex))
-
+        required_params = {}
+        if "anthropic" in model:
+            required_params = {
+                "max_tokens": 32,
+            }
+        elif "ai21" in model:
+            # ValidationException: Malformed input request: #/temperature: expected type: Number, found: Null#/maxTokens: expected type: Integer, found: Null#/topP: expected type: Number, found: Null, please reformat your input and try again.
+            required_params = {
+                "temperature": 0.7,
+                "topP": 0.9,
+                "maxTokens": 32,
+            }
+
         try:
             ping_message = UserPromptMessage(content="ping")
-            self._generate(model=model,
+            self._invoke(model=model,
                           credentials=credentials,
                           prompt_messages=[ping_message],
-                          model_parameters={},
+                          model_parameters=required_params,
                           stream=False)
 
         except ClientError as ex:
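The comment quoted in the `ai21` branch is the reason these defaults exist: Bedrock's AI21 (Jurassic-2) request schema rejects null values for `temperature`, `topP`, and `maxTokens`, so the credential-check ping has to send real numbers, and keeping the token cap at 32 keeps that round trip cheap. A minimal standalone sketch of the same ping against the raw Bedrock runtime API is below; the region, model id (`ai21.j2-mid-v1`), and exact body shape are illustrative assumptions rather than code from this file (only the camelCase field names come from the quoted ValidationException).

```python
import json

import boto3
from botocore.exceptions import ClientError

# Sketch only: region and model id are assumptions for illustration; the field
# names (maxTokens/topP/temperature) match the ValidationException in the diff.
client = boto3.client("bedrock-runtime", region_name="us-east-1")

body = {
    "prompt": "ping",
    # AI21 rejects null values for these fields, hence the small defaults
    # mirrored by required_params in the patched validate_credentials.
    "temperature": 0.7,
    "topP": 0.9,
    "maxTokens": 32,
}

try:
    response = client.invoke_model(
        modelId="ai21.j2-mid-v1",
        contentType="application/json",
        accept="application/json",
        body=json.dumps(body),
    )
    print(json.loads(response["body"].read()))
except ClientError as exc:
    # Invalid credentials or a malformed body surface here, mirroring the
    # "except ClientError" branch in validate_credentials above.
    raise RuntimeError(f"credential check failed: {exc}") from exc
```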