fix: Volcano engine large speech model cannot be added (#2058)
parent 6b9dd1ce96
commit d1f7a82661
@@ -18,7 +18,8 @@ from setting.models_provider.base_model_provider import BaseModelCredential, Val
 
 
 class OpenAILLMModelParams(BaseForm):
-    temperature = forms.SliderField(TooltipLabel(_('Temperature'), _('Higher values make the output more random, while lower values make it more focused and deterministic')),
+    temperature = forms.SliderField(TooltipLabel(_('Temperature'),
+                                                 _('Higher values make the output more random, while lower values make it more focused and deterministic')),
                                     required=True, default_value=0.7,
                                     _min=0.1,
                                     _max=1.0,
@@ -26,7 +27,8 @@ class OpenAILLMModelParams(BaseForm):
                                     precision=2)
 
     max_tokens = forms.SliderField(
-        TooltipLabel(_('Output the maximum Tokens'), _('Specify the maximum number of tokens that the model can generate')),
+        TooltipLabel(_('Output the maximum Tokens'),
+                     _('Specify the maximum number of tokens that the model can generate')),
         required=True, default_value=800,
         _min=1,
         _max=100000,
@@ -40,7 +42,8 @@ class OpenAILLMModelCredential(BaseForm, BaseModelCredential):
                  raise_exception=False):
         model_type_list = provider.get_model_type_list()
         if not any(list(filter(lambda mt: mt.get('value') == model_type, model_type_list))):
-            raise AppApiException(ValidCode.valid_error.value, __('{model_type} Model type is not supported').format(model_type=model_type))
+            raise AppApiException(ValidCode.valid_error.value,
+                                  __('{model_type} Model type is not supported').format(model_type=model_type))
 
         for key in ['api_base', 'api_key']:
             if key not in model_credential:
@@ -51,12 +54,14 @@ class OpenAILLMModelCredential(BaseForm, BaseModelCredential):
         try:
 
             model = provider.get_model(model_type, model_name, model_credential, **model_params)
-            model.invoke([HumanMessage(content=_('Hello'))])
+            model.invoke([HumanMessage(content=__('Hello'))])
         except Exception as e:
             if isinstance(e, AppApiException):
                 raise e
             if raise_exception:
-                raise AppApiException(ValidCode.valid_error.value, __('Verification failed, please check whether the parameters are correct: {error}').format(error=str(e)))
+                raise AppApiException(ValidCode.valid_error.value,
+                                      __('Verification failed, please check whether the parameters are correct: {error}').format(
+                                          error=str(e)))
             else:
                 return False
         return True
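
Aside from pure line re-wrapping, the one behavioural change in this diff is the switch from _('Hello') to __('Hello') in the validation probe. The following minimal sketch shows why that distinction can matter, assuming _ is django.utils.translation.gettext_lazy and __ is gettext in this module, and that the model client ultimately JSON-serializes message content; both are assumptions for illustration, not something the diff itself confirms.

    # Sketch only: lazy vs. evaluated translation strings (assumes Django is installed).
    import json

    from django.conf import settings

    settings.configure(USE_I18N=False)  # stand-alone demo configuration

    from django.utils.translation import gettext, gettext_lazy

    plain_msg = gettext('Hello')       # evaluated: a plain str
    lazy_msg = gettext_lazy('Hello')   # a lazy __proxy__ object, not a str

    json.dumps({'content': plain_msg})  # serializes fine
    try:
        json.dumps({'content': lazy_msg})
    except TypeError as err:
        # e.g. "Object of type __proxy__ is not JSON serializable"
        print(err)

A lazy proxy that reaches a strict str check or a json.dumps call fails, which is one plausible way a lazily translated probe message could break credential validation; the commit passes the evaluated string instead.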