openai: Handle BAD_GATEWAY, be more resilient to failure

Louis Dureuil 2024-03-05 12:18:54 +01:00
parent f4a6261dea
commit 36d17110d8
2 changed files with 10 additions and 22 deletions

@@ -59,8 +59,8 @@ pub enum EmbedErrorKind {
     OpenAiAuth(OpenAiError),
     #[error("sent too many requests to OpenAI: {0}")]
     OpenAiTooManyRequests(OpenAiError),
-    #[error("received internal error from OpenAI: {0}")]
-    OpenAiInternalServerError(OpenAiError),
+    #[error("received internal error from OpenAI: {0:?}")]
+    OpenAiInternalServerError(Option<OpenAiError>),
     #[error("sent too many tokens in a request to OpenAI: {0}")]
     OpenAiTooManyTokens(OpenAiError),
     #[error("received unhandled HTTP status code {0} from OpenAI")]

@@ -106,7 +106,7 @@ impl EmbedError {
         Self { kind: EmbedErrorKind::OpenAiTooManyRequests(inner), fault: FaultSource::Runtime }
     }

-    pub(crate) fn openai_internal_server_error(inner: OpenAiError) -> EmbedError {
+    pub(crate) fn openai_internal_server_error(inner: Option<OpenAiError>) -> EmbedError {
         Self { kind: EmbedErrorKind::OpenAiInternalServerError(inner), fault: FaultSource::Runtime }
     }
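
The change in the first file widens the error variant: a 5xx response may arrive with no parseable OpenAI error body at all (a proxy answering 502 often returns HTML or nothing), so the payload becomes Option<OpenAiError>, and the format string moves from {0} to {0:?} because Option<T> implements Debug but not Display. A minimal, self-contained sketch of that thiserror pattern, with a simplified stand-in for the real OpenAiError type:

use thiserror::Error;

// Simplified stand-in for the real OpenAiError payload (illustration only).
#[derive(Debug)]
pub struct OpenAiError(pub String);

#[derive(Debug, Error)]
pub enum EmbedErrorKind {
    // `{0:?}` formats the field with Debug; this is required because
    // Option<OpenAiError> has no Display impl, and the body may have
    // been unparseable, leaving `None`.
    #[error("received internal error from OpenAI: {0:?}")]
    OpenAiInternalServerError(Option<OpenAiError>),
}

fn main() {
    // Even with no parseable body, the error still renders cleanly.
    println!("{}", EmbedErrorKind::OpenAiInternalServerError(None));
    println!("{}", EmbedErrorKind::OpenAiInternalServerError(Some(OpenAiError("upstream exploded".into()))));
}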

@@ -220,24 +220,12 @@ impl Embedder {
                         error_response.error,
                     )));
                 }
-                StatusCode::INTERNAL_SERVER_ERROR => {
-                    let error_response: OpenAiErrorResponse = response
-                        .json()
-                        .await
-                        .map_err(EmbedError::openai_unexpected)
-                        .map_err(Retry::retry_later)?;
-                    return Err(Retry::retry_later(EmbedError::openai_internal_server_error(
-                        error_response.error,
-                    )));
-                }
-                StatusCode::SERVICE_UNAVAILABLE => {
-                    let error_response: OpenAiErrorResponse = response
-                        .json()
-                        .await
-                        .map_err(EmbedError::openai_unexpected)
-                        .map_err(Retry::retry_later)?;
+                StatusCode::INTERNAL_SERVER_ERROR
+                | StatusCode::BAD_GATEWAY
+                | StatusCode::SERVICE_UNAVAILABLE => {
+                    let error_response: Result<OpenAiErrorResponse, _> = response.json().await;
                     return Err(Retry::retry_later(EmbedError::openai_internal_server_error(
-                        error_response.error,
+                        error_response.ok().map(|error_response| error_response.error),
                     )));
                 }
                 StatusCode::BAD_REQUEST => {

@@ -248,14 +236,14 @@ impl Embedder {
                         .map_err(EmbedError::openai_unexpected)
                         .map_err(Retry::retry_later)?;
-                    tracing::warn!("OpenAI: input was too long, retrying on tokenized version. For best performance, limit the size of your prompt.");
+                    tracing::warn!("OpenAI: received `BAD_REQUEST`. Input was maybe too long, retrying on tokenized version. For best performance, limit the size of your prompt.");
                     return Err(Retry::retry_tokenized(EmbedError::openai_too_many_tokens(
                         error_response.error,
                     )));
                 }
                 code => {
-                    return Err(Retry::give_up(EmbedError::openai_unhandled_status_code(
+                    return Err(Retry::retry_later(EmbedError::openai_unhandled_status_code(
                         code.as_u16(),
                     )));
                 }
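
The second file is where the resilience lands: INTERNAL_SERVER_ERROR, BAD_GATEWAY, and SERVICE_UNAVAILABLE now share one retry-later arm that no longer aborts when the error body fails to deserialize (.ok() turns a parse failure into None), and previously fatal unhandled status codes are retried as well. A condensed sketch of that classification logic under assumed names; RetryDecision and classify stand in for the crate's Retry type and its response handling, not the real API:

use reqwest::{Response, StatusCode};
use serde::Deserialize;

#[derive(Deserialize)]
struct OpenAiErrorResponse {
    error: OpenAiError,
}

#[derive(Debug, Deserialize)]
struct OpenAiError {
    message: String,
}

// Hypothetical stand-in for the crate's Retry type.
#[derive(Debug)]
enum RetryDecision {
    RetryLater(String),
    GiveUp(String),
}

async fn classify(response: Response) -> Result<Response, RetryDecision> {
    let status = response.status();
    match status {
        // Transient upstream failures, including proxies answering 502:
        // retry later, keeping whatever error payload parsed, if any.
        StatusCode::INTERNAL_SERVER_ERROR
        | StatusCode::BAD_GATEWAY
        | StatusCode::SERVICE_UNAVAILABLE => {
            // `.ok()` swallows deserialization errors instead of aborting
            // the retry path: an empty or HTML body simply yields `None`.
            let body: Option<OpenAiErrorResponse> = response.json().await.ok();
            Err(RetryDecision::RetryLater(format!(
                "internal error from OpenAI: {:?}",
                body.map(|b| b.error)
            )))
        }
        // An authentication failure will not fix itself: give up at once.
        StatusCode::UNAUTHORIZED => {
            Err(RetryDecision::GiveUp("invalid OpenAI API key".into()))
        }
        // Anything else unexpected is now optimistically retried too.
        s if !s.is_success() => {
            Err(RetryDecision::RetryLater(format!("unhandled status code {s}")))
        }
        _ => Ok(response),
    }
}

The design choice is to give up only on errors that are clearly permanent and treat everything else as transient, which is what "be more resilient to failure" amounts to here.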