ref: Propagate traceback in exception logs #4030

Open · wants to merge 1 commit into base: main
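The pattern applied throughout the diff below: calls like logger.error(f"...: {exc}"), which keep only the exception's message, are replaced with logger.exception(...) so loguru records the full traceback, or with logger.opt(exception=True) where the log should stay at a non-error level. A minimal sketch of the variants, assuming loguru (the logger used across langflow); the function names are illustrative only, not from the PR:

from loguru import logger


def parse_old(payload: str) -> int | None:  # hypothetical helper, not part of the PR
    try:
        return int(payload)
    except Exception as exc:
        # Old pattern: only the exception's message is recorded; the traceback is lost.
        logger.error(f"Error parsing payload: {exc}")
        return None


def parse_new(payload: str) -> int | None:
    try:
        return int(payload)
    except Exception:
        # New pattern: logger.exception logs at ERROR level and automatically
        # appends the traceback of the exception currently being handled.
        logger.exception("Error parsing payload")
        return None


def parse_new_non_error(payload: str) -> int | None:
    try:
        return int(payload)
    except Exception:
        # For DEBUG/WARNING-level logs, opt(exception=True) attaches the traceback
        # while keeping the chosen severity, as in try_running_celery_task and FlowTool.
        logger.opt(exception=True).debug("Error parsing payload")
        return None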
4 changes: 2 additions & 2 deletions src/backend/base/langflow/alembic/env.py
@@ -72,8 +72,8 @@ def run_migrations_online() -> None:
initialize_settings_service()
service_manager.register_factory(DatabaseServiceFactory())
connectable = get_db_service().engine
except Exception as e:
logger.error(f"Error getting database engine: {e}")
except Exception:
logger.exception("Error getting database engine")
url = os.getenv("LANGFLOW_DATABASE_URL")
url = url or config.get_main_option("sqlalchemy.url")
if url:
4 changes: 2 additions & 2 deletions src/backend/base/langflow/api/v1/callback.py
@@ -74,8 +74,8 @@ async def on_tool_end(self, output: str, **kwargs: Any) -> Any:
# This is to emulate the stream of tokens
for resp in resps:
await self.socketio_service.emit_token(to=self.sid, data=resp.model_dump())
except Exception as exc:
logger.error(f"Error sending response: {exc}")
except Exception:
logger.exception("Error sending response")

async def on_tool_error(
self,
24 changes: 10 additions & 14 deletions src/backend/base/langflow/api/v1/chat.py
@@ -60,8 +60,8 @@ async def try_running_celery_task(vertex, user_id):

task = build_vertex.delay(vertex)
vertex.task_id = task.id
except Exception as exc:
logger.debug(f"Error running task in celery: {exc}")
except Exception:
logger.opt(exception=True).debug("Error running task in celery")
vertex.task_id = None
await vertex.build(user_id=user_id)
return vertex
@@ -135,8 +135,7 @@ async def retrieve_vertices_order(
)
if "stream or streaming set to True" in str(exc):
raise HTTPException(status_code=400, detail=str(exc)) from exc
logger.error(f"Error checking build status: {exc}")
logger.exception(exc)
logger.exception("Error checking build status")
raise HTTPException(status_code=500, detail=str(exc)) from exc


@@ -169,7 +168,7 @@ async def build_graph_and_get_order() -> tuple[list[str], list[str], Graph]:
try:
first_layer = graph.sort_vertices(stop_component_id, start_component_id)
except Exception as exc:
logger.error(exc)
logger.exception(exc)
first_layer = graph.sort_vertices()
else:
first_layer = graph.sort_vertices()
@@ -203,8 +202,7 @@ async def build_graph_and_get_order() -> tuple[list[str], list[str], Graph]:
)
if "stream or streaming set to True" in str(exc):
raise HTTPException(status_code=400, detail=str(exc)) from exc
logger.error(f"Error checking build status: {exc}")
logger.exception(exc)
logger.exception("Error checking build status")
raise HTTPException(status_code=500, detail=str(exc)) from exc

async def _build_vertex(vertex_id: str, graph: Graph, event_manager: EventManager) -> VertexBuildResponse:
@@ -241,7 +239,7 @@ async def _build_vertex(vertex_id: str, graph: Graph, event_manager: EventManage
tb = exc.formatted_traceback
else:
tb = traceback.format_exc()
logger.exception(f"Error building Component: {exc}")
logger.exception("Error building Component")
params = format_exception_message(exc)
message = {"errorMessage": params, "stackTrace": tb}
valid = False
@@ -315,8 +313,7 @@ async def _build_vertex(vertex_id: str, graph: Graph, event_manager: EventManage
componentErrorMessage=str(exc),
),
)
logger.error(f"Error building Component: \n\n{exc}")
logger.exception(exc)
logger.exception("Error building Component")
message = parse_exception(exc)
raise HTTPException(status_code=500, detail=message) from exc

@@ -524,7 +521,7 @@ async def build_vertex(
tb = exc.formatted_traceback
else:
tb = traceback.format_exc()
logger.exception(f"Error building Component: {exc}")
logger.exception("Error building Component")
params = format_exception_message(exc)
message = {"errorMessage": params, "stackTrace": tb}
valid = False
@@ -602,8 +599,7 @@ async def build_vertex(
componentErrorMessage=str(exc),
),
)
logger.error(f"Error building Component: \n\n{exc}")
logger.exception(exc)
logger.exception("Error building Component")
message = parse_exception(exc)
raise HTTPException(status_code=500, detail=message) from exc

@@ -695,7 +691,7 @@ async def stream_vertex():
raise ValueError(msg)

except Exception as exc:
logger.exception(f"Error building Component: {exc}")
logger.exception("Error building Component")
exc_message = parse_exception(exc)
if exc_message == "The message must be an iterator or an async iterator.":
exc_message = "This stream has already been closed."
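In the _build_vertex and build_vertex handlers above, the formatted traceback is still captured explicitly because it is returned to the client in the error payload; only the log call changes, since logger.exception already records the message together with the traceback. A condensed sketch of that shape (the build_component helper and payload keys are simplified stand-ins, not the actual handler signatures):

import traceback

from fastapi import HTTPException
from loguru import logger


def build_component(build) -> object:  # hypothetical stand-in for the vertex build handlers
    try:
        return build()
    except Exception as exc:
        # The traceback is kept for the API response body ...
        tb = traceback.format_exc()
        # ... while the log line drops the f-string interpolation of exc:
        # logger.exception attaches the same traceback to the log record.
        logger.exception("Error building Component")
        raise HTTPException(
            status_code=500,
            detail={"errorMessage": str(exc), "stackTrace": tb},
        ) from exc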
15 changes: 7 additions & 8 deletions src/backend/base/langflow/api/v1/endpoints.py
@@ -168,8 +168,8 @@ async def simple_run_flow_task(
api_key_user=api_key_user,
)

except Exception as exc:
logger.exception(f"Error running flow {flow.id} task: {exc}")
except Exception:
logger.exception(f"Error running flow {flow.id} task")


@router.post("/run/{flow_id_or_name}", response_model=RunResponse, response_model_exclude_none=True)
@@ -280,7 +280,7 @@ async def simplified_run_flow(
logger.exception(exc)
raise APIException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, exception=exc, flow=flow) from exc
except InvalidChatInputException as exc:
logger.error(exc)
logger.exception(exc)
raise HTTPException(status_code=status.HTTP_400_BAD_REQUEST, detail=str(exc)) from exc
except Exception as exc:
logger.exception(exc)
@@ -293,7 +293,6 @@ async def simplified_run_flow(
runErrorMessage=str(exc),
),
)
logger.exception(exc)
raise APIException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, exception=exc, flow=flow) from exc


@@ -478,15 +477,15 @@ async def experimental_run_flow(
except sa.exc.StatementError as exc:
# StatementError('(builtins.ValueError) badly formed hexadecimal UUID string')
if "badly formed hexadecimal UUID string" in str(exc):
logger.error(f"Flow ID {flow_id_str} is not a valid UUID")
logger.exception(f"Flow ID {flow_id_str} is not a valid UUID")
# This means the Flow ID is not a valid UUID which means it can't find the flow
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
except ValueError as exc:
if f"Flow {flow_id_str} not found" in str(exc):
logger.error(f"Flow {flow_id_str} not found")
logger.exception(f"Flow {flow_id_str} not found")
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
if f"Session {session_id} not found" in str(exc):
logger.error(f"Session {session_id} not found")
logger.exception(f"Session {session_id} not found")
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
logger.exception(exc)
raise HTTPException(status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=str(exc)) from exc
@@ -573,7 +572,7 @@ async def create_upload_file(
file_path=file_path,
)
except Exception as exc:
logger.error(f"Error saving file: {exc}")
logger.exception("Error saving file")
raise HTTPException(status_code=500, detail=str(exc)) from exc


2 changes: 1 addition & 1 deletion src/backend/base/langflow/api/v1/flows.py
@@ -170,7 +170,7 @@ def read_flows(
if example_flow.id not in flow_ids:
flows.append(example_flow) # type: ignore
except Exception as e:
logger.error(e)
logger.exception(e)

if remove_example_flows:
flows = [flow for flow in flows if flow.folder_id != folder.id]
2 changes: 1 addition & 1 deletion src/backend/base/langflow/api/v1/starter_projects.py
@@ -19,5 +19,5 @@ def get_starter_projects(
try:
return get_starter_projects_dump()
except Exception as exc:
logger.error(exc)
logger.exception(exc)
raise HTTPException(status_code=500, detail=str(exc)) from exc
4 changes: 2 additions & 2 deletions src/backend/base/langflow/api/v1/store.py
@@ -42,8 +42,8 @@ def get_optional_user_store_api_key(
return None
try:
return auth_utils.decrypt_api_key(user.store_api_key, settings_service)
except Exception as e:
logger.error(f"Failed to decrypt API key: {e}")
except Exception:
logger.exception("Failed to decrypt API key")
return user.store_api_key


4 changes: 2 additions & 2 deletions src/backend/base/langflow/base/prompts/api_utils.py
@@ -133,9 +133,9 @@ def validate_prompt(prompt_template: str, silent_errors: bool = False) -> list[s
try:
PromptTemplate(template=prompt_template, input_variables=input_variables)
except Exception as exc:
logger.error(f"Invalid prompt: {exc}")
msg = f"Invalid prompt: {exc}"
logger.exception(msg)
if not silent_errors:
msg = f"Invalid prompt: {exc}"
raise ValueError(msg) from exc

return input_variables
4 changes: 2 additions & 2 deletions src/backend/base/langflow/base/tools/flow_tool.py
@@ -103,8 +103,8 @@ async def _arun(
tweaks = self.build_tweaks_dict(args, kwargs)
try:
run_id = self.graph.run_id if self.graph else None
except Exception as e:
logger.warning(f"Failed to set run_id: {e}")
except Exception:
logger.opt(exception=True).warning("Failed to set run_id")
run_id = None
run_outputs = await run_flow(
tweaks={key: {"input_value": value} for key, value in tweaks.items()},
@@ -77,7 +77,7 @@ def _update_notion_page(self, page_id: str, properties: str | dict[str, Any]) ->
parsed_properties = json.loads(properties)
except json.JSONDecodeError as e:
error_message = f"Invalid JSON format for properties: {e}"
logger.error(error_message)
logger.exception(error_message)
return error_message

else:
@@ -98,15 +98,15 @@ def _update_notion_page(self, page_id: str, properties: str | dict[str, Any]) ->
if e.response is not None:
error_message += f"\nStatus code: {e.response.status_code}"
error_message += f"\nResponse body: {e.response.text}"
logger.error(error_message)
logger.exception(error_message)
return error_message
except requests.exceptions.RequestException as e:
error_message = f"An error occurred while making the request: {e}"
logger.error(error_message)
logger.exception(error_message)
return error_message
except Exception as e:
error_message = f"An unexpected error occurred: {e}"
logger.error(error_message)
logger.exception(error_message)
return error_message

def __call__(self, *args, **kwargs):
10 changes: 5 additions & 5 deletions src/backend/base/langflow/components/data/APIRequest.py
@@ -87,13 +87,13 @@ def parse_curl(self, curl: str, build_config: dotdict) -> dotdict:
try:
json_data = json.loads(parsed.data)
build_config["body"]["value"] = json_data
except json.JSONDecodeError as e:
logger.error(f"Error decoding JSON data: {e}")
except json.JSONDecodeError:
logger.exception("Error decoding JSON data")
else:
build_config["body"]["value"] = {}
except Exception as exc:
logger.error(f"Error parsing curl: {exc}")
msg = f"Error parsing curl: {exc}"
logger.exception(msg)
raise ValueError(msg) from exc
return build_config

@@ -120,9 +120,9 @@ async def make_request(
try:
body = json.loads(body)
except Exception as e:
logger.error(f"Error decoding JSON data: {e}")
body = None
msg = f"Error decoding JSON data: {e}"
logger.exception(msg)
body = None
raise ValueError(msg) from e

data = body if body else None
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/deactivated/SubFlow.py
@@ -54,8 +54,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
inputs = get_flow_inputs(graph)
# Add inputs to the build config
build_config = self.add_inputs_to_build_config(inputs, build_config)
except Exception as e:
logger.error(f"Error getting flow {field_value}: {e}")
except Exception:
logger.exception(f"Error getting flow {field_value}")

return build_config

@@ -36,8 +36,8 @@ def embed_documents(self, texts: list[str]) -> list[list[float]]:
httpx.RequestError,
json.JSONDecodeError,
KeyError,
) as e:
logger.error(f"Error occurred: {e}")
):
logger.exception("Error occurred")
raise

return embeddings # type: ignore
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/prototypes/FlowTool.py
@@ -85,8 +85,8 @@ def build_tool(self) -> Tool:
graph = Graph.from_payload(flow_data.data["data"])
try:
graph.set_run_id(self.graph.run_id)
except Exception as e:
logger.warning(f"Failed to set run_id: {e}")
except Exception:
logger.opt(exception=True).warning("Failed to set run_id")
inputs = get_flow_inputs(graph)
tool = FlowTool(
name=self.name,
4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/prototypes/SubFlow.py
@@ -46,8 +46,8 @@ def update_build_config(self, build_config: dotdict, field_value: Any, field_nam
inputs = get_flow_inputs(graph)
# Add inputs to the build config
build_config = self.add_inputs_to_build_config(inputs, build_config)
except Exception as e:
logger.error(f"Error getting flow {field_value}: {e}")
except Exception:
logger.exception(f"Error getting flow {field_value}")

return build_config

4 changes: 2 additions & 2 deletions src/backend/base/langflow/components/toolkits/ComposioAPI.py
@@ -85,8 +85,8 @@ def _handle_authorization_failure(self, toolset: ComposioToolSet, entity: Any, a
if auth_schemes[0].auth_mode == "API_KEY":
return self._process_api_key_auth(entity, app)
return self._initiate_default_connection(entity, app)
except Exception as exc:
logger.error(f"Authorization error: {exc}")
except Exception:
logger.exception("Authorization error")
return "Error"

def _process_api_key_auth(self, entity: Any, app: str) -> str:
@@ -198,11 +198,11 @@ def search(self, query: str | None = None) -> list[dict[str, Any]]:
msg = f"Invalid search type: {self.search_type}"
raise ValueError(msg)
except Exception as e:
logger.error(f"Search query failed: {e}")
msg = (
"Error occurred while querying the Elasticsearch VectorStore,"
" there is no Data into the VectorStore."
)
logger.exception(msg)
raise ValueError(msg) from e
return [
{"page_content": doc.page_content, "metadata": doc.metadata, "score": score} for doc, score in results