galileo_observe package#

Submodules#

galileo_observe.handlers module#

class GalileoObserveCallback(project_name, version=None, *args, **kwargs)#

Bases: BaseCallbackHandler

LangChain callback handler for Galileo Observe.

Parameters:
  • project_name (str) – Name of the project to log to

  • version (Optional[str]) – A version identifier for this system so logs can be attributed to a specific configuration

timers: Dict[str, Dict[str, float]] = {}#
records: Dict[str, TransactionRecord] = {}#
version: Optional[str]#
client: ApiClient#
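
A minimal usage sketch (the project name, version, and model below are placeholders, and Galileo credentials are assumed to be configured in the environment): construct the handler once and pass it to LangChain through the callbacks argument, after which every LLM, chain, tool, and retriever event is logged automatically.

    from langchain_openai import ChatOpenAI

    from galileo_observe import GalileoObserveCallback

    # "my-project" and "v1" are placeholder identifiers.
    monitor_handler = GalileoObserveCallback(project_name="my-project", version="v1")

    llm = ChatOpenAI(model="gpt-4o-mini", callbacks=[monitor_handler])
    llm.invoke("What is the capital of France?")
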
on_llm_start(serialized, prompts, run_id, parent_run_id=None, **kwargs)#

Run when LLM starts running.

Return type:

Any

on_chat_model_start(serialized, messages, run_id, parent_run_id=None, **kwargs)#

Run when Chat Model starts running.

Return type:

Any

on_llm_end(response, run_id, **kwargs)#

Run when LLM ends running.

Return type:

Any

on_llm_error(error, run_id, **kwargs)#

Run when LLM errors.

Return type:

Any

on_chain_start(serialized, inputs, run_id, parent_run_id=None, **kwargs)#

Run when chain starts running.

Return type:

Any

on_chain_end(outputs, run_id, **kwargs)#

Run when chain ends running.

Return type:

Any

on_chain_error(error, run_id, **kwargs)#

Run when chain errors.

Return type:

Any

on_agent_finish(finish, *, run_id, **kwargs)#

Run on agent end.

Return type:

Any

on_tool_start(serialized, input_str, *, run_id, parent_run_id=None, tags=None, metadata=None, **kwargs)#

Run when tool starts running.

Return type:

Any

on_tool_end(output, *, run_id, **kwargs)#

Run when tool ends running.

Return type:

Any

on_tool_error(error, *, run_id, **kwargs)#

Run when tool errors.

Return type:

Any

on_retriever_start(serialized, query, *, run_id, parent_run_id=None, tags=None, metadata=None, **kwargs)#

Run on retriever start.

Return type:

None

on_retriever_end(documents, *, run_id, **kwargs)#

Run on retriever end.

Return type:

None

on_retriever_error(error, *, run_id, **kwargs)#

Run on retriever error.

Return type:

None

static json_serializer(obj)#

For serializing objects that cannot be serialized by default with json.dumps.

Checks for certain methods to convert the object to a dict.

Return type:

Union[str, Dict[Any, Any]]
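
A plausible use, given the docstring, is passing this method as the default= fallback of json.dumps; whether a particular object is converted depends on which methods json_serializer looks for (the pydantic model below is an assumption for illustration).

    import json

    from pydantic import BaseModel

    from galileo_observe import GalileoObserveCallback

    class Message(BaseModel):
        role: str
        content: str

    msg = Message(role="user", content="hello")
    # json.dumps falls back to json_serializer for objects it cannot encode.
    payload = json.dumps({"message": msg}, default=GalileoObserveCallback.json_serializer)
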

galileo_observe.monitor module#

class GalileoObserve(project_name, version=None, *args, **kwargs)#

Bases: object

Initializes Galileo Observe for manually logging workflow nodes.

Parameters:
  • project_name (str) – The name of the project to log to

  • version (Optional[str]) – A version identifier for this system so logs can be attributed to a specific configuration

timers: Dict[str, Dict[str, float]] = {}#
records: Dict[str, TransactionRecord] = {}#
version: Optional[str]#
client: ApiClient#
log_node_start(node_type, input_text, model=None, temperature=None, user_metadata=None, tags=None, constructor=None, chain_id=None)#

Log the start of a new node of any type.

Parameters:
  • node_type (TransactionRecordType) – Type of node (“llm”, “chat”, “chain”, “agent”, “tool”, “retriever”)

  • input_text (str) – Input to the node as a string or JSON dump

  • model (Optional[str], optional) – Model name for llm or chat nodes, by default None

  • temperature (Optional[float], optional) – Temperature setting for llm or chat nodes, by default None

  • user_metadata (Optional[Dict[str, Any]], optional) – A dict of key-value metadata for identifying logs, by default None

  • tags (Optional[List[str]], optional) – A list of string tags for identifying logs, by default None

  • constructor (Optional[str], optional) – A class constructor name for the node (e.g. OpenAIChat), by default None

  • chain_id (Optional[str], optional) – The ID of the chain this node belongs to, by default None

Returns:

The node_id used when calling log_node_completion() or log_node_error()

Return type:

str
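
A sketch of opening an llm node (project name, prompt, and model are placeholders; the string node types from the docstring are passed directly):

    from galileo_observe import GalileoObserve

    observe = GalileoObserve(project_name="my-project", version="v1")

    # The returned node_id is later passed to log_node_completion()
    # or log_node_error().
    node_id = observe.log_node_start(
        node_type="llm",
        input_text="What is the capital of France?",
        model="gpt-4o-mini",
        temperature=0.0,
        tags=["example"],
    )
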

log_node_completion(node_id, output_text, num_input_tokens=0, num_output_tokens=0, num_total_tokens=0, finish_reason=None, status_code=200)#

Log the completion of a node started with log_node_start().

Parameters:
  • node_id (str) – Output value from log_node_start()

  • output_text (str) – Output from the node as a string or JSON dump (List[str] for retrievers)

  • num_input_tokens (Optional[int], optional) – Number of input tokens for llm or chat nodes, by default 0

  • num_output_tokens (Optional[int], optional) – Number of output tokens for llm or chat nodes, by default 0

  • num_total_tokens (Optional[int], optional) – Total number of tokens for llm or chat nodes, by default 0

  • finish_reason (Optional[str], optional) – Finish reason for node (e.g. “chain end” or “stop”), by default None

  • status_code (Optional[int], optional) – HTTP status code for the node, by default 200

Return type:

None

log_node_error(node_id, error_message, status_code=500)#

Log an error encountered while processing a node.

Parameters:
  • node_id (str) – Output value from log_node_start()

  • error_message (str) – The error message from the remote system or local application

  • status_code (Optional[int], optional) – HTTP status code for the error, by default 500

Return type:

None
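
Continuing the sketch after log_node_start() above, the same node_id closes the record on success or on failure (token counts here are illustrative):

    # On success, record the output and token usage.
    observe.log_node_completion(
        node_id=node_id,
        output_text="Paris",
        num_input_tokens=12,
        num_output_tokens=3,
        num_total_tokens=15,
        finish_reason="stop",
    )

    # On failure, record the error instead:
    # observe.log_node_error(node_id=node_id, error_message="rate limited", status_code=429)
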

Module contents#

Galileo Observe

Re-exports GalileoObserveCallback from galileo_observe.handlers and GalileoObserve from galileo_observe.monitor; see those modules above for the full class documentation.