class langgraph_agent_toolkit.core.observability.langfuse.LangfuseObservability(prompts_dir=None)[source]

Bases: BaseObservabilityPlatform

Langfuse implementation of observability platform.

Parameters:

prompts_dir (str | None)

__init__(prompts_dir=None)[source]
Parameters:

prompts_dir (str | None)

property required_vars: List[str]
get_callback_handler(**kwargs)[source]
Return type:

LangchainCallbackHandler

before_shutdown()[source]
Return type:

None

record_feedback(run_id, key, score, **kwargs)[source]
Parameters:

  • run_id

  • key

  • score

Return type:

None

push_prompt(name, prompt_template, metadata=None, force_create_new_version=True)[source]
Parameters:

  • name

  • prompt_template

  • metadata (default: None)

  • force_create_new_version (default: True)

Return type:

None

pull_prompt(name, return_with_prompt_object=False, cache_ttl_seconds=DEFAULT_CACHE_TTL_SECOND, template_format='f-string', label=None, version=None, **kwargs)[source]

Pull a prompt from the observability platform.

Parameters:
  • name (str)

  • return_with_prompt_object (bool)

  • cache_ttl_seconds (int | None)

  • template_format (Literal['f-string', 'mustache', 'jinja2'])

  • label (str | None)

  • version (int | None)

Return type:

ChatPromptTemplate | str | dict | None | Tuple[ChatPromptTemplate | str | dict | None, Any]

delete_prompt(name)[source]
Parameters:

name (str)

Return type:

None

get_template(name)[source]
Parameters:

name (str)

Return type:

str

property prompts_dir: Path
render_prompt(prompt_name, **variables)[source]
Parameters:

prompt_name (str)

Return type:

str

static requires_env_vars(func)[source]
Parameters:

func (Callable[[...], T])

Return type:

Callable[[...], T]

validate_environment()[source]
Return type:

bool