diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index b4a200a8..72ec1afe 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -23,7 +23,7 @@ jobs: - run: rye config --set-bool behavior.use-uv=true - run: rye sync --no-lock - run: make lint - - run: rye run pyright + - run: rye run pyright logfire tests docs: runs-on: ubuntu-latest @@ -73,6 +73,9 @@ jobs: - run: rye config --set-bool behavior.use-uv=true # Update all dependencies to the latest version possible - run: rye sync --update-all + - run: | + pip install uv + uv pip install "logfire-api @ file://logfire-api" - run: rye show - run: mkdir coverage - run: make test @@ -144,5 +147,14 @@ jobs: - run: rye config --set-bool behavior.use-uv=true - run: rye build - - name: Upload package to PyPI + - name: Publish logfire to PyPI uses: pypa/gh-action-pypi-publish@release/v1 + + - name: Build logfire-api + run: rye build + working-directory: logfire-api/ + + - name: Publish logfire-api to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: logfire-api/ diff --git a/.gitignore b/.gitignore index f9953c33..4d9c1a2a 100644 --- a/.gitignore +++ b/.gitignore @@ -6,3 +6,6 @@ __pycache__ *.env /scratch/ /.coverage + +# stubgen +out diff --git a/Makefile b/Makefile index f93bc021..6c1afe60 100644 --- a/Makefile +++ b/Makefile @@ -13,6 +13,7 @@ sources = pydantic tests docs/plugins install: .rye .pre-commit rye show rye sync --no-lock + uv pip install -e logfire-api pre-commit install --install-hooks .PHONY: format # Format the code @@ -29,6 +30,10 @@ lint: test: rye run coverage run -m pytest +.PHONY: generate-stubs # Generate stubs for logfire-api +generate-stubs: + rye run generate-stubs + .PHONY: testcov # Run tests and generate a coverage report testcov: test @echo "building coverage html" diff --git a/docs/integrations/third_party/litellm.md b/docs/integrations/third_party/litellm.md index fe8e8dff..04b16fa6 100644 --- a/docs/integrations/third_party/litellm.md +++ b/docs/integrations/third_party/litellm.md @@ -1,4 +1,4 @@ -# LiteLLM +# LiteLLM LiteLLM allows you to call over 100 Large Language Models (LLMs) using the same input/output format. It also supports Logfire for logging and monitoring. diff --git a/logfire-api/.gitignore b/logfire-api/.gitignore new file mode 100644 index 00000000..ae8554de --- /dev/null +++ b/logfire-api/.gitignore @@ -0,0 +1,10 @@ +# python generated files +__pycache__/ +*.py[oc] +build/ +dist/ +wheels/ +*.egg-info + +# venv +.venv diff --git a/logfire-api/README.md b/logfire-api/README.md new file mode 100644 index 00000000..fd0b0133 --- /dev/null +++ b/logfire-api/README.md @@ -0,0 +1,7 @@ +# logfire-api + +Shim for the logfire SDK Python API which does nothing unless logfire is installed. + +This package is designed to be used by packages that want to provide opt-in integration with [Logfire](https://github.com/pydantic/logfire). + +The package provides a clone of the Python API exposed by the `logfire` package which does nothing if the `logfire` package is not installed, but makes real calls when it is. 
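For context, a minimal sketch (hypothetical package and function names) of how a library would use the shim as an optional dependency — the same code emits real telemetry when `logfire` is installed and is a cheap no-op otherwise:

```python
# Hypothetical library code using the logfire-api shim.
# If the real `logfire` package is installed, `logfire_api` re-exports it
# (via the sys.modules swap in logfire_api/__init__.py below); otherwise
# every call here does nothing.
import logfire_api as logfire

def fetch_user(user_id: int) -> dict:
    with logfire.span('fetch_user {user_id}', user_id=user_id):
        logfire.info('looked up user', user_id=user_id)
        return {'id': user_id}
```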
diff --git a/logfire-api/logfire_api/__init__.py b/logfire-api/logfire_api/__init__.py new file mode 100644 index 00000000..54b84fc6 --- /dev/null +++ b/logfire-api/logfire_api/__init__.py @@ -0,0 +1,190 @@ +from __future__ import annotations + +from contextlib import contextmanager +import importlib +import sys +from typing import TYPE_CHECKING, ContextManager, Literal, Sequence +from contextlib import nullcontext +from unittest.mock import MagicMock + +try: + logfire_module = importlib.import_module('logfire') + sys.modules[__name__] = logfire_module + +except ImportError: + if not TYPE_CHECKING: # pragma: no branch + LevelName = Literal['trace', 'debug', 'info', 'notice', 'warn', 'warning', 'error', 'fatal'] + VERSION = '0.0.0' + METRICS_PREFERRED_TEMPORALITY = {} + + def configure(*args, **kwargs): ... + + class LogfireSpan: + def __getattr__(self, attr): + return MagicMock() # pragma: no cover + + def __enter__(self): + return self + + def __exit__(self, *args, **kwargs) -> None: ... + + @property + def message_template(self) -> str: # pragma: no cover + return '' + + @property + def tags(self) -> Sequence[str]: # pragma: no cover + return [] + + @property + def message(self) -> str: # pragma: no cover + return '' + + @message.setter + def message(self, message: str): ... # pragma: no cover + + def is_recording(self) -> bool: # pragma: no cover + return False + + class Logfire: + def __getattr__(self, attr): + return MagicMock() # pragma: no cover + + def __init__(self, *args, **kwargs) -> None: ... + + def span(self, *args, **kwargs) -> LogfireSpan: + return LogfireSpan() + + def log(self, *args, **kwargs) -> None: ... + + def trace(self, *args, **kwargs) -> None: ... + + def debug(self, *args, **kwargs) -> None: ... + + def notice(self, *args, **kwargs) -> None: ... + + def info(self, *args, **kwargs) -> None: ... + + def warn(self, *args, **kwargs) -> None: ... + + def error(self, *args, **kwargs) -> None: ... + + def fatal(self, *args, **kwargs) -> None: ... + + def with_tags(self, *args, **kwargs) -> Logfire: + return self + + def with_settings(self, *args, **kwargs) -> Logfire: + return self + + def force_flush(self, *args, **kwargs) -> None: ... + + def log_slow_async_callbacks(self, *args, **kwargs) -> ContextManager[None]: # pragma: no branch + return nullcontext() + + def install_auto_tracing(self, *args, **kwargs) -> None: ... + + def instrument(self, *args, **kwargs): + def decorator(func): + return func + + return decorator + + def instrument_fastapi(self, *args, **kwargs) -> ContextManager[None]: + return nullcontext() + + def instrument_pymongo(self, *args, **kwargs) -> None: ... + + def instrument_sqlalchemy(self, *args, **kwargs) -> None: ... + + def instrument_redis(self, *args, **kwargs) -> None: ... + + def instrument_flask(self, *args, **kwargs) -> None: ... + + def instrument_starlette(self, *args, **kwargs) -> None: ... + + def instrument_django(self, *args, **kwargs) -> None: ... + + def instrument_psycopg(self, *args, **kwargs) -> None: ... + + def instrument_requests(self, *args, **kwargs) -> None: ... + + def instrument_httpx(self, *args, **kwargs) -> None: ... + + def instrument_asyncpg(self, *args, **kwargs) -> None: ... + + def instrument_anthropic(self, *args, **kwargs) -> ContextManager[None]: + return nullcontext() + + def instrument_openai(self, *args, **kwargs) -> ContextManager[None]: + return nullcontext() + + def instrument_aiohttp_client(self, *args, **kwargs) -> None: ... + + def shutdown(self, *args, **kwargs) -> None: ...
+ + DEFAULT_LOGFIRE_INSTANCE = Logfire() + span = DEFAULT_LOGFIRE_INSTANCE.span + log = DEFAULT_LOGFIRE_INSTANCE.log + trace = DEFAULT_LOGFIRE_INSTANCE.trace + debug = DEFAULT_LOGFIRE_INSTANCE.debug + notice = DEFAULT_LOGFIRE_INSTANCE.notice + info = DEFAULT_LOGFIRE_INSTANCE.info + warn = DEFAULT_LOGFIRE_INSTANCE.warn + error = DEFAULT_LOGFIRE_INSTANCE.error + fatal = DEFAULT_LOGFIRE_INSTANCE.fatal + with_tags = DEFAULT_LOGFIRE_INSTANCE.with_tags + with_settings = DEFAULT_LOGFIRE_INSTANCE.with_settings + force_flush = DEFAULT_LOGFIRE_INSTANCE.force_flush + log_slow_async_callbacks = DEFAULT_LOGFIRE_INSTANCE.log_slow_async_callbacks + install_auto_tracing = DEFAULT_LOGFIRE_INSTANCE.install_auto_tracing + instrument = DEFAULT_LOGFIRE_INSTANCE.instrument + instrument_fastapi = DEFAULT_LOGFIRE_INSTANCE.instrument_fastapi + instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai + instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic + instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg + instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx + instrument_requests = DEFAULT_LOGFIRE_INSTANCE.instrument_requests + instrument_psycopg = DEFAULT_LOGFIRE_INSTANCE.instrument_psycopg + instrument_django = DEFAULT_LOGFIRE_INSTANCE.instrument_django + instrument_flask = DEFAULT_LOGFIRE_INSTANCE.instrument_flask + instrument_starlette = DEFAULT_LOGFIRE_INSTANCE.instrument_starlette + instrument_aiohttp_client = DEFAULT_LOGFIRE_INSTANCE.instrument_aiohttp_client + instrument_sqlalchemy = DEFAULT_LOGFIRE_INSTANCE.instrument_sqlalchemy + instrument_redis = DEFAULT_LOGFIRE_INSTANCE.instrument_redis + instrument_pymongo = DEFAULT_LOGFIRE_INSTANCE.instrument_pymongo + shutdown = DEFAULT_LOGFIRE_INSTANCE.shutdown + + def no_auto_trace(x): + return x + + @contextmanager + def suppress_instrumentation(): + yield + + class ConsoleOptions: + def __init__(self, *args, **kwargs) -> None: ... + + class TailSamplingOptions: + def __init__(self, *args, **kwargs) -> None: ... + + class ScrubbingOptions: + def __init__(self, *args, **kwargs) -> None: ... + + class PydanticPlugin: + def __init__(self, *args, **kwargs) -> None: ... + + class ScrubMatch: + def __init__(self, *args, **kwargs) -> None: ... + + class AutoTraceModule: + def __init__(self, *args, **kwargs) -> None: ... + + class StructlogProcessor: + def __init__(self, *args, **kwargs) -> None: ... + + class LogfireLoggingHandler: + def __init__(self, *args, **kwargs) -> None: ... 
+ + def load_spans_from_file(*args, **kwargs): + return [] diff --git a/logfire-api/logfire_api/__init__.pyi b/logfire-api/logfire_api/__init__.pyi new file mode 100644 index 00000000..4b407455 --- /dev/null +++ b/logfire-api/logfire_api/__init__.pyi @@ -0,0 +1,47 @@ +from ._internal.auto_trace import AutoTraceModule as AutoTraceModule +from ._internal.auto_trace.rewrite_ast import no_auto_trace as no_auto_trace +from ._internal.config import ConsoleOptions as ConsoleOptions, METRICS_PREFERRED_TEMPORALITY as METRICS_PREFERRED_TEMPORALITY, PydanticPlugin as PydanticPlugin, configure as configure +from ._internal.constants import LevelName as LevelName +from ._internal.exporters.file import load_file as load_spans_from_file +from ._internal.exporters.tail_sampling import TailSamplingOptions as TailSamplingOptions +from ._internal.main import Logfire as Logfire, LogfireSpan as LogfireSpan +from ._internal.scrubbing import ScrubMatch as ScrubMatch, ScrubbingOptions as ScrubbingOptions +from ._internal.utils import suppress_instrumentation as suppress_instrumentation +from .integrations.logging import LogfireLoggingHandler as LogfireLoggingHandler +from .integrations.structlog import LogfireProcessor as StructlogProcessor +from .version import VERSION as VERSION + +__all__ = ['Logfire', 'LogfireSpan', 'LevelName', 'ConsoleOptions', 'PydanticPlugin', 'configure', 'span', 'instrument', 'log', 'trace', 'debug', 'notice', 'info', 'warn', 'error', 'fatal', 'force_flush', 'log_slow_async_callbacks', 'install_auto_tracing', 'instrument_fastapi', 'instrument_openai', 'instrument_anthropic', 'instrument_asyncpg', 'instrument_httpx', 'instrument_requests', 'instrument_psycopg', 'instrument_django', 'instrument_flask', 'instrument_starlette', 'instrument_aiohttp_client', 'instrument_sqlalchemy', 'instrument_redis', 'instrument_pymongo', 'AutoTraceModule', 'with_tags', 'with_settings', 'shutdown', 'load_spans_from_file', 'no_auto_trace', 'METRICS_PREFERRED_TEMPORALITY', 'ScrubMatch', 'ScrubbingOptions', 'VERSION', 'suppress_instrumentation', 'StructlogProcessor', 'LogfireLoggingHandler', 'TailSamplingOptions'] + +DEFAULT_LOGFIRE_INSTANCE = Logfire() +span = DEFAULT_LOGFIRE_INSTANCE.span +instrument = DEFAULT_LOGFIRE_INSTANCE.instrument +force_flush = DEFAULT_LOGFIRE_INSTANCE.force_flush +log_slow_async_callbacks = DEFAULT_LOGFIRE_INSTANCE.log_slow_async_callbacks +install_auto_tracing = DEFAULT_LOGFIRE_INSTANCE.install_auto_tracing +instrument_fastapi = DEFAULT_LOGFIRE_INSTANCE.instrument_fastapi +instrument_openai = DEFAULT_LOGFIRE_INSTANCE.instrument_openai +instrument_anthropic = DEFAULT_LOGFIRE_INSTANCE.instrument_anthropic +instrument_asyncpg = DEFAULT_LOGFIRE_INSTANCE.instrument_asyncpg +instrument_httpx = DEFAULT_LOGFIRE_INSTANCE.instrument_httpx +instrument_requests = DEFAULT_LOGFIRE_INSTANCE.instrument_requests +instrument_psycopg = DEFAULT_LOGFIRE_INSTANCE.instrument_psycopg +instrument_django = DEFAULT_LOGFIRE_INSTANCE.instrument_django +instrument_flask = DEFAULT_LOGFIRE_INSTANCE.instrument_flask +instrument_starlette = DEFAULT_LOGFIRE_INSTANCE.instrument_starlette +instrument_aiohttp_client = DEFAULT_LOGFIRE_INSTANCE.instrument_aiohttp_client +instrument_sqlalchemy = DEFAULT_LOGFIRE_INSTANCE.instrument_sqlalchemy +instrument_redis = DEFAULT_LOGFIRE_INSTANCE.instrument_redis +instrument_pymongo = DEFAULT_LOGFIRE_INSTANCE.instrument_pymongo +shutdown = DEFAULT_LOGFIRE_INSTANCE.shutdown +with_tags = DEFAULT_LOGFIRE_INSTANCE.with_tags +with_settings = DEFAULT_LOGFIRE_INSTANCE.with_settings +log = 
DEFAULT_LOGFIRE_INSTANCE.log +trace = DEFAULT_LOGFIRE_INSTANCE.trace +debug = DEFAULT_LOGFIRE_INSTANCE.debug +info = DEFAULT_LOGFIRE_INSTANCE.info +notice = DEFAULT_LOGFIRE_INSTANCE.notice +warn = DEFAULT_LOGFIRE_INSTANCE.warn +error = DEFAULT_LOGFIRE_INSTANCE.error +fatal = DEFAULT_LOGFIRE_INSTANCE.fatal +__version__ = VERSION diff --git a/logfire-api/logfire_api/_internal/__init__.pyi b/logfire-api/logfire_api/_internal/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/logfire-api/logfire_api/_internal/ast_utils.pyi b/logfire-api/logfire_api/_internal/ast_utils.pyi new file mode 100644 index 00000000..98d52f72 --- /dev/null +++ b/logfire-api/logfire_api/_internal/ast_utils.pyi @@ -0,0 +1,34 @@ +import ast +from .constants import ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_SAMPLE_RATE_KEY as ATTRIBUTES_SAMPLE_RATE_KEY, ATTRIBUTES_TAGS_KEY as ATTRIBUTES_TAGS_KEY +from .stack_info import StackInfo as StackInfo, get_filepath_attribute as get_filepath_attribute +from .utils import uniquify_sequence as uniquify_sequence +from dataclasses import dataclass +from opentelemetry.util import types as otel_types + +@dataclass(frozen=True) +class LogfireArgs: + """Values passed to `logfire.instrument` and/or values stored in a logfire instance as basic configuration. + + These determine the arguments passed to the method calls added by the AST transformer. + """ + tags: tuple[str, ...] + sample_rate: float | None + msg_template: str | None = ... + span_name: str | None = ... + extract_args: bool = ... + +@dataclass +class BaseTransformer(ast.NodeTransformer): + """Helper for rewriting ASTs to wrap function bodies in `with {logfire_method_name}(...):`.""" + logfire_args: LogfireArgs + logfire_method_name: str + filename: str + module_name: str + qualname_stack = ... + def __post_init__(self) -> None: ... + def visit_ClassDef(self, node: ast.ClassDef): ... + def visit_FunctionDef(self, node: ast.FunctionDef | ast.AsyncFunctionDef): ... + def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef): ... + def rewrite_function(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.AST: ... + def logfire_method_call_node(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.Call: ... + def logfire_method_arg_values(self, qualname: str, lineno: int) -> tuple[str, dict[str, otel_types.AttributeValue]]: ... diff --git a/logfire-api/logfire_api/_internal/async_.pyi b/logfire-api/logfire_api/_internal/async_.pyi new file mode 100644 index 00000000..29f3e026 --- /dev/null +++ b/logfire-api/logfire_api/_internal/async_.pyi @@ -0,0 +1,22 @@ +from .constants import ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS +from .main import Logfire as Logfire +from .stack_info import StackInfo as StackInfo, get_code_object_info as get_code_object_info, get_stack_info_from_frame as get_stack_info_from_frame +from .utils import safe_repr as safe_repr +from _typeshed import Incomplete +from types import CoroutineType +from typing import Any, ContextManager + +ASYNCIO_PATH: Incomplete + +def log_slow_callbacks(logfire: Logfire, slow_duration: float) -> ContextManager[None]: + """Log a warning whenever a function running in the asyncio event loop blocks for too long. + + See Logfire.log_slow_async_callbacks. + Inspired by https://gitlab.com/quantlane/libs/aiodebug. 
+ """ + +class _CallbackAttributes(StackInfo, total=False): + name: str + stack: list[StackInfo] + +def stack_info_from_coroutine(coro: CoroutineType[Any, Any, Any]) -> StackInfo: ... diff --git a/logfire-api/logfire_api/_internal/auth.pyi b/logfire-api/logfire_api/_internal/auth.pyi new file mode 100644 index 00000000..e8ce72d3 --- /dev/null +++ b/logfire-api/logfire_api/_internal/auth.pyi @@ -0,0 +1,55 @@ +import requests +from .utils import UnexpectedResponse as UnexpectedResponse +from _typeshed import Incomplete +from logfire.exceptions import LogfireConfigError as LogfireConfigError +from typing import TypedDict + +HOME_LOGFIRE: Incomplete +DEFAULT_FILE: Incomplete + +class UserTokenData(TypedDict): + """User token data.""" + token: str + expiration: str + +class DefaultFile(TypedDict): + """Content of the default.toml file.""" + tokens: dict[str, UserTokenData] + +class NewDeviceFlow(TypedDict): + """Matches model of the same name in the backend.""" + device_code: str + frontend_auth_url: str + +def request_device_code(session: requests.Session, base_api_url: str) -> tuple[str, str]: + """Request a device code from the Logfire API. + + Args: + session: The `requests` session to use. + base_api_url: The base URL of the Logfire instance. + + Returns: + return data['device_code'], data['frontend_auth_url'] + The device code and the frontend URL to authenticate the device at, as a `NewDeviceFlow` dict. + """ +def poll_for_token(session: requests.Session, device_code: str, base_api_url: str) -> UserTokenData: + """Poll the Logfire API for the user token. + + This function will keep polling the API until it receives a user token, not that + each request should take ~10 seconds as the API endpoint will block waiting for the user to + complete authentication. + + Args: + session: The `requests` session to use. + device_code: The device code to poll for. + base_api_url: The base URL of the Logfire instance. + + Returns: + The user token. + """ +def is_logged_in(data: DefaultFile, logfire_url: str) -> bool: + """Check if the user is logged in. + + Returns: + True if the user is logged in, False otherwise. + """ diff --git a/logfire-api/logfire_api/_internal/auto_trace/__init__.pyi b/logfire-api/logfire_api/_internal/auto_trace/__init__.pyi new file mode 100644 index 00000000..840afe1d --- /dev/null +++ b/logfire-api/logfire_api/_internal/auto_trace/__init__.pyi @@ -0,0 +1,15 @@ +from ..constants import ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS +from ..main import Logfire as Logfire +from .import_hook import LogfireFinder as LogfireFinder +from .types import AutoTraceModule as AutoTraceModule +from typing import Callable, Literal, Sequence + +def install_auto_tracing(logfire: Logfire, modules: Sequence[str] | Callable[[AutoTraceModule], bool], *, check_imported_modules: Literal['error', 'warn', 'ignore'] = 'error', min_duration: float = 0) -> None: + """Install automatic tracing. + + See `Logfire.install_auto_tracing` for more information. + """ +def modules_func_from_sequence(modules: Sequence[str]) -> Callable[[AutoTraceModule], bool]: ... + +class AutoTraceModuleAlreadyImportedException(Exception): ... +class AutoTraceModuleAlreadyImportedWarning(Warning): ... 
diff --git a/logfire-api/logfire_api/_internal/auto_trace/import_hook.pyi b/logfire-api/logfire_api/_internal/auto_trace/import_hook.pyi new file mode 100644 index 00000000..afb46bcf --- /dev/null +++ b/logfire-api/logfire_api/_internal/auto_trace/import_hook.pyi @@ -0,0 +1,38 @@ +from ..main import Logfire as Logfire +from .rewrite_ast import exec_source as exec_source +from .types import AutoTraceModule as AutoTraceModule +from dataclasses import dataclass +from importlib.abc import Loader, MetaPathFinder +from importlib.machinery import ModuleSpec +from types import ModuleType +from typing import Callable, Sequence + +@dataclass +class LogfireFinder(MetaPathFinder): + """The import hook entry point, inserted into `sys.meta_path` to apply AST rewriting to matching modules.""" + logfire: Logfire + modules_filter: Callable[[AutoTraceModule], bool] + min_duration: int + def find_spec(self, fullname: str, path: Sequence[str] | None, target: ModuleType | None = None) -> ModuleSpec | None: + """This is the method that is called by the import system. + + It uses the other existing meta path finders to do most of the standard work, + particularly finding the module's source code and filename. + If it finds a module spec that matches the filter, it returns a new spec that uses the LogfireLoader. + """ + +@dataclass +class LogfireLoader(Loader): + """An import loader produced by LogfireFinder which executes a modified AST of the module's source code.""" + plain_spec: ModuleSpec + source: str + logfire: Logfire + min_duration: int + def exec_module(self, module: ModuleType): + """Execute a modified AST of the module's source code in the module's namespace. + + This is called by the import system. + """ + def create_module(self, spec: ModuleSpec): ... + def __getattr__(self, item: str): + """Forward some methods to the plain spec's loader (likely a `SourceFileLoader`) if they exist.""" diff --git a/logfire-api/logfire_api/_internal/auto_trace/rewrite_ast.pyi b/logfire-api/logfire_api/_internal/auto_trace/rewrite_ast.pyi new file mode 100644 index 00000000..1823740b --- /dev/null +++ b/logfire-api/logfire_api/_internal/auto_trace/rewrite_ast.pyi @@ -0,0 +1,51 @@ +import ast +from ..ast_utils import BaseTransformer as BaseTransformer, LogfireArgs as LogfireArgs +from ..main import Logfire as Logfire +from dataclasses import dataclass +from typing import Any, Callable, ContextManager, TypeVar + +def exec_source(source: str, filename: str, module_name: str, globs: dict[str, Any], logfire_instance: Logfire, min_duration: int) -> None: + """Execute a modified AST of the module's source code in the module's namespace. + + The modified AST wraps the body of every function definition in `with context_factories[index]():`. + `context_factories` is added to the module's namespace as `logfire_`. + `index` is a different constant number for each function definition. + `context_factories[index]` is one of these: + - `partial(logfire_instance._fast_span, name, attributes)` where the name and attributes + are constructed from `filename`, `module_name`, attributes of `logfire_instance`, + and the qualified name and line number of the current function. + - `MeasureTime`, a class that measures the time elapsed. If it exceeds `min_duration`, + then `context_factories[index]` is replaced with the `partial` above. + If `min_duration` is greater than 0, then `context_factories[index]` is initially `MeasureTime`. + Otherwise, it's initially the `partial` above. 
+ """ +def rewrite_ast(source: str, filename: str, logfire_name: str, module_name: str, logfire_instance: Logfire, context_factories: list[Callable[[], ContextManager[Any]]], min_duration: int) -> ast.AST: ... + +@dataclass +class AutoTraceTransformer(BaseTransformer): + """Trace all encountered functions except those explicitly marked with `@no_auto_trace`.""" + logfire_instance: Logfire + context_factories: list[Callable[[], ContextManager[Any]]] + min_duration: int + def check_no_auto_trace(self, node: ast.FunctionDef | ast.AsyncFunctionDef | ast.ClassDef) -> bool: + """Return true if the node has a `@no_auto_trace` or `@logfire.no_auto_trace` decorator.""" + def visit_ClassDef(self, node: ast.ClassDef): ... + def visit_FunctionDef(self, node: ast.FunctionDef | ast.AsyncFunctionDef): ... + def logfire_method_call_node(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.Call: ... +T = TypeVar('T') + +def no_auto_trace(x: T) -> T: + """Decorator to prevent a function/class from being traced by `logfire.install_auto_tracing`. + + This is useful for small functions that are called very frequently and would generate too much noise. + + The decorator is detected at import time. + Only `@no_auto_trace` or `@logfire.no_auto_trace` are supported. + Renaming/aliasing either the function or module won't work. + Neither will calling this indirectly via another function. + + Any decorated function, or any function defined anywhere inside a decorated function/class, + will be completely ignored by `logfire.install_auto_tracing`. + + This decorator simply returns the argument unchanged, so there is zero runtime overhead. + """ diff --git a/logfire-api/logfire_api/_internal/auto_trace/types.pyi b/logfire-api/logfire_api/_internal/auto_trace/types.pyi new file mode 100644 index 00000000..fdb59374 --- /dev/null +++ b/logfire-api/logfire_api/_internal/auto_trace/types.pyi @@ -0,0 +1,24 @@ +from dataclasses import dataclass +from typing import Sequence + +@dataclass +class AutoTraceModule: + """Information about a module being imported that should maybe be traced automatically. + + This object will be passed to a function that should return True if the module should be traced. + In particular it'll be passed to a function that's passed to `install_auto_tracing` as the `modules` argument. + """ + name: str + filename: str | None + def parts_start_with(self, prefix: str | Sequence[str]) -> bool: + """Return True if the module name starts with any of the given prefixes, using dots as boundaries. + + For example, if the module name is `foo.bar.spam`, then `parts_start_with('foo')` will return True, + but `parts_start_with('bar')` or `parts_start_with('foo_bar')` will return False. + In other words, this will match the module itself or any submodules. + + If a prefix contains any characters other than letters, numbers, and dots, + then it will be treated as a regular expression. + """ + +def get_module_pattern(module: str): ... 
diff --git a/logfire-api/logfire_api/_internal/backfill.pyi b/logfire-api/logfire_api/_internal/backfill.pyi new file mode 100644 index 00000000..455bef7d --- /dev/null +++ b/logfire-api/logfire_api/_internal/backfill.pyi @@ -0,0 +1,72 @@ +from .constants import LevelName +from _typeshed import Incomplete +from datetime import datetime +from pathlib import Path +from pydantic import BaseModel +from typing import Any, IO + +__all__ = ['generate_trace_id', 'generate_span_id', 'Log', 'StartSpan', 'PrepareBackfill'] + +def generate_trace_id() -> int: + """Generate a new trace ID. + + Returns: + A new trace ID. + """ +def generate_span_id() -> int: + """Generate a new span ID. + + Returns: + A new span ID. + """ + +class Log(BaseModel): + """A log record.""" + model_config = pydantic_config + msg_template: str + level: LevelName + service_name: str + attributes: dict[str, Any] + trace_id: int + span_id: int + parent_span_id: int | None + timestamp: datetime | None + formatted_msg: str | None + otel_resource_attributes: dict[str, Any] + +class StartSpan(BaseModel): + """A span.""" + model_config = pydantic_config + span_name: str + msg_template: str + service_name: str + parent: StartSpan | int | None + log_attributes: dict[str, Any] + span_id: int + trace_id: int + parent_span_id: int | None + start_timestamp: datetime | None + formatted_msg: str | None + otel_resource_attributes: dict[str, Any] + def end(self, end_timestamp: datetime) -> Span: + """End the span at a given timestamp.""" + +class Span(StartSpan): + end_timestamp: datetime | None + +class PrepareBackfill: + """Prepare a backfill of logfire logs and spans from a file or stream. + + Attributes: + store_path: The path to the file or stream to backfill. + open_spans: A mapping of open spans, keyed by (trace_id, span_id). + processor: The span processor to use for the backfill. + """ + store_path: Incomplete + processor: Incomplete + scrubber: Incomplete + def __init__(self, file: Path | str | IO[bytes], batch: bool = True) -> None: ... + def __enter__(self) -> PrepareBackfill: ... + def write(self, data: Log | Span) -> None: + """Write the data to the backfill.""" + def __exit__(self, *_: Any) -> None: ... 
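A sketch of the backfill flow implied by these stubs (an internal API; field values are illustrative, and since `Log` is a pydantic model, all fields shown in the stub must be supplied):

```python
from datetime import datetime, timezone

from logfire._internal.backfill import Log, PrepareBackfill, generate_span_id, generate_trace_id

# Write one backfilled log record to a local file; it can then be bulk
# uploaded with the `logfire backfill` CLI command (see cli.pyi below).
with PrepareBackfill('logfire_spans.bin') as backfill:
    backfill.write(Log(
        msg_template='imported {count} rows',
        level='info',
        service_name='my-service',
        attributes={'count': 42},
        trace_id=generate_trace_id(),
        span_id=generate_span_id(),
        parent_span_id=None,
        timestamp=datetime.now(timezone.utc),
        formatted_msg=None,
        otel_resource_attributes={},
    ))
```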
diff --git a/logfire-api/logfire_api/_internal/cli.pyi b/logfire-api/logfire_api/_internal/cli.pyi new file mode 100644 index 00000000..b090c1f3 --- /dev/null +++ b/logfire-api/logfire_api/_internal/cli.pyi @@ -0,0 +1,48 @@ +import argparse +from ..version import VERSION as VERSION +from .auth import DEFAULT_FILE as DEFAULT_FILE, DefaultFile as DefaultFile, HOME_LOGFIRE as HOME_LOGFIRE, is_logged_in as is_logged_in, poll_for_token as poll_for_token, request_device_code as request_device_code +from .config import LogfireCredentials as LogfireCredentials +from .config_params import ParamManager as ParamManager +from .constants import LOGFIRE_BASE_URL as LOGFIRE_BASE_URL +from .tracer import SDKTracerProvider as SDKTracerProvider +from .utils import read_toml_file as read_toml_file +from _typeshed import Incomplete +from logfire.exceptions import LogfireConfigError as LogfireConfigError +from logfire.propagate import ContextCarrier as ContextCarrier, get_context as get_context + +BASE_OTEL_INTEGRATION_URL: str +BASE_DOCS_URL: str +INTEGRATIONS_DOCS_URL: Incomplete +LOGFIRE_LOG_FILE: Incomplete +file_handler: Incomplete +logger: Incomplete + +def version_callback() -> None: + """Show the version and exit.""" +def parse_whoami(args: argparse.Namespace) -> None: + """Show the authenticated user's username and the URL to your Logfire project.""" +def parse_clean(args: argparse.Namespace) -> None: + """Remove the contents of the Logfire data directory.""" +def parse_backfill(args: argparse.Namespace) -> None: + """Bulk upload data to Logfire.""" + +OTEL_PACKAGES: set[str] +OTEL_PACKAGE_LINK: Incomplete + +def parse_inspect(args: argparse.Namespace) -> None: + """Inspect installed packages and recommend packages that might be useful.""" +def parse_auth(args: argparse.Namespace) -> None: + """Authenticate with Logfire. + + This will authenticate your machine with Logfire and store the credentials. + """ +def parse_list_projects(args: argparse.Namespace) -> None: + """List user projects.""" +def parse_create_new_project(args: argparse.Namespace) -> None: + """Create a new project.""" +def parse_use_project(args: argparse.Namespace) -> None: + """Use an existing project.""" +def parse_info(_args: argparse.Namespace) -> None: + """Show versions of logfire, OS and related packages.""" +def main(args: list[str] | None = None) -> None: + """Run the CLI.""" diff --git a/logfire-api/logfire_api/_internal/collect_system_info.pyi b/logfire-api/logfire_api/_internal/collect_system_info.pyi new file mode 100644 index 00000000..9287ee63 --- /dev/null +++ b/logfire-api/logfire_api/_internal/collect_system_info.pyi @@ -0,0 +1,6 @@ +def collect_package_info() -> dict[str, str]: + """Retrieve the package information for all installed packages. + + Returns: + A dict mapping package names to versions.
+ """ diff --git a/logfire-api/logfire_api/_internal/config.pyi b/logfire-api/logfire_api/_internal/config.pyi new file mode 100644 index 00000000..7e0c5d26 --- /dev/null +++ b/logfire-api/logfire_api/_internal/config.pyi @@ -0,0 +1,298 @@ +import dataclasses +import requests +from .auth import DEFAULT_FILE as DEFAULT_FILE, DefaultFile as DefaultFile, is_logged_in as is_logged_in +from .collect_system_info import collect_package_info as collect_package_info +from .config_params import ParamManager as ParamManager, PydanticPluginRecordValues as PydanticPluginRecordValues +from .constants import DEFAULT_FALLBACK_FILE_NAME as DEFAULT_FALLBACK_FILE_NAME, LevelName as LevelName, OTLP_MAX_BODY_SIZE as OTLP_MAX_BODY_SIZE, RESOURCE_ATTRIBUTES_PACKAGE_VERSIONS as RESOURCE_ATTRIBUTES_PACKAGE_VERSIONS +from .exporters.console import ConsoleColorsValues as ConsoleColorsValues, IndentedConsoleSpanExporter as IndentedConsoleSpanExporter, ShowParentsConsoleSpanExporter as ShowParentsConsoleSpanExporter, SimpleConsoleSpanExporter as SimpleConsoleSpanExporter +from .exporters.fallback import FallbackSpanExporter as FallbackSpanExporter +from .exporters.file import FileSpanExporter as FileSpanExporter +from .exporters.otlp import OTLPExporterHttpSession as OTLPExporterHttpSession, RetryFewerSpansSpanExporter as RetryFewerSpansSpanExporter +from .exporters.processor_wrapper import MainSpanProcessorWrapper as MainSpanProcessorWrapper +from .exporters.quiet_metrics import QuietMetricExporter as QuietMetricExporter +from .exporters.remove_pending import RemovePendingSpansExporter as RemovePendingSpansExporter +from .exporters.tail_sampling import TailSamplingOptions as TailSamplingOptions, TailSamplingProcessor as TailSamplingProcessor +from .integrations.executors import instrument_executors as instrument_executors +from .metrics import ProxyMeterProvider as ProxyMeterProvider, configure_metrics as configure_metrics +from .scrubbing import BaseScrubber as BaseScrubber, NoopScrubber as NoopScrubber, ScrubCallback as ScrubCallback, Scrubber as Scrubber, ScrubbingOptions as ScrubbingOptions +from .stack_info import get_user_frame_and_stacklevel as get_user_frame_and_stacklevel +from .tracer import PendingSpanProcessor as PendingSpanProcessor, ProxyTracerProvider as ProxyTracerProvider +from .utils import UnexpectedResponse as UnexpectedResponse, ensure_data_dir_exists as ensure_data_dir_exists, get_version as get_version, read_toml_file as read_toml_file, suppress_instrumentation as suppress_instrumentation +from _typeshed import Incomplete +from dataclasses import dataclass +from functools import cached_property +from logfire.exceptions import LogfireConfigError as LogfireConfigError +from logfire.version import VERSION as VERSION +from opentelemetry import metrics +from opentelemetry.sdk.metrics.export import MetricReader as MetricReader +from opentelemetry.sdk.trace import SpanProcessor +from opentelemetry.sdk.trace.export import SpanExporter +from opentelemetry.sdk.trace.id_generator import IdGenerator +from pathlib import Path +from typing import Any, Callable, Literal, Sequence +from typing_extensions import Self + +CREDENTIALS_FILENAME: str +COMMON_REQUEST_HEADERS: Incomplete +PROJECT_NAME_PATTERN: str +METRICS_PREFERRED_TEMPORALITY: Incomplete + +@dataclass +class ConsoleOptions: + """Options for controlling console output.""" + colors: ConsoleColorsValues = ... + span_style: Literal['simple', 'indented', 'show-parents'] = ... + include_timestamps: bool = ... + verbose: bool = ... 
+ min_log_level: LevelName = ... + +@dataclass +class PydanticPlugin: + """Options for the Pydantic plugin.""" + record: PydanticPluginRecordValues = ... + include: set[str] = ... + exclude: set[str] = ... + +def configure(*, send_to_logfire: bool | Literal['if-token-present'] | None = None, token: str | None = None, project_name: str | None = None, service_name: str | None = None, service_version: str | None = None, trace_sample_rate: float | None = None, console: ConsoleOptions | Literal[False] | None = None, show_summary: bool | None = None, config_dir: Path | str | None = None, data_dir: Path | str | None = None, base_url: str | None = None, collect_system_metrics: bool | None = None, id_generator: IdGenerator | None = None, ns_timestamp_generator: Callable[[], int] | None = None, processors: None = None, additional_span_processors: Sequence[SpanProcessor] | None = None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None, metric_readers: None = None, additional_metric_readers: Sequence[MetricReader] | None = None, pydantic_plugin: PydanticPlugin | None = None, fast_shutdown: bool = False, scrubbing_patterns: Sequence[str] | None = None, scrubbing_callback: ScrubCallback | None = None, scrubbing: ScrubbingOptions | Literal[False] | None = None, inspect_arguments: bool | None = None, tail_sampling: TailSamplingOptions | None = None) -> None: + """Configure the logfire SDK. + + Args: + send_to_logfire: Whether to send logs to logfire.dev. Defaults to the `LOGFIRE_SEND_TO_LOGFIRE` environment + variable if set, otherwise defaults to `True`. If `if-token-present` is provided, logs will only be sent if + a token is present. + token: The project token. Defaults to the `LOGFIRE_TOKEN` environment variable. + project_name: Name to request when creating a new project. Defaults to the `LOGFIRE_PROJECT_NAME` environment + variable, or the current directory name. + Project name accepts a string value containing alphanumeric characters and + hyphens (-). The hyphen character must not be located at the beginning or end of the string and should + appear in between alphanumeric characters. + service_name: Name of this service. Defaults to the `LOGFIRE_SERVICE_NAME` environment variable. + service_version: Version of this service. Defaults to the `LOGFIRE_SERVICE_VERSION` environment variable, or the + current git commit hash if available. + trace_sample_rate: Sampling ratio for spans. Defaults to the `LOGFIRE_SAMPLING_RATIO` environment variable, or + the `OTEL_TRACES_SAMPLER_ARG` environment variable, or to `1.0`. + console: Whether to control terminal output. If `None` uses the `LOGFIRE_CONSOLE_*` environment variables, + otherwise defaults to `ConsoleOption(colors='auto', indent_spans=True, include_timestamps=True, verbose=False)`. + If `False` disables console output. It can also be disabled by setting `LOGFIRE_CONSOLE` environment variable to `false`. + show_summary: When to print a summary of the Logfire setup including a link to the dashboard. If `None` uses the `LOGFIRE_SHOW_SUMMARY` environment variable, otherwise + defaults to `True`. + config_dir: Directory that contains the `pyproject.toml` file for this project. If `None` uses the + `LOGFIRE_CONFIG_DIR` environment variable, otherwise defaults to the current working directory. + data_dir: Directory to store credentials, and logs. If `None` uses the `LOGFIRE_CREDENTIALS_DIR` environment variable, otherwise defaults to `'.logfire'`. + base_url: Root URL for the Logfire API. 
If `None` uses the `LOGFIRE_BASE_URL` environment variable, otherwise defaults to https://logfire-api.pydantic.dev. + collect_system_metrics: Whether to collect system metrics like CPU and memory usage. If `None` uses the `LOGFIRE_COLLECT_SYSTEM_METRICS` environment variable, + otherwise defaults to `True`. + id_generator: Generator for span IDs. Defaults to `RandomIdGenerator()` from the OpenTelemetry SDK. + ns_timestamp_generator: Generator for nanosecond timestamps. Defaults to [`time.time_ns`][time.time_ns] from the + Python standard library. + processors: Legacy argument, use `additional_span_processors` instead. + additional_span_processors: Span processors to use in addition to the default processor which exports spans to Logfire's API. + default_span_processor: A function to create the default span processor. Defaults to `BatchSpanProcessor` from the OpenTelemetry SDK. You can configure the export delay for + [`BatchSpanProcessor`](https://opentelemetry-python.readthedocs.io/en/latest/sdk/trace.export.html#opentelemetry.sdk.trace.export.BatchSpanProcessor) + by setting the `OTEL_BSP_SCHEDULE_DELAY_MILLIS` environment variable. + metric_readers: Legacy argument, use `additional_metric_readers` instead. + additional_metric_readers: Sequence of metric readers to be used in addition to the default reader + which exports metrics to Logfire's API. + Ensure that `preferred_temporality=logfire.METRICS_PREFERRED_TEMPORALITY` + is passed to the constructor of metric readers/exporters that accept the `preferred_temporality` argument. + pydantic_plugin: Configuration for the Pydantic plugin. If `None` uses the `LOGFIRE_PYDANTIC_PLUGIN_*` environment + variables, otherwise defaults to `PydanticPlugin(record='off')`. + fast_shutdown: Whether to shut down exporters and providers quickly, mostly used for tests. Defaults to `False`. + scrubbing: Options for scrubbing sensitive data. Set to `False` to disable. + scrubbing_patterns: Deprecated, use `scrubbing=logfire.ScrubbingOptions(extra_patterns=[...])` instead. + scrubbing_callback: Deprecated, use `scrubbing=logfire.ScrubbingOptions(callback=...)` instead. + inspect_arguments: Whether to enable + [f-string magic](https://docs.pydantic.dev/logfire/guides/onboarding_checklist/add_manual_tracing/#f-strings). + If `None` uses the `LOGFIRE_INSPECT_ARGUMENTS` environment variable. + Defaults to `True` if and only if the Python version is at least 3.11. + tail_sampling: Tail sampling options. Not ready for general use. + """ + +@dataclasses.dataclass +class _LogfireConfigData: + """Data-only parent class for LogfireConfig. + + This class can be pickled / copied and gives a nice repr, + while allowing us to keep the ugly stuff only in LogfireConfig. + + In particular, using this dataclass as a base class of LogfireConfig allows us to use + `dataclasses.asdict` in `integrations/executors.py` to get a dict with just the attributes from + `_LogfireConfigData`, and none of the attributes added in `LogfireConfig`. 
+ """ + base_url: str + send_to_logfire: bool | Literal['if-token-present'] + token: str | None + project_name: str | None + service_name: str + trace_sample_rate: float + console: ConsoleOptions | Literal[False] | None + show_summary: bool + data_dir: Path + collect_system_metrics: bool + id_generator: IdGenerator + ns_timestamp_generator: Callable[[], int] + additional_span_processors: Sequence[SpanProcessor] | None + pydantic_plugin: PydanticPlugin + default_span_processor: Callable[[SpanExporter], SpanProcessor] + fast_shutdown: bool + scrubbing: ScrubbingOptions | Literal[False] + inspect_arguments: bool + tail_sampling: TailSamplingOptions | None + +class LogfireConfig(_LogfireConfigData): + def __init__(self, base_url: str | None = None, send_to_logfire: bool | None = None, token: str | None = None, project_name: str | None = None, service_name: str | None = None, service_version: str | None = None, trace_sample_rate: float | None = None, console: ConsoleOptions | Literal[False] | None = None, show_summary: bool | None = None, config_dir: Path | None = None, data_dir: Path | None = None, collect_system_metrics: bool | None = None, id_generator: IdGenerator | None = None, ns_timestamp_generator: Callable[[], int] | None = None, additional_span_processors: Sequence[SpanProcessor] | None = None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None, additional_metric_readers: Sequence[MetricReader] | None = None, pydantic_plugin: PydanticPlugin | None = None, fast_shutdown: bool = False, scrubbing: ScrubbingOptions | Literal[False] | None = None, inspect_arguments: bool | None = None, tail_sampling: TailSamplingOptions | None = None) -> None: + """Create a new LogfireConfig. + + Users should never need to call this directly, instead use `logfire.configure`. + + See `_LogfireConfigData` for parameter documentation. + """ + def configure(self, base_url: str | None, send_to_logfire: bool | Literal['if-token-present'] | None, token: str | None, project_name: str | None, service_name: str | None, service_version: str | None, trace_sample_rate: float | None, console: ConsoleOptions | Literal[False] | None, show_summary: bool | None, config_dir: Path | None, data_dir: Path | None, collect_system_metrics: bool | None, id_generator: IdGenerator | None, ns_timestamp_generator: Callable[[], int] | None, additional_span_processors: Sequence[SpanProcessor] | None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None, additional_metric_readers: Sequence[MetricReader] | None, pydantic_plugin: PydanticPlugin | None, fast_shutdown: bool, scrubbing: ScrubbingOptions | Literal[False] | None, inspect_arguments: bool | None, tail_sampling: TailSamplingOptions | None) -> None: ... + def initialize(self) -> ProxyTracerProvider: + """Configure internals to start exporting traces and metrics.""" + def get_tracer_provider(self) -> ProxyTracerProvider: + """Get a tracer provider from this `LogfireConfig`. + + This is used internally and should not be called by users of the SDK. + + Returns: + The tracer provider. + """ + def get_meter_provider(self) -> ProxyMeterProvider: + """Get a meter provider from this `LogfireConfig`. + + This is used internally and should not be called by users of the SDK. + + Returns: + The meter provider. + """ + def warn_if_not_initialized(self, message: str): ... + @cached_property + def meter(self) -> metrics.Meter: + """Get a meter from this `LogfireConfig`. + + This is used internally and should not be called by users of the SDK. 
+ + Returns: + The meter. + """ + +GLOBAL_CONFIG: Incomplete + +@dataclasses.dataclass +class LogfireCredentials: + """Credentials for logfire.dev.""" + token: str + project_name: str + project_url: str + logfire_api_url: str + @classmethod + def load_creds_file(cls, creds_dir: Path) -> Self | None: + """Check if a credentials file exists and if so load it. + + Args: + creds_dir: Path to the credentials directory. + + Returns: + The loaded credentials or `None` if the file does not exist. + + Raises: + LogfireConfigError: If the credentials file exists but is invalid. + """ + @classmethod + def from_token(cls, token: str, session: requests.Session, base_url: str) -> Self | None: + """Check that the token is valid. + + Issue a warning if the Logfire API is unreachable, or we get a response other than 200 or 401. + + We continue unless we get a 401. If something is wrong, we'll later store data locally for back-fill. + + Raises: + LogfireConfigError: If the token is invalid. + """ + @classmethod + def get_current_user(cls, session: requests.Session, logfire_api_url: str) -> dict[str, Any] | None: ... + @classmethod + def get_user_projects(cls, session: requests.Session, logfire_api_url: str) -> list[dict[str, Any]]: + """Get the list of projects that the user has access to. + + Args: + session: HTTP client session used to communicate with the Logfire API. + logfire_api_url: The Logfire API base URL. + + Returns: + List of user projects. + + Raises: + LogfireConfigError: If there was an error retrieving user projects. + """ + @classmethod + def use_existing_project(cls, *, session: requests.Session, logfire_api_url: str, projects: list[dict[str, Any]], organization: str | None = None, project_name: str | None = None) -> dict[str, Any] | None: + """Configure one of the user projects to be used by Logfire. + + It configures the project if organization/project_name identifies a valid project that + the user has access to. Otherwise, it asks the user to select a project interactively. + + Args: + session: HTTP client session used to communicate with the Logfire API. + logfire_api_url: The Logfire API base URL. + projects: List of user projects. + organization: Project organization. + project_name: Name of the project to use. + + Returns: + The configured project information. + + Raises: + LogfireConfigError: If there was an error configuring the project. + """ + @classmethod + def create_new_project(cls, *, session: requests.Session, logfire_api_url: str, organization: str | None = None, default_organization: bool = False, project_name: str | None = None, force_project_name_prompt: bool = False) -> dict[str, Any]: + """Create a new project and configure it to be used by Logfire. + + It creates the project under the organization if both project and organization are valid. + Otherwise, it asks the user to select an organization and enter a valid project name interactively. + + Args: + session: HTTP client session used to communicate with the Logfire API. + logfire_api_url: The Logfire API base URL. + organization: The organization name of the new project. + default_organization: Whether to create the project under the user default organization. + project_name: The default name of the project. + force_project_name_prompt: Whether to force a prompt for the project name. + + Returns: + The created project information. + + Raises: + LogfireConfigError: If there was an error creating the project.
+ """ + @classmethod + def initialize_project(cls, *, logfire_api_url: str, project_name: str | None, session: requests.Session) -> Self: + """Create a new project or use an existing project on logfire.dev requesting the given project name. + + Args: + logfire_api_url: The Logfire API base URL. + project_name: Name for the project. + user_token: The user's token to use to create the new project. + session: HTTP client session used to communicate with the Logfire API. + + Returns: + The new credentials. + + Raises: + LogfireConfigError: If there was an error on creating/configuring the project. + """ + def write_creds_file(self, creds_dir: Path) -> None: + """Write a credentials file to the given path.""" + def print_token_summary(self) -> None: + """Print a summary of the existing project.""" + +def get_git_revision_hash() -> str: + """Get the current git commit hash.""" +def sanitize_project_name(name: str) -> str: + """Convert `name` to a string suitable for the `requested_project_name` API parameter.""" +def default_project_name(): ... + +class LogfireNotConfiguredWarning(UserWarning): ... diff --git a/logfire-api/logfire_api/_internal/config_params.pyi b/logfire-api/logfire_api/_internal/config_params.pyi new file mode 100644 index 00000000..adb32310 --- /dev/null +++ b/logfire-api/logfire_api/_internal/config_params.pyi @@ -0,0 +1,82 @@ +from . import config as config +from .constants import LOGFIRE_BASE_URL as LOGFIRE_BASE_URL, LevelName as LevelName +from .exporters.console import ConsoleColorsValues as ConsoleColorsValues +from .utils import read_toml_file as read_toml_file +from _typeshed import Incomplete +from dataclasses import dataclass +from functools import cached_property +from logfire.exceptions import LogfireConfigError as LogfireConfigError +from pathlib import Path +from typing import Any, Callable, TypeVar + +COLLECT_SYSTEM_METRICS_DEFAULT: bool +T = TypeVar('T') +slots_true: Incomplete +PydanticPluginRecordValues: Incomplete + +@dataclass(**slots_true) +class ConfigParam: + """A parameter that can be configured for a Logfire instance.""" + env_vars: list[str] + allow_file_config: bool = ... + default: Any = ... + tp: Any = ... + +@dataclass +class _DefaultCallback: + """A default value that is computed at runtime. + + A good example is when we want to check if we are running under pytest and set a default value based on that. + """ + callback: Callable[[], Any] + +BASE_URL: Incomplete +SEND_TO_LOGFIRE: Incomplete +TOKEN: Incomplete +PROJECT_NAME: Incomplete +SERVICE_NAME: Incomplete +SERVICE_VERSION: Incomplete +SHOW_SUMMARY: Incomplete +CREDENTIALS_DIR: Incomplete +COLLECT_SYSTEM_METRICS: Incomplete +CONSOLE: Incomplete +CONSOLE_COLORS: Incomplete +CONSOLE_SPAN_STYLE: Incomplete +CONSOLE_INCLUDE_TIMESTAMP: Incomplete +CONSOLE_VERBOSE: Incomplete +CONSOLE_MIN_LOG_LEVEL: Incomplete +PYDANTIC_PLUGIN_RECORD: Incomplete +PYDANTIC_PLUGIN_INCLUDE: Incomplete +PYDANTIC_PLUGIN_EXCLUDE: Incomplete +TRACE_SAMPLE_RATE: Incomplete +INSPECT_ARGUMENTS: Incomplete +IGNORE_NO_CONFIG: Incomplete +CONFIG_PARAMS: Incomplete + +@dataclass +class ParamManager: + """Manage parameters for a Logfire instance.""" + config_from_file: dict[str, Any] + @classmethod + def create(cls, config_dir: Path | None = None) -> ParamManager: ... + def load_param(self, name: str, runtime: Any = None) -> Any: + """Load a parameter given its name. + + The parameter is loaded in the following order: + 1. From the runtime argument, if provided. + 2. From the environment variables. + 3. 
From the config file, if allowed. + + If none of the above is found, the default value is returned. + + Args: + name: Name of the parameter. + runtime: Value provided at runtime. + + Returns: + The value of the parameter. + """ + @cached_property + def pydantic_plugin(self): ... + +def default_param_manager(): ... diff --git a/logfire-api/logfire_api/_internal/constants.pyi b/logfire-api/logfire_api/_internal/constants.pyi new file mode 100644 index 00000000..70b09458 --- /dev/null +++ b/logfire-api/logfire_api/_internal/constants.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from opentelemetry.util import types as otel_types + +LOGFIRE_ATTRIBUTES_NAMESPACE: str +LevelName: Incomplete +LEVEL_NUMBERS: Incomplete +NUMBER_TO_LEVEL: Incomplete +LOGGING_TO_OTEL_LEVEL_NUMBERS: Incomplete +ATTRIBUTES_LOG_LEVEL_NAME_KEY: Incomplete +ATTRIBUTES_LOG_LEVEL_NUM_KEY: Incomplete + +def log_level_attributes(level: LevelName | int) -> dict[str, otel_types.AttributeValue]: ... + +SpanTypeType: Incomplete +ATTRIBUTES_SPAN_TYPE_KEY: Incomplete +ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY: Incomplete +ATTRIBUTES_TAGS_KEY: Incomplete +ATTRIBUTES_MESSAGE_TEMPLATE_KEY: Incomplete +ATTRIBUTES_MESSAGE_KEY: Incomplete +DISABLE_CONSOLE_KEY: Incomplete +ATTRIBUTES_JSON_SCHEMA_KEY: Incomplete +ATTRIBUTES_LOGGING_ARGS_KEY: Incomplete +ATTRIBUTES_VALIDATION_ERROR_KEY: str +NULL_ARGS_KEY: str +PENDING_SPAN_NAME_SUFFIX: str +LOGFIRE_BASE_URL: str +RESOURCE_ATTRIBUTES_PACKAGE_VERSIONS: str +OTLP_MAX_INT_SIZE: Incomplete +DEFAULT_FALLBACK_FILE_NAME: str +ATTRIBUTES_SAMPLE_RATE_KEY: str +CONTEXT_ATTRIBUTES_KEY: Incomplete +CONTEXT_SAMPLE_RATE_KEY: Incomplete +OTLP_MAX_BODY_SIZE: Incomplete +MESSAGE_FORMATTED_VALUE_LENGTH_LIMIT: int +ONE_SECOND_IN_NANOSECONDS: int diff --git a/logfire-api/logfire_api/_internal/exporters/__init__.pyi b/logfire-api/logfire_api/_internal/exporters/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/logfire-api/logfire_api/_internal/exporters/console.pyi b/logfire-api/logfire_api/_internal/exporters/console.pyi new file mode 100644 index 00000000..5166a49e --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/console.pyi @@ -0,0 +1,39 @@ +from ..constants import ATTRIBUTES_JSON_SCHEMA_KEY as ATTRIBUTES_JSON_SCHEMA_KEY, ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY as ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, ATTRIBUTES_TAGS_KEY as ATTRIBUTES_TAGS_KEY, DISABLE_CONSOLE_KEY as DISABLE_CONSOLE_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, LevelName as LevelName, NUMBER_TO_LEVEL as NUMBER_TO_LEVEL, ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS +from ..json_formatter import json_args_value_formatter as json_args_value_formatter +from _typeshed import Incomplete +from collections.abc import Sequence +from opentelemetry.sdk.trace import ReadableSpan +from opentelemetry.sdk.trace.export import SpanExportResult, SpanExporter +from typing import TextIO + +ConsoleColorsValues: Incomplete +TextParts = list[tuple[str, str]] + +class SimpleConsoleSpanExporter(SpanExporter): + """The ConsoleSpanExporter prints spans to the console. + + This simple version does not indent spans based on their parent(s), instead spans are printed as a + flat list. 
+ """ + def __init__(self, output: TextIO | None = None, colors: ConsoleColorsValues = 'auto', include_timestamp: bool = True, verbose: bool = False, min_log_level: LevelName = 'info') -> None: ... + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: + """Export the spans to the console.""" + def force_flush(self, timeout_millis: int = 0) -> bool: + """Force flush all spans, does nothing for this exporter.""" + +class IndentedConsoleSpanExporter(SimpleConsoleSpanExporter): + """The ConsoleSpanExporter exports spans to the console, indented. + + Spans are intended based simply on how many parents they have. This will work well when spans don't overlap, + but will be hard to understand when multiple spans are in progress at the same time. + """ + def __init__(self, output: TextIO | None = None, colors: ConsoleColorsValues = 'auto', include_timestamp: bool = True, verbose: bool = False, min_log_level: LevelName = 'info') -> None: ... + +class ShowParentsConsoleSpanExporter(SimpleConsoleSpanExporter): + '''The ConsoleSpanExporter exports spans to the console, indented with parents displayed where necessary. + + Spans are intended based on how many parents they have, where multiple concurrent spans overlap and therefore + the previously displayed span is not the parent or sibling of a span, parents are printed (with "dim" color) + so it\'s easy (or as easy as possible in a terminal) to understand how nested spans are related. + ''' + def __init__(self, output: TextIO | None = None, colors: ConsoleColorsValues = 'auto', include_timestamp: bool = True, verbose: bool = False, min_log_level: LevelName = 'info') -> None: ... diff --git a/logfire-api/logfire_api/_internal/exporters/fallback.pyi b/logfire-api/logfire_api/_internal/exporters/fallback.pyi new file mode 100644 index 00000000..425e1e73 --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/fallback.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan +from opentelemetry.sdk.trace.export import SpanExportResult, SpanExporter +from typing import Sequence + +class FallbackSpanExporter(SpanExporter): + exporter: Incomplete + fallback: Incomplete + def __init__(self, exporter: SpanExporter, fallback: SpanExporter) -> None: ... + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: ... + def force_flush(self, timeout_millis: int = 30000) -> bool: ... + def shutdown(self) -> None: ... diff --git a/logfire-api/logfire_api/_internal/exporters/file.pyi b/logfire-api/logfire_api/_internal/exporters/file.pyi new file mode 100644 index 00000000..b856014f --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/file.pyi @@ -0,0 +1,64 @@ +from ..constants import DEFAULT_FALLBACK_FILE_NAME as DEFAULT_FALLBACK_FILE_NAME +from ..utils import ensure_data_dir_exists as ensure_data_dir_exists +from _typeshed import Incomplete +from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ExportTraceServiceRequest +from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan +from opentelemetry.sdk.trace.export import SpanExportResult, SpanExporter +from pathlib import Path +from typing import Generator, IO, Iterable, Iterator, Sequence + +HEADER: bytes +VERSION: bytes + +class Writer: + def write_header(self) -> bytes: ... + def write(self, spans: ExportTraceServiceRequest) -> Iterable[bytes]: ... + +class WritingFallbackWarning(Warning): ... 
+
+class FileSpanExporter(SpanExporter):
+    file_path: Incomplete
+    def __init__(self, file_path: str | Path | IO[bytes], *, warn: bool = False) -> None: ...
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: ...
+    def force_flush(self, timeout_millis: int = 30000) -> bool: ...
+    def shutdown(self) -> None: ...
+
+class FileParser:
+    MISSING_HEADER: int
+    MISSING_VERSION: int
+    MISSING_BEG: int
+    IN_MESSAGE: int
+    state: Incomplete
+    buffer: Incomplete
+    message_size: int
+    def __init__(self) -> None: ...
+    def get_suggested_read_size(self) -> int: ...
+    def finish(self) -> None: ...
+    def push(self, data: bytes) -> Generator[ExportTraceServiceRequest, None, None]: ...
+
+class InvalidFile(ValueError):
+    """Raised when a dump file is invalid."""
+
+def load_file(file_path: str | Path | IO[bytes] | None) -> Iterator[ExportTraceServiceRequest]:
+    """Load a backup file.
+
+    Args:
+        file_path: The path to the backup file.
+
+    Raises:
+        ValueError: If the file is not a valid backup file.
+
+    Returns:
+        An iterator over each `ExportTraceServiceRequest` message in the backup file.
+    """
+def to_json_lines(file_path: str | Path | IO[bytes] | None) -> Iterator[str]:
+    """Convert a backup file to JSON lines.
+
+    Args:
+        file_path: The path to the backup file.
+
+    Raises:
+        ValueError: If the file is not a valid backup file.
+
+    Returns:
+        An iterator over each JSON line in the backup file.
+    """
diff --git a/logfire-api/logfire_api/_internal/exporters/otlp.pyi b/logfire-api/logfire_api/_internal/exporters/otlp.pyi
new file mode 100644
index 00000000..4c0b683e
--- /dev/null
+++ b/logfire-api/logfire_api/_internal/exporters/otlp.pyi
@@ -0,0 +1,49 @@
+import requests
+from ..stack_info import STACK_INFO_KEYS as STACK_INFO_KEYS
+from ..utils import logger as logger, truncate_string as truncate_string
+from .wrapper import WrapperSpanExporter as WrapperSpanExporter
+from _typeshed import Incomplete
+from functools import cached_property
+from opentelemetry.sdk.trace import ReadableSpan
+from opentelemetry.sdk.trace.export import SpanExportResult
+from requests import Session
+from typing import Any, Mapping, Sequence
+
+class OTLPExporterHttpSession(Session):
+    """A requests.Session subclass that raises a BodyTooLargeError if the request body is too large.
+
+    Also defers failed requests to a DiskRetryer.
+    """
+    max_body_size: Incomplete
+    def __init__(self, *args: Any, max_body_size: int, **kwargs: Any) -> None: ...
+    def post(self, url: str, data: bytes, **kwargs: Any): ...
+    @cached_property
+    def retryer(self) -> DiskRetryer: ...
+
+def raise_for_retryable_status(response: requests.Response): ...
+
+class DiskRetryer:
+    """Retries requests failed by OTLPExporterHttpSession, saving the request body to disk to save memory."""
+    MAX_DELAY: int
+    MAX_TASKS: int
+    LOG_INTERVAL: int
+    lock: Incomplete
+    thread: Incomplete
+    tasks: Incomplete
+    session: Incomplete
+    dir: Incomplete
+    last_log_time: Incomplete
+    def __init__(self, headers: Mapping[str, str | bytes]) -> None: ...
+    def add_task(self, data: bytes, kwargs: dict[str, Any]): ...
+
+class RetryFewerSpansSpanExporter(WrapperSpanExporter):
+    """A SpanExporter that retries exporting spans in smaller batches if BodyTooLargeError is raised.
+
+    This wraps another exporter, typically an OTLPSpanExporter using an OTLPExporterHttpSession.
+    """
+    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: ...
+ +class BodyTooLargeError(Exception): + size: Incomplete + max_size: Incomplete + def __init__(self, size: int, max_size: int) -> None: ... diff --git a/logfire-api/logfire_api/_internal/exporters/processor_wrapper.pyi b/logfire-api/logfire_api/_internal/exporters/processor_wrapper.pyi new file mode 100644 index 00000000..b3e39517 --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/processor_wrapper.pyi @@ -0,0 +1,18 @@ +from ..constants import ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, PENDING_SPAN_NAME_SUFFIX as PENDING_SPAN_NAME_SUFFIX, log_level_attributes as log_level_attributes +from ..scrubbing import BaseScrubber as BaseScrubber +from ..utils import ReadableSpanDict as ReadableSpanDict, is_instrumentation_suppressed as is_instrumentation_suppressed, span_to_dict as span_to_dict, truncate_string as truncate_string +from .wrapper import WrapperSpanProcessor as WrapperSpanProcessor +from _typeshed import Incomplete +from opentelemetry import context +from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor + +class MainSpanProcessorWrapper(WrapperSpanProcessor): + """Wrapper around other processors to intercept starting and ending spans with our own global logic. + + Suppresses starting/ending if the current context has a `suppress_instrumentation` value. + Tweaks the send/receive span names generated by the ASGI middleware. + """ + scrubber: Incomplete + def __init__(self, processor: SpanProcessor, scrubber: BaseScrubber) -> None: ... + def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ... + def on_end(self, span: ReadableSpan) -> None: ... diff --git a/logfire-api/logfire_api/_internal/exporters/quiet_metrics.pyi b/logfire-api/logfire_api/_internal/exporters/quiet_metrics.pyi new file mode 100644 index 00000000..d6ec7670 --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/quiet_metrics.pyi @@ -0,0 +1,8 @@ +from .wrapper import WrapperMetricExporter as WrapperMetricExporter +from opentelemetry.sdk.metrics._internal.export import MetricExportResult +from opentelemetry.sdk.metrics._internal.point import MetricsData +from typing import Any + +class QuietMetricExporter(WrapperMetricExporter): + """A MetricExporter that catches request exceptions to prevent OTEL from logging a huge traceback.""" + def export(self, metrics_data: MetricsData, timeout_millis: float = 10000, **kwargs: Any) -> MetricExportResult: ... diff --git a/logfire-api/logfire_api/_internal/exporters/remove_pending.pyi b/logfire-api/logfire_api/_internal/exporters/remove_pending.pyi new file mode 100644 index 00000000..5286f6ff --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/remove_pending.pyi @@ -0,0 +1,9 @@ +from ..constants import ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY +from .wrapper import WrapperSpanExporter as WrapperSpanExporter +from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan +from opentelemetry.sdk.trace.export import SpanExportResult +from typing import Sequence + +class RemovePendingSpansExporter(WrapperSpanExporter): + """An exporter that filters out pending spans if the corresponding final span is already in the same batch.""" + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: ... 
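The exporter stubs above all follow one composition pattern: concrete exporters write spans somewhere, and wrapper exporters add behaviour around another exporter. As a minimal sketch of how such a chain could be assembled, using only the constructor signatures shown in the stubs (the endpoint URL and file name are placeholder values, and these `_internal` modules are not a supported public API):

```py
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor

from logfire._internal.exporters.fallback import FallbackSpanExporter
from logfire._internal.exporters.file import FileSpanExporter

# Spans that fail to export over the network are diverted to a local
# backup file instead of being silently dropped.
exporter = FallbackSpanExporter(
    exporter=OTLPSpanExporter(endpoint='https://example.com/v1/traces'),  # placeholder endpoint
    fallback=FileSpanExporter('logfire_spans.bin', warn=True),
)

provider = TracerProvider()
provider.add_span_processor(BatchSpanProcessor(exporter))
```

The `load_file` and `to_json_lines` functions stubbed in `file.pyi` can then read such a backup file back.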
diff --git a/logfire-api/logfire_api/_internal/exporters/tail_sampling.pyi b/logfire-api/logfire_api/_internal/exporters/tail_sampling.pyi new file mode 100644 index 00000000..2a3a8e21 --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/tail_sampling.pyi @@ -0,0 +1,35 @@ +from _typeshed import Incomplete +from dataclasses import dataclass +from functools import cached_property +from logfire._internal.constants import ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, LevelName as LevelName, ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS +from logfire._internal.exporters.wrapper import WrapperSpanProcessor as WrapperSpanProcessor +from opentelemetry import context +from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor + +@dataclass +class TailSamplingOptions: + level: LevelName | None = ... + duration: float | None = ... + +@dataclass +class TraceBuffer: + """Arguments of `on_start` and `on_end` for spans in a single trace.""" + started: list[tuple[Span, context.Context | None]] + ended: list[ReadableSpan] + @cached_property + def first_span(self) -> Span: ... + +class TailSamplingProcessor(WrapperSpanProcessor): + """Passes spans to the wrapped processor if any span in a trace meets the sampling criteria.""" + duration: Incomplete + level: Incomplete + random_rate: Incomplete + traces: Incomplete + lock: Incomplete + def __init__(self, processor: SpanProcessor, options: TailSamplingOptions, random_rate: float) -> None: ... + def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ... + def on_end(self, span: ReadableSpan) -> None: ... + def check_span(self, span: ReadableSpan, buffer: TraceBuffer) -> bool: + """If the span meets the sampling criteria, drop the buffer and return True. Otherwise, return False.""" + def drop_buffer(self, buffer: TraceBuffer) -> None: ... + def push_buffer(self, buffer: TraceBuffer) -> None: ... diff --git a/logfire-api/logfire_api/_internal/exporters/wrapper.pyi b/logfire-api/logfire_api/_internal/exporters/wrapper.pyi new file mode 100644 index 00000000..c329d4a0 --- /dev/null +++ b/logfire-api/logfire_api/_internal/exporters/wrapper.pyi @@ -0,0 +1,32 @@ +from _typeshed import Incomplete +from opentelemetry import context +from opentelemetry.sdk.metrics.export import AggregationTemporality as AggregationTemporality, MetricExportResult, MetricExporter, MetricsData +from opentelemetry.sdk.metrics.view import Aggregation as Aggregation +from opentelemetry.sdk.trace import ReadableSpan, Span, SpanProcessor +from opentelemetry.sdk.trace.export import SpanExportResult, SpanExporter +from typing import Any, Sequence + +class WrapperSpanExporter(SpanExporter): + """A base class for SpanExporters that wrap another exporter.""" + wrapped_exporter: Incomplete + def __init__(self, exporter: SpanExporter) -> None: ... + def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult: ... + def force_flush(self, timeout_millis: int = 30000) -> bool: ... + def shutdown(self) -> None: ... + +class WrapperMetricExporter(MetricExporter): + """A base class for MetricExporters that wrap another exporter.""" + wrapped_exporter: Incomplete + def __init__(self, exporter: MetricExporter, preferred_temporality: dict[type, AggregationTemporality] | None = None, preferred_aggregation: dict[type, Aggregation] | None = None) -> None: ... + def export(self, metrics_data: MetricsData, timeout_millis: float = 10000, **kwargs: Any) -> MetricExportResult: ... 
+ def force_flush(self, timeout_millis: float = 10000) -> bool: ... + def shutdown(self, timeout_millis: float = 30000, **kwargs: Any) -> None: ... + +class WrapperSpanProcessor(SpanProcessor): + """A base class for SpanProcessors that wrap another processor.""" + processor: Incomplete + def __init__(self, processor: SpanProcessor) -> None: ... + def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ... + def on_end(self, span: ReadableSpan) -> None: ... + def shutdown(self) -> None: ... + def force_flush(self, timeout_millis: int = 30000) -> bool: ... diff --git a/logfire-api/logfire_api/_internal/formatter.pyi b/logfire-api/logfire_api/_internal/formatter.pyi new file mode 100644 index 00000000..84bffde9 --- /dev/null +++ b/logfire-api/logfire_api/_internal/formatter.pyi @@ -0,0 +1,50 @@ +import ast +import executing +import types +from .constants import MESSAGE_FORMATTED_VALUE_LENGTH_LIMIT as MESSAGE_FORMATTED_VALUE_LENGTH_LIMIT +from .scrubbing import BaseScrubber as BaseScrubber +from .utils import truncate_string as truncate_string +from _typeshed import Incomplete +from logfire._internal.stack_info import get_user_frame_and_stacklevel as get_user_frame_and_stacklevel +from string import Formatter +from types import CodeType as CodeType +from typing import Any, Final, Literal, Mapping +from typing_extensions import NotRequired, TypedDict + +class LiteralChunk(TypedDict): + t: Literal['lit'] + v: str + +class ArgChunk(TypedDict): + t: Literal['arg'] + v: str + spec: NotRequired[str] + +class ChunksFormatter(Formatter): + NONE_REPR: Final[str] + def chunks(self, format_string: str, kwargs: Mapping[str, Any], *, scrubber: BaseScrubber, fstring_frame: types.FrameType | None = None) -> tuple[list[LiteralChunk | ArgChunk], dict[str, Any], str]: ... + +chunks_formatter: Incomplete + +def logfire_format(format_string: str, kwargs: dict[str, Any], scrubber: BaseScrubber) -> str: ... +def logfire_format_with_magic(format_string: str, kwargs: dict[str, Any], scrubber: BaseScrubber, fstring_frame: types.FrameType | None = None) -> tuple[str, dict[str, Any], str]: ... +def compile_formatted_value(node: ast.FormattedValue, ex_source: executing.Source) -> tuple[str, CodeType, CodeType]: + """Returns three things that can be expensive to compute. + + 1. Source code corresponding to the node value (excluding the format spec). + 2. A compiled code object which can be evaluated to calculate the value. + 3. Another code object which formats the value. + """ +def get_node_source_text(node: ast.AST, ex_source: executing.Source): + """Returns some Python source code representing `node`. + + Preferably the actual original code given by `ast.get_source_segment`, + but falling back to `ast.unparse(node)` if the former is incorrect. + This happens sometimes due to Python bugs (especially for older Python versions) + in the source positions of AST nodes inside f-strings. + """ +def get_stacklevel(frame: types.FrameType): ... + +class InspectArgumentsFailedWarning(Warning): ... + +def warn_inspect_arguments(msg: str, stacklevel: int): ... 
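The two base classes in `wrapper.pyi` are what `RetryFewerSpansSpanExporter`, `RemovePendingSpansExporter`, `MainSpanProcessorWrapper` and `TailSamplingProcessor` build on: a subclass overrides the one method it cares about and inherits plain delegation for the rest. A rough sketch of the pattern (this filtering subclass and its predicate are invented for illustration, not part of the package):

```py
from typing import Sequence

from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult

from logfire._internal.exporters.wrapper import WrapperSpanExporter


class DropNoisySpansExporter(WrapperSpanExporter):
    """Hypothetical wrapper that skips spans with a given name before exporting."""

    def __init__(self, exporter: SpanExporter, noisy_name: str) -> None:
        super().__init__(exporter)
        self.noisy_name = noisy_name

    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        kept = [span for span in spans if span.name != self.noisy_name]
        # force_flush() and shutdown() are inherited and simply delegate
        # to self.wrapped_exporter.
        return self.wrapped_exporter.export(kept)
```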
diff --git a/logfire-api/logfire_api/_internal/instrument.pyi b/logfire-api/logfire_api/_internal/instrument.pyi new file mode 100644 index 00000000..16dcb79f --- /dev/null +++ b/logfire-api/logfire_api/_internal/instrument.pyi @@ -0,0 +1,17 @@ +import ast +from .ast_utils import BaseTransformer as BaseTransformer, LogfireArgs as LogfireArgs +from .main import Logfire as Logfire +from dataclasses import dataclass +from types import CodeType +from typing import Callable + +def instrument(logfire: Logfire, args: LogfireArgs) -> Callable[[Callable[_PARAMS, _RETURN]], Callable[_PARAMS, _RETURN]]: ... +def transform_code(func_code: CodeType, args: LogfireArgs): ... + +@dataclass +class InstrumentTransformer(BaseTransformer): + """Only modifies the function definition at the given line.""" + code_lineno: int + def rewrite_function(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.AST: ... + def logfire_method_call_node(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> ast.Call: ... + def logfire_method_arg_nodes(self, node: ast.FunctionDef | ast.AsyncFunctionDef, qualname: str) -> list[ast.expr]: ... diff --git a/logfire-api/logfire_api/_internal/integrations/__init__.pyi b/logfire-api/logfire_api/_internal/integrations/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/logfire-api/logfire_api/_internal/integrations/aiohttp_client.pyi b/logfire-api/logfire_api/_internal/integrations/aiohttp_client.pyi new file mode 100644 index 00000000..0b56586c --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/aiohttp_client.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def instrument_aiohttp_client(**kwargs: Any): + """Instrument the `aiohttp` module so that spans are automatically created for each client request. + + See the `Logfire.instrument_aiohttp_client` method for details. + """ diff --git a/logfire-api/logfire_api/_internal/integrations/asyncpg.pyi b/logfire-api/logfire_api/_internal/integrations/asyncpg.pyi new file mode 100644 index 00000000..3dbcd7c6 --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/asyncpg.pyi @@ -0,0 +1,5 @@ +def instrument_asyncpg() -> None: + """Instrument the `asyncpg` module so that spans are automatically created for each query. + + See the `Logfire.instrument_asyncpg` method for details. + """ diff --git a/logfire-api/logfire_api/_internal/integrations/django.pyi b/logfire-api/logfire_api/_internal/integrations/django.pyi new file mode 100644 index 00000000..34887516 --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/django.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def instrument_django(**kwargs: Any): + """Instrument the `django` module so that spans are automatically created for each web request. + + See the `Logfire.instrument_django` method for details. 
+ """ diff --git a/logfire-api/logfire_api/_internal/integrations/executors.pyi b/logfire-api/logfire_api/_internal/integrations/executors.pyi new file mode 100644 index 00000000..51f54ead --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/executors.pyi @@ -0,0 +1,18 @@ +from _typeshed import Incomplete +from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor +from logfire.propagate import ContextCarrier as ContextCarrier, attach_context as attach_context, get_context as get_context +from typing import Any, Callable + +submit_t_orig: Incomplete +submit_p_orig: Incomplete + +def instrument_executors() -> None: + """Monkey-patch `submit()` methods of `ThreadPoolExecutor` and `ProcessPoolExecutor` + to carry over OTEL context across threads and processes. + """ +def submit_t(s: ThreadPoolExecutor, fn: Callable[..., Any], /, *args: Any, **kwargs: Any): + """A wrapper around ThreadPoolExecutor.submit() that carries over OTEL context across threads.""" +def submit_p(s: ProcessPoolExecutor, fn: Callable[..., Any], /, *args: Any, **kwargs: Any): + """A wrapper around ProcessPoolExecutor.submit() that carries over OTEL context across processes.""" +def serialize_config() -> dict[str, Any]: ... +def deserialize_config(config: dict[str, Any]) -> None: ... diff --git a/logfire-api/logfire_api/_internal/integrations/fastapi.pyi b/logfire-api/logfire_api/_internal/integrations/fastapi.pyi new file mode 100644 index 00000000..7055463b --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/fastapi.pyi @@ -0,0 +1,28 @@ +from ..main import Logfire as Logfire +from ..stack_info import StackInfo as StackInfo, get_code_object_info as get_code_object_info +from _typeshed import Incomplete +from fastapi import FastAPI +from starlette.requests import Request +from starlette.websockets import WebSocket +from typing import Any, Awaitable, Callable, ContextManager, Iterable + +def find_mounted_apps(app: FastAPI) -> list[FastAPI]: + """Fetch all sub-apps mounted to a FastAPI app, including nested sub-apps.""" +def instrument_fastapi(logfire_instance: Logfire, app: FastAPI, *, request_attributes_mapper: Callable[[Request | WebSocket, dict[str, Any]], dict[str, Any] | None] | None = None, use_opentelemetry_instrumentation: bool = True, excluded_urls: str | Iterable[str] | None = None, **opentelemetry_kwargs: Any) -> ContextManager[None]: + """Instrument a FastAPI app so that spans and logs are automatically created for each request. + + See `Logfire.instrument_fastapi` for more details. + """ +def patch_fastapi(): + """Globally monkeypatch fastapi functions and return a dictionary for recording instrumentation config per app.""" + +class FastAPIInstrumentation: + logfire_instance: Incomplete + request_attributes_mapper: Incomplete + excluded_urls_list: Incomplete + def __init__(self, logfire_instance: Logfire, request_attributes_mapper: Callable[[Request | WebSocket, dict[str, Any]], dict[str, Any] | None], excluded_urls: str | None) -> None: ... + async def solve_dependencies(self, request: Request | WebSocket, original: Awaitable[tuple[dict[str, Any], list[Any], Any, Any, Any]]): ... + async def run_endpoint_function(self, original_run_endpoint_function: Any, request: Request, dependant: Any, values: dict[str, Any], **kwargs: Any) -> Any: ... 
+ +class _InstrumentedValues(dict): + request: Request diff --git a/logfire-api/logfire_api/_internal/integrations/flask.pyi b/logfire-api/logfire_api/_internal/integrations/flask.pyi new file mode 100644 index 00000000..5acfe373 --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/flask.pyi @@ -0,0 +1,8 @@ +from flask.app import Flask +from typing import Any + +def instrument_flask(app: Flask, **kwargs: Any): + """Instrument `app` so that spans are automatically created for each request. + + See the `Logfire.instrument_flask` method for details. + """ diff --git a/logfire-api/logfire_api/_internal/integrations/httpx.pyi b/logfire-api/logfire_api/_internal/integrations/httpx.pyi new file mode 100644 index 00000000..40a49b4d --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/httpx.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def instrument_httpx(**kwargs: Any): + """Instrument the `httpx` module so that spans are automatically created for each request. + + See the `Logfire.instrument_httpx` method for details. + """ diff --git a/logfire-api/logfire_api/_internal/integrations/psycopg.pyi b/logfire-api/logfire_api/_internal/integrations/psycopg.pyi new file mode 100644 index 00000000..64885f83 --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/psycopg.pyi @@ -0,0 +1,12 @@ +from _typeshed import Incomplete +from typing import Any + +Instrumentor: Incomplete +PACKAGE_NAMES: Incomplete + +def instrument_psycopg(conn_or_module: Any = None, **kwargs: Any): + """Instrument a `psycopg` connection or module so that spans are automatically created for each query. + + See the `Logfire.instrument_psycopg` method for details. + """ +def check_version(name: str, version: str, instrumentor: Instrumentor): ... diff --git a/logfire-api/logfire_api/_internal/integrations/pymongo.pyi b/logfire-api/logfire_api/_internal/integrations/pymongo.pyi new file mode 100644 index 00000000..5531a117 --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/pymongo.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def instrument_pymongo(**kwargs: Any): + """Instrument the `pymongo` module so that spans are automatically created for each operation. + + See the `Logfire.instrument_pymongo` method for details. + """ diff --git a/logfire-api/logfire_api/_internal/integrations/redis.pyi b/logfire-api/logfire_api/_internal/integrations/redis.pyi new file mode 100644 index 00000000..73247fff --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/redis.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def instrument_redis(**kwargs: Any): + """Instrument the `redis` module so that spans are automatically created for each operation. + + See the `Logfire.instrument_redis` method for details. + """ diff --git a/logfire-api/logfire_api/_internal/integrations/requests.pyi b/logfire-api/logfire_api/_internal/integrations/requests.pyi new file mode 100644 index 00000000..b5e4c7e5 --- /dev/null +++ b/logfire-api/logfire_api/_internal/integrations/requests.pyi @@ -0,0 +1,7 @@ +from typing import Any + +def instrument_requests(excluded_urls: str | None = None, **kwargs: Any): + """Instrument the `requests` module so that spans are automatically created for each request. + + See the `Logfire.instrument_requests` method for details. 
+    """
diff --git a/logfire-api/logfire_api/_internal/integrations/sqlalchemy.pyi b/logfire-api/logfire_api/_internal/integrations/sqlalchemy.pyi
new file mode 100644
index 00000000..fd35eaaa
--- /dev/null
+++ b/logfire-api/logfire_api/_internal/integrations/sqlalchemy.pyi
@@ -0,0 +1,7 @@
+from typing import Any
+
+def instrument_sqlalchemy(**kwargs: Any):
+    """Instrument the `sqlalchemy` module so that spans are automatically created for each query.
+
+    See the `Logfire.instrument_sqlalchemy` method for details.
+    """
diff --git a/logfire-api/logfire_api/_internal/integrations/starlette.pyi b/logfire-api/logfire_api/_internal/integrations/starlette.pyi
new file mode 100644
index 00000000..15024d60
--- /dev/null
+++ b/logfire-api/logfire_api/_internal/integrations/starlette.pyi
@@ -0,0 +1,8 @@
+from starlette.applications import Starlette
+from typing import Any
+
+def instrument_starlette(app: Starlette, **kwargs: Any):
+    """Instrument `app` so that spans are automatically created for each request.
+
+    See the `Logfire.instrument_starlette` method for details.
+    """
diff --git a/logfire-api/logfire_api/_internal/json_encoder.pyi b/logfire-api/logfire_api/_internal/json_encoder.pyi
new file mode 100644
index 00000000..491e1704
--- /dev/null
+++ b/logfire-api/logfire_api/_internal/json_encoder.pyi
@@ -0,0 +1,12 @@
+from .utils import JsonValue as JsonValue, safe_repr as safe_repr
+from _typeshed import Incomplete
+from typing import Any
+
+NUMPY_DIMENSION_MAX_SIZE: int
+EncoderFunction: Incomplete
+
+def encoder_by_type() -> dict[type[Any], EncoderFunction]: ...
+def to_json_value(o: Any, seen: set[int]) -> JsonValue: ...
+def logfire_json_dumps(obj: Any) -> str: ...
+def is_sqlalchemy(obj: Any) -> bool: ...
+def is_attrs(obj: Any) -> bool: ...
diff --git a/logfire-api/logfire_api/_internal/json_formatter.pyi b/logfire-api/logfire_api/_internal/json_formatter.pyi
new file mode 100644
index 00000000..e14d770e
--- /dev/null
+++ b/logfire-api/logfire_api/_internal/json_formatter.pyi
@@ -0,0 +1,15 @@
+from .json_types import ArraySchema as ArraySchema, DataType as DataType, JSONSchema as JSONSchema
+from .utils import safe_repr as safe_repr
+from _typeshed import Incomplete
+from typing import Any
+
+class JsonArgsValueFormatter:
+    """Format values recursively based on the information provided in the value dict.
+
+    When a custom format is identified, the `$__datatype__` key is always present.
+    """
+    def __init__(self, *, indent: int) -> None: ...
+    def __call__(self, value: Any, *, schema: JSONSchema | None = None, indent_current: int = 0): ...
+
+json_args_value_formatter: Incomplete
+json_args_value_formatter_compact: Incomplete
diff --git a/logfire-api/logfire_api/_internal/json_schema.pyi b/logfire-api/logfire_api/_internal/json_schema.pyi
new file mode 100644
index 00000000..ca638055
--- /dev/null
+++ b/logfire-api/logfire_api/_internal/json_schema.pyi
@@ -0,0 +1,21 @@
+from .utils import JsonDict
+from _typeshed import Incomplete
+from typing import Any
+
+__all__ = ['create_json_schema', 'attributes_json_schema_properties', 'attributes_json_schema', 'JsonSchemaProperties']
+
+def create_json_schema(obj: Any, seen: set[int]) -> JsonDict:
+    """Create a JSON Schema from the given object.
+
+    Args:
+        obj: The object to create the JSON Schema from.
+        seen: A set of object IDs that have already been processed.
+
+    Returns:
+        The JSON Schema.
+    """
+
+JsonSchemaProperties: Incomplete
+
+def attributes_json_schema(properties: JsonSchemaProperties) -> str: ...
+def attributes_json_schema_properties(attributes: dict[str, Any]) -> JsonSchemaProperties: ... diff --git a/logfire-api/logfire_api/_internal/json_types.pyi b/logfire-api/logfire_api/_internal/json_types.pyi new file mode 100644 index 00000000..ee6831eb --- /dev/null +++ b/logfire-api/logfire_api/_internal/json_types.pyi @@ -0,0 +1,55 @@ +from _typeshed import Incomplete +from typing import Any, Literal, TypeVar, TypedDict +from typing_extensions import NotRequired + +T = TypeVar('T') +DataType: Incomplete +DateFormat: Incomplete +IPFormat: Incomplete +Format: Incomplete +_EnumBase = TypedDict('_EnumBase', {'x-python-datatype': Literal['Enum']}) + +class _EnumAny(_EnumBase): + type: Literal['object'] + enum: list[Any] + +class _EnumString(_EnumBase): + type: Literal['string'] + enum: list[str] + +class _EnumInt(_EnumBase): + type: Literal['integer'] + enum: list[int] + +class _EnumFloat(_EnumBase): + type: Literal['number'] + enum: list[float] + +class _EnumBool(_EnumBase): + type: Literal['boolean'] + enum: list[bool] + +EnumSchema: Incomplete + +class _Items(TypedDict): + items: JSONSchema + +class _PrefixItems(TypedDict): + prefixItems: list[JSONSchema] + +_ArrayBase = TypedDict('_ArrayBase', {'type': Literal['array'], 'x-python-datatype': Literal['tuple', 'deque', 'set', 'frozenset', 'ndarray'], 'x-columns': NotRequired[list[str]], 'x-indices': NotRequired[list[Any]], 'x-shape': NotRequired[list[int]], 'x-dtype': NotRequired[str]}) + +class _ArrayItems(_ArrayBase, _Items): ... +class _ArrayPrefixItems(_ArrayBase, _PrefixItems): ... + +ArraySchema: Incomplete +_PropertyDataType = TypedDict('_PropertyDataType', {'x-python-datatype': DataType}, total=False) +Type: Incomplete + +class _Property(_PropertyDataType, total=False): + type: Type + title: str + format: Format + properties: dict[str, JSONSchema] + +JSONSchema: Incomplete diff --git a/logfire-api/logfire_api/_internal/main.pyi b/logfire-api/logfire_api/_internal/main.pyi new file mode 100644 index 00000000..87003a55 --- /dev/null +++ b/logfire-api/logfire_api/_internal/main.pyi @@ -0,0 +1,927 @@ +import anthropic +import openai +import opentelemetry.trace as trace_api +import typing +from . 
import async_ as async_ +from ..version import VERSION as VERSION +from .auto_trace import AutoTraceModule as AutoTraceModule, install_auto_tracing as install_auto_tracing +from .config import GLOBAL_CONFIG as GLOBAL_CONFIG, LogfireConfig as LogfireConfig +from .constants import ATTRIBUTES_JSON_SCHEMA_KEY as ATTRIBUTES_JSON_SCHEMA_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_SAMPLE_RATE_KEY as ATTRIBUTES_SAMPLE_RATE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, ATTRIBUTES_TAGS_KEY as ATTRIBUTES_TAGS_KEY, ATTRIBUTES_VALIDATION_ERROR_KEY as ATTRIBUTES_VALIDATION_ERROR_KEY, DISABLE_CONSOLE_KEY as DISABLE_CONSOLE_KEY, LevelName as LevelName, NULL_ARGS_KEY as NULL_ARGS_KEY, OTLP_MAX_INT_SIZE as OTLP_MAX_INT_SIZE, log_level_attributes as log_level_attributes +from .formatter import logfire_format as logfire_format, logfire_format_with_magic as logfire_format_with_magic +from .instrument import LogfireArgs as LogfireArgs, instrument as instrument +from .json_encoder import logfire_json_dumps as logfire_json_dumps +from .json_schema import JsonSchemaProperties as JsonSchemaProperties, attributes_json_schema as attributes_json_schema, attributes_json_schema_properties as attributes_json_schema_properties, create_json_schema as create_json_schema +from .metrics import ProxyMeterProvider as ProxyMeterProvider +from .stack_info import get_user_stack_info as get_user_stack_info +from .tracer import ProxyTracerProvider as ProxyTracerProvider +from .utils import handle_internal_errors as handle_internal_errors, log_internal_error as log_internal_error, uniquify_sequence as uniquify_sequence +from django.http import HttpRequest as HttpRequest, HttpResponse as HttpResponse +from fastapi import FastAPI +from flask.app import Flask +from opentelemetry.metrics import CallbackT as CallbackT, Counter, Histogram, UpDownCounter, _Gauge as Gauge +from opentelemetry.sdk.trace import ReadableSpan, Span as Span +from opentelemetry.trace import Tracer +from opentelemetry.util import types as otel_types +from starlette.applications import Starlette +from starlette.requests import Request as Request +from starlette.websockets import WebSocket as WebSocket +from types import TracebackType as TracebackType +from typing import Any, Callable, ContextManager, Iterable, Literal, Sequence, TypeVar +from typing_extensions import LiteralString + +ExcInfo: typing.TypeAlias + +class Logfire: + """The main logfire class.""" + def __init__(self, *, config: LogfireConfig = ..., sample_rate: float | None = None, tags: Sequence[str] = (), console_log: bool = True, otel_scope: str = 'logfire') -> None: ... + @property + def config(self) -> LogfireConfig: ... + def trace(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log a trace message. + + ```py + import logfire + + logfire.configure() + + logfire.trace('This is a trace log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + """ + def debug(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log a debug message. 
+ + ```py + import logfire + + logfire.configure() + + logfire.debug('This is a debug log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + """ + def info(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log an info message. + + ```py + import logfire + + logfire.configure() + + logfire.info('This is an info log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + """ + def notice(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log a notice message. + + ```py + import logfire + + logfire.configure() + + logfire.notice('This is a notice log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + """ + def warn(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log a warning message. + + ```py + import logfire + + logfire.configure() + + logfire.warn('This is a warning log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + """ + def error(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log an error message. + + ```py + import logfire + + logfire.configure() + + logfire.error('This is an error log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + """ + def fatal(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = False, **attributes: Any) -> None: + """Log a fatal message. + + ```py + import logfire + + logfire.configure() + + logfire.fatal('This is a fatal log') + ``` + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. 
+ """ + def exception(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _exc_info: ExcInfo = True, **attributes: Any) -> None: + """The same as `error` but with `_exc_info=True` by default. + + This means that a traceback will be logged for any currently handled exception. + + Args: + msg_template: The message to log. + attributes: The attributes to bind to the log. + _tags: An optional sequence of tags to include in the log. + _exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + """ + def span(self, msg_template: str, /, *, _tags: Sequence[str] | None = None, _span_name: str | None = None, _level: LevelName | None = None, **attributes: Any) -> LogfireSpan: + """Context manager for creating a span. + + ```py + import logfire + + logfire.configure() + + with logfire.span('This is a span {a=}', a='data'): + logfire.info('new log 1') + ``` + + Args: + msg_template: The template for the span message. + _span_name: The span name. If not provided, the `msg_template` will be used. + _tags: An optional sequence of tags to include in the span. + _level: An optional log level name. + attributes: The arguments to include in the span and format the message template with. + Attributes starting with an underscore are not allowed. + """ + def instrument(self, msg_template: LiteralString | None = None, *, span_name: str | None = None, extract_args: bool = True) -> Callable[[Callable[_PARAMS, _RETURN]], Callable[_PARAMS, _RETURN]]: + """Decorator for instrumenting a function as a span. + + ```py + import logfire + + logfire.configure() + + + @logfire.instrument('This is a span {a=}') + def my_function(a: int): + logfire.info('new log {a=}', a=a) + ``` + + !!! note + - This decorator MUST be applied first, i.e. UNDER any other decorators. + - The source code of the function MUST be accessible. + + Args: + msg_template: The template for the span message. If not provided, the module and function name will be used. + span_name: The span name. If not provided, the `msg_template` will be used. + extract_args: Whether to extract arguments from the function signature and log them as span attributes. + """ + def log(self, level: LevelName | int, msg_template: str, attributes: dict[str, Any] | None = None, tags: Sequence[str] | None = None, exc_info: ExcInfo = False, console_log: bool | None = None, custom_scope_suffix: str | None = None) -> None: + """Log a message. + + ```py + import logfire + + logfire.configure() + + logfire.log('info', 'This is a log {a}', {'a': 'Apple'}) + ``` + + Args: + level: The level of the log. + msg_template: The message to log. + attributes: The attributes to bind to the log. + tags: An optional sequence of tags to include in the log. + exc_info: Set to an exception or a tuple as returned by [`sys.exc_info()`][sys.exc_info] + to record a traceback with the log message. + + Set to `True` to use the currently handled exception. + console_log: Whether to log to the console, defaults to `True`. + custom_scope_suffix: A custom suffix to append to `logfire.` e.g. `logfire.loguru`. + + It should only be used when instrumenting another library with Logfire, such as structlog or loguru. + + See the `instrumenting_module_name` parameter on + [TracerProvider.get_tracer][opentelemetry.sdk.trace.TracerProvider.get_tracer] for more info. + """ + def with_tags(self, *tags: str) -> Logfire: + """A new Logfire instance which always uses the given tags. 
+
+        ```py
+        import logfire
+
+        logfire.configure()
+
+        local_logfire = logfire.with_tags('tag1')
+        local_logfire.info('a log message', _tags=['tag2'])
+
+        # This is equivalent to:
+        logfire.info('a log message', _tags=['tag1', 'tag2'])
+        ```
+
+        Args:
+            tags: The tags to add.
+
+        Returns:
+            A new Logfire instance with the `tags` added to any existing tags.
+        """
+    def with_trace_sample_rate(self, sample_rate: float) -> Logfire:
+        """A new Logfire instance with the given sampling ratio applied.
+
+        Args:
+            sample_rate: The sampling ratio to use.
+
+        Returns:
+            A new Logfire instance with the sampling ratio applied.
+        """
+    def with_settings(self, *, tags: Sequence[str] = (), stack_offset: int | None = None, console_log: bool | None = None, custom_scope_suffix: str | None = None) -> Logfire:
+        """A new Logfire instance which uses the given settings.
+
+        Args:
+            tags: Sequence of tags to include in the log.
+            stack_offset: The stack level offset to use when collecting stack info. This also affects the warning
+                which message formatting might emit. Defaults to `0`, which means the stack info will be collected
+                from the position where [`logfire.log`][logfire.Logfire.log] was called.
+            console_log: Whether to log to the console, defaults to `True`.
+            custom_scope_suffix: A custom suffix to append to `logfire.` e.g. `logfire.loguru`.
+
+                It should only be used when instrumenting another library with Logfire, such as structlog or loguru.
+
+                See the `instrumenting_module_name` parameter on
+                [TracerProvider.get_tracer][opentelemetry.sdk.trace.TracerProvider.get_tracer] for more info.
+
+        Returns:
+            A new Logfire instance with the given settings applied.
+        """
+    def force_flush(self, timeout_millis: int = 3000) -> bool:
+        """Force flush all spans.
+
+        Args:
+            timeout_millis: The timeout in milliseconds.
+
+        Returns:
+            Whether the flush was successful.
+        """
+    def log_slow_async_callbacks(self, slow_duration: float = 0.1) -> ContextManager[None]:
+        """Log a warning whenever a function running in the asyncio event loop blocks for too long.
+
+        This works by patching the `asyncio.events.Handle._run` method.
+
+        Args:
+            slow_duration: The threshold in seconds for when a callback is considered slow.
+
+        Returns:
+            A context manager that will revert the patch when exited.
+            This context manager doesn't take into account threads or other concurrency.
+            Calling this method will immediately apply the patch
+            without waiting for the context manager to be opened,
+            i.e. it's not necessary to use this as a context manager.
+        """
+    def install_auto_tracing(self, modules: Sequence[str] | Callable[[AutoTraceModule], bool], *, check_imported_modules: Literal['error', 'warn', 'ignore'] = 'error', min_duration: float = 0) -> None:
+        """Install automatic tracing.
+
+        This will trace all function calls in the modules specified by the modules argument.
+        It's equivalent to wrapping the body of every function in matching modules in `with logfire.span(...):`.
+
+        !!! note
+            This function MUST be called before any of the modules to be traced are imported.
+
+            This works by inserting a new meta path finder into `sys.meta_path`, so inserting another finder before it
+            may prevent it from working.
+
+            It relies on being able to retrieve the source code via at least one other existing finder in the meta path,
+            so it may not work if standard finders are not present or if the source code is not available.
+            A modified version of the source code is then compiled and executed in place of the original module.
+ + Args: + modules: List of module names to trace, or a function which returns True for modules that should be traced. + If a list is provided, any submodules within a given module will also be traced. + check_imported_modules: If this is `'error'` (the default), then an exception will be raised if any of the + modules in `sys.modules` (i.e. modules that have already been imported) match the modules to trace. + Set to `'warn'` to issue a warning instead, or `'ignore'` to skip the check. + min_duration: An optional minimum duration in seconds for which a function must run before it's traced. + The default is `0`, which means all functions are traced from the beginning. + Otherwise, the first time(s) each function is called, it will be timed but not traced. + Only after the function has run for at least `min_duration` will it be traced in subsequent calls. + """ + def instrument_fastapi(self, app: FastAPI, *, request_attributes_mapper: Callable[[Request | WebSocket, dict[str, Any]], dict[str, Any] | None] | None = None, use_opentelemetry_instrumentation: bool = True, excluded_urls: str | Iterable[str] | None = None, **opentelemetry_kwargs: Any) -> ContextManager[None]: + """Instrument a FastAPI app so that spans and logs are automatically created for each request. + + Args: + app: The FastAPI app to instrument. + request_attributes_mapper: A function that takes a [`Request`][fastapi.Request] or [`WebSocket`][fastapi.WebSocket] + and a dictionary of attributes and returns a new dictionary of attributes. + The input dictionary will contain: + + - `values`: A dictionary mapping argument names of the endpoint function to parsed and validated values. + - `errors`: A list of validation errors for any invalid inputs. + + The returned dictionary will be used as the attributes for a log message. + If `None` is returned, no log message will be created. + + You can use this to e.g. only log validation errors, or nothing at all. + You can also add custom attributes. + + The default implementation will return the input dictionary unchanged. + The function mustn't modify the contents of `values` or `errors`. + excluded_urls: A string of comma-separated regexes which will exclude a request from tracing if the full URL + matches any of the regexes. This applies to both the Logfire and OpenTelemetry instrumentation. + If not provided, the environment variables + `OTEL_PYTHON_FASTAPI_EXCLUDED_URLS` and `OTEL_PYTHON_EXCLUDED_URLS` will be checked. + use_opentelemetry_instrumentation: If True (the default) then + [`FastAPIInstrumentor`][opentelemetry.instrumentation.fastapi.FastAPIInstrumentor] + will also instrument the app. + + See [OpenTelemetry FastAPI Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/fastapi/fastapi.html). + opentelemetry_kwargs: Additional keyword arguments to pass to the OpenTelemetry FastAPI instrumentation. + + Returns: + A context manager that will revert the instrumentation when exited. + This context manager doesn't take into account threads or other concurrency. + Calling this method will immediately apply the instrumentation + without waiting for the context manager to be opened, + i.e. it's not necessary to use this as a context manager. 
+ """ + def instrument_openai(self, openai_client: openai.OpenAI | openai.AsyncOpenAI | type[openai.OpenAI] | type[openai.AsyncOpenAI] | None = None, *, suppress_other_instrumentation: bool = True) -> ContextManager[None]: + """Instrument an OpenAI client so that spans are automatically created for each request. + + The following methods are instrumented for both the sync and the async clients: + + - [`client.chat.completions.create`](https://platform.openai.com/docs/guides/text-generation/chat-completions-api) — with and without `stream=True` + - [`client.completions.create`](https://platform.openai.com/docs/guides/text-generation/completions-api) — with and without `stream=True` + - [`client.embeddings.create`](https://platform.openai.com/docs/guides/embeddings/how-to-get-embeddings) + - [`client.images.generate`](https://platform.openai.com/docs/guides/images/generations) + + When `stream=True` a second span is created to instrument the streamed response. + + Example usage: + + ```python + import logfire + import openai + + client = openai.OpenAI() + logfire.configure() + logfire.instrument_openai(client) + + response = client.chat.completions.create( + model='gpt-4', + messages=[ + {'role': 'system', 'content': 'You are a helpful assistant.'}, + {'role': 'user', 'content': 'What is four plus five?'}, + ], + ) + print('answer:', response.choices[0].message.content) + ``` + + Args: + openai_client: The OpenAI client or class to instrument: + + - `None` (the default) to instrument both the `openai.OpenAI` and `openai.AsyncOpenAI` classes. + - The `openai.OpenAI` class or a subclass + - The `openai.AsyncOpenAI` class or a subclass + - An instance of `openai.OpenAI` + - An instance of `openai.AsyncOpenAI` + + suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise + enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since + OpenAI uses HTTPX to make HTTP requests. + + Returns: + A context manager that will revert the instrumentation when exited. + Use of this context manager is optional. + """ + def instrument_anthropic(self, anthropic_client: anthropic.Anthropic | anthropic.AsyncAnthropic | type[anthropic.Anthropic] | type[anthropic.AsyncAnthropic] | None = None, *, suppress_other_instrumentation: bool = True) -> ContextManager[None]: + """Instrument an Anthropic client so that spans are automatically created for each request. + + The following methods are instrumented for both the sync and the async clients: + + - [`client.messages.create`](https://docs.anthropic.com/en/api/messages) + - [`client.messages.stream`](https://docs.anthropic.com/en/api/messages-streaming) + - [`client.beta.tools.messages.create`](https://docs.anthropic.com/en/docs/tool-use) + + When `stream=True` a second span is created to instrument the streamed response. + + Example usage: + + ```python + import logfire + import anthropic + + client = anthropic.Anthropic() + logfire.configure() + logfire.instrument_anthropic(client) + + response = client.messages.create( + model='claude-3-haiku-20240307', + system='You are a helpful assistant.', + messages=[ + {'role': 'user', 'content': 'What is four plus five?'}, + ], + ) + print('answer:', response.content[0].text) + ``` + + Args: + anthropic_client: The Anthropic client or class to instrument: + + - `None` (the default) to instrument both the + `anthropic.Anthropic` and `anthropic.AsyncAnthropic` classes. 
+                - The `anthropic.Anthropic` class or a subclass
+                - The `anthropic.AsyncAnthropic` class or a subclass
+                - An instance of `anthropic.Anthropic`
+                - An instance of `anthropic.AsyncAnthropic`
+
+            suppress_other_instrumentation: If True, suppress any other OTEL instrumentation that may be otherwise
+                enabled. In reality, this means the HTTPX instrumentation, which could otherwise be called since
+                Anthropic uses HTTPX to make HTTP requests.
+
+        Returns:
+            A context manager that will revert the instrumentation when exited.
+            Use of this context manager is optional.
+        """
+    def instrument_asyncpg(self):
+        """Instrument the `asyncpg` module so that spans are automatically created for each query."""
+    def instrument_httpx(self, **kwargs: Any):
+        """Instrument the `httpx` module so that spans are automatically created for each request.
+
+        Uses the
+        [OpenTelemetry HTTPX Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/httpx/httpx.html)
+        library, specifically `HTTPXClientInstrumentor().instrument()`, to which it passes `**kwargs`.
+        """
+    def instrument_django(self, is_sql_commentor_enabled: bool | None = None, request_hook: Callable[[Span, HttpRequest], None] | None = None, response_hook: Callable[[Span, HttpRequest, HttpResponse], None] | None = None, excluded_urls: str | None = None, **kwargs: Any) -> None:
+        """Instrument `django` so that spans are automatically created for each web request.
+
+        Uses the
+        [OpenTelemetry Django Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/django/django.html)
+        library.
+
+        Args:
+            is_sql_commentor_enabled: Adds comments to SQL queries performed by Django,
+                so that database logs have additional context.
+
+                This does NOT create spans/logs for the queries themselves.
+                For that you need to instrument the database driver, e.g. with `logfire.instrument_psycopg()`.
+
+                To configure the SQL Commentor, see the OpenTelemetry documentation for the
+                values that need to be added to `settings.py`.
+
+            request_hook: A function called right after a span is created for a request.
+                The function should accept two arguments: the span and the Django `Request` object.
+
+            response_hook: A function called right before a span is finished for the response.
+                The function should accept three arguments:
+                the span, the Django `Request` object, and the Django `Response` object.
+
+            excluded_urls: A string containing a comma-delimited list of regexes used to exclude URLs from tracking.
+
+            **kwargs: Additional keyword arguments to pass to the OpenTelemetry `instrument` method,
+                for future compatibility.
+
+        """
+    def instrument_requests(self, excluded_urls: str | None = None, **kwargs: Any):
+        """Instrument the `requests` module so that spans are automatically created for each request.
+
+        Args:
+            excluded_urls: A string containing a comma-delimited list of regexes used to exclude URLs from tracking.
+            **kwargs: Additional keyword arguments to pass to the OpenTelemetry `instrument` methods,
+                particularly `request_hook` and `response_hook`.
+        """
+    def instrument_psycopg(self, conn_or_module: Any = None, **kwargs: Any):
+        """Instrument a `psycopg` connection or module so that spans are automatically created for each query.
+ + Uses the OpenTelemetry instrumentation libraries for + [`psycopg`](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/psycopg/psycopg.html) + and + [`psycopg2`](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/psycopg2/psycopg2.html). + + Args: + conn_or_module: Can be: + + - The `psycopg` (version 3) or `psycopg2` module. + - The string `'psycopg'` or `'psycopg2'` to instrument the module. + - `None` (the default) to instrument whichever module(s) are installed. + - A `psycopg` or `psycopg2` connection. + + **kwargs: Additional keyword arguments to pass to the OpenTelemetry `instrument` methods, + particularly `enable_commenter` and `commenter_options`. + """ + def instrument_flask(self, app: Flask, **kwargs: Any): + """Instrument `app` so that spans are automatically created for each request. + + Uses the + [OpenTelemetry Flask Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/flask/flask.html) + library, specifically `FlaskInstrumentor().instrument_app()`, to which it passes `**kwargs`. + """ + def instrument_starlette(self, app: Starlette, **kwargs: Any): + """Instrument `app` so that spans are automatically created for each request. + + Uses the + [OpenTelemetry Starlette Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/starlette/starlette.html) + library, specifically `StarletteInstrumentor.instrument_app()`, to which it passes `**kwargs`. + """ + def instrument_aiohttp_client(self, **kwargs: Any): + """Instrument the `aiohttp` module so that spans are automatically created for each client request. + + Uses the + [OpenTelemetry aiohttp client Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/aiohttp_client/aiohttp_client.html) + library, specifically `AioHttpClientInstrumentor().instrument()`, to which it passes `**kwargs`. + """ + def instrument_sqlalchemy(self, **kwargs: Any): + """Instrument the `sqlalchemy` module so that spans are automatically created for each query. + + Uses the + [OpenTelemetry SQLAlchemy Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/sqlalchemy/sqlalchemy.html) + library, specifically `SQLAlchemyInstrumentor().instrument()`, to which it passes `**kwargs`. + """ + def instrument_pymongo(self, **kwargs: Any): + """Instrument the `pymongo` module so that spans are automatically created for each operation. + + Uses the + [OpenTelemetry pymongo Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/pymongo/pymongo.html) + library, specifically `PymongoInstrumentor().instrument()`, to which it passes `**kwargs`. + """ + def instrument_redis(self, **kwargs: Any): + """Instrument the `redis` module so that spans are automatically created for each operation. + + Uses the + [OpenTelemetry Redis Instrumentation](https://opentelemetry-python-contrib.readthedocs.io/en/latest/instrumentation/redis/redis.html) + library, specifically `RedisInstrumentor().instrument()`, to which it passes `**kwargs`. + """ + def metric_counter(self, name: str, *, unit: str = '', description: str = '') -> Counter: + """Create a counter metric. + + A counter is a cumulative metric that represents a single numerical value that only ever goes up. 
+
+        ```py
+        import logfire
+
+        logfire.configure()
+        counter = logfire.metric_counter('exceptions', unit='1', description='Number of exceptions caught')
+
+        try:
+            raise Exception('oops')
+        except Exception:
+            counter.add(1)
+        ```
+
+        See the [Opentelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#counter) about
+        counters.
+
+        Args:
+            name: The name of the metric.
+            unit: The unit of the metric.
+            description: The description of the metric.
+
+        Returns:
+            The counter metric.
+        """
+    def metric_histogram(self, name: str, *, unit: str = '', description: str = '') -> Histogram:
+        """Create a histogram metric.
+
+        A histogram is a metric that samples observations (usually things like request durations or response sizes).
+
+        ```py
+        import logfire
+
+        logfire.configure()
+        histogram = logfire.metric_histogram('bank.amount_transferred', unit='$', description='Amount transferred')
+
+
+        def transfer(amount: int):
+            histogram.record(amount)
+        ```
+
+        See the [Opentelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#histogram) about
+        histograms.
+
+        Args:
+            name: The name of the metric.
+            unit: The unit of the metric.
+            description: The description of the metric.
+
+        Returns:
+            The histogram metric.
+        """
+    def metric_gauge(self, name: str, *, unit: str = '', description: str = '') -> Gauge:
+        """Create a gauge metric.
+
+        A gauge is a synchronous instrument which can be used to record non-additive measurements.
+
+        ```py
+        import logfire
+
+        logfire.configure()
+        gauge = logfire.metric_gauge('system.cpu_usage', unit='%', description='CPU usage')
+
+
+        def update_cpu_usage(cpu_percent):
+            gauge.set(cpu_percent)
+        ```
+
+        See the [Opentelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#gauge) about gauges.
+
+        Args:
+            name: The name of the metric.
+            unit: The unit of the metric.
+            description: The description of the metric.
+
+        Returns:
+            The gauge metric.
+        """
+    def metric_up_down_counter(self, name: str, *, unit: str = '', description: str = '') -> UpDownCounter:
+        """Create an up-down counter metric.
+
+        An up-down counter is a cumulative metric that represents a single numerical value that can be adjusted up or
+        down.
+
+        ```py
+        import logfire
+
+        logfire.configure()
+        up_down_counter = logfire.metric_up_down_counter('users.logged_in', unit='1', description='Users logged in')
+
+
+        def on_login(user):
+            up_down_counter.add(1)
+
+
+        def on_logout(user):
+            up_down_counter.add(-1)
+        ```
+
+        See the [Opentelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#updowncounter) about
+        up-down counters.
+
+        Args:
+            name: The name of the metric.
+            unit: The unit of the metric.
+            description: The description of the metric.
+
+        Returns:
+            The up-down counter metric.
+        """
+    def metric_counter_callback(self, name: str, *, callbacks: Sequence[CallbackT], unit: str = '', description: str = '') -> None:
+        """Create a counter metric that uses a callback to collect observations.
+
+        The counter metric is a cumulative metric that represents a single numerical value that only ever goes up.
+ + ```py + import logfire + import psutil + from opentelemetry.metrics import CallbackOptions, Observation + + logfire.configure() + + + def cpu_usage_callback(options: CallbackOptions): + cpu_percents = psutil.cpu_percent(percpu=True) + + for i, cpu_percent in enumerate(cpu_percents): + yield Observation(cpu_percent, {'cpu': i}) + + + cpu_usage_counter = logfire.metric_counter_callback( + 'system.cpu.usage', + callbacks=[cpu_usage_callback], + unit='%', + description='CPU usage', + ) + ``` + + See the [OpenTelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#asynchronous-counter) + about asynchronous counters. + + Args: + name: The name of the metric. + callbacks: A sequence of callbacks that return an iterable of + [Observation](https://opentelemetry-python.readthedocs.io/en/latest/api/metrics.html#opentelemetry.metrics.Observation). + unit: The unit of the metric. + description: The description of the metric. + """ + def metric_gauge_callback(self, name: str, callbacks: Sequence[CallbackT], *, unit: str = '', description: str = '') -> None: + """Create a gauge metric that uses a callback to collect observations. + + The gauge metric is a metric that represents a single numerical value that can arbitrarily go up and down. + + ```py + import threading + + import logfire + from opentelemetry.metrics import CallbackOptions, Observation + + logfire.configure() + + + def thread_count_callback(options: CallbackOptions): + yield Observation(threading.active_count()) + + + logfire.metric_gauge_callback( + 'system.thread_count', + callbacks=[thread_count_callback], + unit='1', + description='Number of threads', + ) + ``` + + See the [OpenTelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#asynchronous-gauge) + about asynchronous gauges. + + Args: + name: The name of the metric. + callbacks: A sequence of callbacks that return an iterable of + [Observation](https://opentelemetry-python.readthedocs.io/en/latest/api/metrics.html#opentelemetry.metrics.Observation). + unit: The unit of the metric. + description: The description of the metric. + """ + def metric_up_down_counter_callback(self, name: str, callbacks: Sequence[CallbackT], *, unit: str = '', description: str = '') -> None: + """Create an up-down counter metric that uses a callback to collect observations. + + The up-down counter is a cumulative metric that represents a single numerical value that can be adjusted up or + down. + + ```py + import logfire + from opentelemetry.metrics import CallbackOptions, Observation + + logfire.configure() + + items = [] + + + def inventory_callback(options: CallbackOptions): + yield Observation(len(items)) + + + logfire.metric_up_down_counter_callback( + name='store.inventory', + description='Number of items in the inventory', + callbacks=[inventory_callback], + ) + ``` + + See the [OpenTelemetry documentation](https://opentelemetry.io/docs/specs/otel/metrics/api/#asynchronous-updowncounter) + about asynchronous up-down counters. + + Args: + name: The name of the metric. + callbacks: A sequence of callbacks that return an iterable of + [Observation](https://opentelemetry-python.readthedocs.io/en/latest/api/metrics.html#opentelemetry.metrics.Observation). + unit: The unit of the metric. + description: The description of the metric. + """ + def shutdown(self, timeout_millis: int = 30000, flush: bool = True) -> bool: + """Shut down all tracers and meters.
+ + This will clean up any resources used by the tracers and meters and flush any remaining spans and metrics. + + Args: + timeout_millis: The timeout in milliseconds. + flush: Whether to flush remaining spans and metrics before shutting down. + + Returns: + `False` if the timeout was reached before the shutdown was completed, `True` otherwise. + """ + +class FastLogfireSpan: + """A simple version of `LogfireSpan` optimized for auto-tracing.""" + def __init__(self, span: trace_api.Span) -> None: ... + def __enter__(self) -> FastLogfireSpan: ... + def __exit__(self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: Any) -> None: ... + +class LogfireSpan(ReadableSpan): + end_on_exit: bool + def __init__(self, span_name: str, otlp_attributes: dict[str, otel_types.AttributeValue], tracer: Tracer, json_schema_properties: JsonSchemaProperties) -> None: ... + def __getattr__(self, name: str) -> Any: ... + def __enter__(self) -> LogfireSpan: ... + def __exit__(self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: Any) -> None: ... + @property + def message_template(self) -> str | None: ... + @property + def tags(self) -> Sequence[str]: ... + @property + def message(self) -> str: ... + @message.setter + def message(self, message: str): ... + def end(self) -> None: + """Sets the current time as the span's end time. + + The span's end time is the wall time at which the operation finished. + + Only the first call to this method is recorded; further calls are ignored, so you + can call this within the span's context manager to end it before the context manager + exits. + """ + def set_attribute(self, key: str, value: Any) -> None: + """Sets an attribute on the span. + + Args: + key: The key of the attribute. + value: The value of the attribute. + """ + def set_attributes(self, attributes: dict[str, otel_types.AttributeValue]) -> None: + """Sets the given attributes on the span.""" + def record_exception(self, exception: BaseException, attributes: otel_types.Attributes = None, timestamp: int | None = None, escaped: bool = False) -> None: + """Records an exception as a span event. + + Delegates to the OpenTelemetry SDK `Span.record_exception` method. + """ + def is_recording(self) -> bool: ... + def set_level(self, level: LevelName | int): + """Set the log level of this span.""" + +class NoopSpan: + """Implements the same methods as `LogfireSpan` but does nothing. + + Used in place of `LogfireSpan` and `FastLogfireSpan` when an exception occurs during span creation. + This way code like: + + with logfire.span(...) as span: + span.set_attribute(...) + + doesn't raise an error even if `logfire.span` fails internally. + If `logfire.span` just returned `None` then the `with` block and the `span.set_attribute` call would raise an error. + + TODO this should also be used when tracing is disabled, e.g. before `logfire.configure()` has been called. + """ + def __init__(self, /, *_args: Any, **__kwargs: Any) -> None: ... + def __getattr__(self, _name: str) -> Any: ... + def __enter__(self) -> NoopSpan: ... + def __exit__(self, exc_type: type[BaseException] | None, exc_value: BaseException | None, traceback: Any) -> None: ... + @property + def message_template(self) -> str: ... + @property + def tags(self) -> Sequence[str]: ... + @property + def message(self) -> str: ... + @message.setter + def message(self, message: str): ... + def is_recording(self) -> bool: ...
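The `end()` docstring above describes ending a span early, inside its own `with` block. A minimal sketch of that pattern (the `do_work` and `cleanup` helpers are hypothetical placeholders, not part of this diff):

```py
import logfire

logfire.configure()


def do_work() -> None: ...  # placeholder: the operation being timed


def cleanup() -> None: ...  # placeholder: work that shouldn't count towards the span


with logfire.span('process batch') as span:
    do_work()
    span.end()  # only this first call records the end time; later calls are ignored
    cleanup()   # still runs inside the block, but after the span has ended
```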
+AttributesValueType = TypeVar('AttributesValueType', bound=Any | otel_types.AttributeValue) + +def user_attributes(attributes: dict[str, Any]) -> dict[str, otel_types.AttributeValue]: + """Prepare attributes for sending to OpenTelemetry. + + This will convert any non-OpenTelemetry compatible types to JSON. + """ +def set_user_attribute(otlp_attributes: dict[str, otel_types.AttributeValue], key: str, value: Any) -> tuple[str, otel_types.AttributeValue]: + """Convert a user attribute to an OpenTelemetry compatible type and add it to the given dictionary. + + Returns the final key and value that were added to the dictionary. + The key will be the original key unless the value was `None`, in which case it will be `NULL_ARGS_KEY`. + """ diff --git a/logfire-api/logfire_api/_internal/metrics.pyi b/logfire-api/logfire_api/_internal/metrics.pyi new file mode 100644 index 00000000..aa76cc20 --- /dev/null +++ b/logfire-api/logfire_api/_internal/metrics.pyi @@ -0,0 +1,67 @@ +import dataclasses +from _typeshed import Incomplete +from abc import ABC +from opentelemetry.metrics import CallbackT as CallbackT, Counter, Histogram, Instrument, Meter, MeterProvider, ObservableCounter, ObservableGauge, ObservableUpDownCounter, UpDownCounter, _Gauge +from opentelemetry.util.types import Attributes +from threading import Lock +from typing import Generic, Sequence, TypeVar +from weakref import WeakSet + +Gauge: Incomplete +CPU_FIELDS: Incomplete +MEMORY_FIELDS: Incomplete +DEFAULT_CONFIG: Incomplete +INSTRUMENTOR: Incomplete + +def configure_metrics(meter_provider: MeterProvider) -> None: ... + +@dataclasses.dataclass +class ProxyMeterProvider(MeterProvider): + provider: MeterProvider + meters: WeakSet[_ProxyMeter] = ... + lock: Lock = ... + def get_meter(self, name: str, version: str | None = None, schema_url: str | None = None) -> Meter: ... + def set_meter_provider(self, meter_provider: MeterProvider) -> None: ... + def shutdown(self, timeout_millis: float = 30000) -> None: ... + def force_flush(self, timeout_millis: float = 30000) -> None: ... + +class _ProxyMeter(Meter): + def __init__(self, meter: Meter, name: str, version: str | None, schema_url: str | None) -> None: ... + def set_meter(self, meter_provider: MeterProvider) -> None: + """Called when a real meter provider is set on the creating ProxyMeterProvider. + + Creates a real backing meter for this instance and notifies all created + instruments so they can create real backing instruments. + """ + def create_counter(self, name: str, unit: str = '', description: str = '') -> Counter: ... + def create_up_down_counter(self, name: str, unit: str = '', description: str = '') -> UpDownCounter: ... + def create_observable_counter(self, name: str, callbacks: Sequence[CallbackT] | None = None, unit: str = '', description: str = '') -> ObservableCounter: ... + def create_histogram(self, name: str, unit: str = '', description: str = '') -> Histogram: ... + def create_gauge(self, name: str, unit: str = '', description: str = '') -> _Gauge: ... + def create_observable_gauge(self, name: str, callbacks: Sequence[CallbackT] | None = None, unit: str = '', description: str = '') -> ObservableGauge: ... + def create_observable_up_down_counter(self, name: str, callbacks: Sequence[CallbackT] | None = None, unit: str = '', description: str = '') -> ObservableUpDownCounter: ...
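These proxy classes are what let instruments be created before a real `MeterProvider` exists and be transparently re-backed once one is set. A hedged sketch of the behaviour the stubs above imply (assuming, as `ProxyMeterProvider.set_meter_provider` and `_ProxyMeter.set_meter` suggest, that instruments created early become real once configuration happens):

```py
import logfire

# Created via the proxy meter: effectively a no-op until a real provider is set.
counter = logfire.metric_counter('jobs.processed', unit='1', description='Jobs processed')

logfire.configure()  # a real MeterProvider is swapped into the proxy here

counter.add(1)  # now recorded by the real backing instrument
```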
+InstrumentT = TypeVar('InstrumentT', bound=Instrument) + +class _ProxyInstrument(ABC, Generic[InstrumentT]): + def __init__(self, instrument: InstrumentT, name: str, unit: str, description: str) -> None: ... + def on_meter_set(self, meter: Meter) -> None: + """Called when a real meter is set on the creating _ProxyMeter.""" + +class _ProxyAsynchronousInstrument(_ProxyInstrument[InstrumentT], ABC): + def __init__(self, instrument: InstrumentT, name: str, callbacks: Sequence[CallbackT] | None, unit: str, description: str) -> None: ... + +class _ProxyCounter(_ProxyInstrument[Counter], Counter): + def add(self, amount: int | float, attributes: Attributes | None = None) -> None: ... + +class _ProxyHistogram(_ProxyInstrument[Histogram], Histogram): + def record(self, amount: int | float, attributes: Attributes | None = None) -> None: ... + +class _ProxyObservableCounter(_ProxyAsynchronousInstrument[ObservableCounter], ObservableCounter): ... +class _ProxyObservableGauge(_ProxyAsynchronousInstrument[ObservableGauge], ObservableGauge): ... +class _ProxyObservableUpDownCounter(_ProxyAsynchronousInstrument[ObservableUpDownCounter], ObservableUpDownCounter): ... + +class _ProxyUpDownCounter(_ProxyInstrument[UpDownCounter], UpDownCounter): + def add(self, amount: int | float, attributes: Attributes | None = None) -> None: ... + +class _ProxyGauge(_ProxyInstrument[Gauge], Gauge): + def set(self, amount: int | float, attributes: Attributes | None = None) -> None: ... diff --git a/logfire-api/logfire_api/_internal/scrubbing.pyi b/logfire-api/logfire_api/_internal/scrubbing.pyi new file mode 100644 index 00000000..c3761d32 --- /dev/null +++ b/logfire-api/logfire_api/_internal/scrubbing.pyi @@ -0,0 +1,48 @@ +import re +from .constants import ATTRIBUTES_JSON_SCHEMA_KEY as ATTRIBUTES_JSON_SCHEMA_KEY, ATTRIBUTES_LOG_LEVEL_NAME_KEY as ATTRIBUTES_LOG_LEVEL_NAME_KEY, ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY as ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY, ATTRIBUTES_SAMPLE_RATE_KEY as ATTRIBUTES_SAMPLE_RATE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, ATTRIBUTES_TAGS_KEY as ATTRIBUTES_TAGS_KEY, NULL_ARGS_KEY as NULL_ARGS_KEY, RESOURCE_ATTRIBUTES_PACKAGE_VERSIONS as RESOURCE_ATTRIBUTES_PACKAGE_VERSIONS +from .stack_info import STACK_INFO_KEYS as STACK_INFO_KEYS +from .utils import ReadableSpanDict as ReadableSpanDict +from _typeshed import Incomplete +from abc import ABC, abstractmethod +from dataclasses import dataclass +from opentelemetry.sdk.trace import Event +from typing import Any, Callable, Sequence + +DEFAULT_PATTERNS: Incomplete + +@dataclass +class ScrubMatch: + """An object passed to the [`scrubbing_callback`][logfire.configure(scrubbing_callback)] function.""" + path: tuple[str | int, ...] + value: Any + pattern_match: re.Match[str] +ScrubCallback = Callable[[ScrubMatch], Any] + +@dataclass +class ScrubbingOptions: + """Options for redacting sensitive data.""" + callback: ScrubCallback | None = ... + extra_patterns: Sequence[str] | None = ... + +class BaseScrubber(ABC): + SAFE_KEYS: Incomplete + @abstractmethod + def scrub_span(self, span: ReadableSpanDict): ... + @abstractmethod + def scrub(self, path: tuple[str | int, ...], value: Any) -> Any: ... + +class NoopScrubber(BaseScrubber): + def scrub_span(self, span: ReadableSpanDict): ... + def scrub(self, path: tuple[str | int, ...], value: Any) -> Any: ... 
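`ScrubbingOptions` and `ScrubMatch` above are the public surface of the scrubber. A hedged sketch of a scrubbing callback that rescues a false positive (the `session_id` path and the exact `configure` parameter are assumptions for illustration, not taken from this diff):

```py
import logfire
from logfire import ScrubMatch, ScrubbingOptions


def keep_session_id(match: ScrubMatch):
    # Assumption: returning a value keeps it, while returning None
    # (the implicit default) lets the scrubber redact the match as usual.
    if match.path and match.path[-1] == 'session_id':
        return match.value


# Assumption: this version of the SDK accepts ScrubbingOptions via `scrubbing=`;
# the parameter name may differ between releases.
logfire.configure(scrubbing=ScrubbingOptions(callback=keep_session_id))
```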
+ +class Scrubber(BaseScrubber): + """Redacts potentially sensitive data.""" + def __init__(self, patterns: Sequence[str] | None, callback: ScrubCallback | None = None) -> None: ... + def scrub_span(self, span: ReadableSpanDict): ... + def scrub_event_attributes(self, event: Event, index: int): ... + def scrub(self, path: tuple[str | int, ...], value: Any) -> Any: + """Redacts sensitive data from `value`, recursing into nested sequences and mappings. + + `path` is a list of keys and indices leading to `value` in the span. + Similar to the truncation code, it should use the field names in the frontend, e.g. `otel_events`. + """ diff --git a/logfire-api/logfire_api/_internal/stack_info.pyi b/logfire-api/logfire_api/_internal/stack_info.pyi new file mode 100644 index 00000000..a5df680b --- /dev/null +++ b/logfire-api/logfire_api/_internal/stack_info.pyi @@ -0,0 +1,39 @@ +from _typeshed import Incomplete +from types import CodeType, FrameType +from typing import TypedDict + +StackInfo = TypedDict('StackInfo', {'code.filepath': str, 'code.lineno': int, 'code.function': str}, total=False) +STACK_INFO_KEYS: Incomplete +SITE_PACKAGES_DIR: Incomplete +PYTHON_LIB_DIR: Incomplete +LOGFIRE_DIR: Incomplete +PREFIXES: Incomplete + +def get_filepath_attribute(file: str) -> StackInfo: ... +def get_code_object_info(code: CodeType) -> StackInfo: ... +def get_stack_info_from_frame(frame: FrameType) -> StackInfo: ... +def get_user_stack_info() -> StackInfo: + """Get the stack info for the first calling frame in user code. + + See is_user_code for details. + Returns an empty dict if no such frame is found. + """ +def get_user_frame_and_stacklevel() -> tuple[FrameType | None, int]: + """Get the first calling frame in user code and a corresponding stacklevel that can be passed to `warnings.warn`. + + See is_user_code for details. + Returns `(None, 0)` if no such frame is found. + """ +def is_user_code(code: CodeType) -> bool: + """Check if the code object is from user code. + + A code object is not user code if: + - It is from a file in + - the standard library + - site-packages (specifically wherever opentelemetry is installed) + - the logfire package + - It is a list/dict/set comprehension. + These are artificial frames only created before Python 3.12, + and they are always called directly from the enclosing function so it makes sense to skip them. + On the other hand, generator expressions and lambdas might be called far away from where they are defined. 
+ """ diff --git a/logfire-api/logfire_api/_internal/tracer.pyi b/logfire-api/logfire_api/_internal/tracer.pyi new file mode 100644 index 00000000..448b0195 --- /dev/null +++ b/logfire-api/logfire_api/_internal/tracer.pyi @@ -0,0 +1,75 @@ +import opentelemetry.trace as trace_api +from .config import LogfireConfig as LogfireConfig +from .constants import ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY as ATTRIBUTES_PENDING_SPAN_REAL_PARENT_KEY, ATTRIBUTES_SAMPLE_RATE_KEY as ATTRIBUTES_SAMPLE_RATE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, PENDING_SPAN_NAME_SUFFIX as PENDING_SPAN_NAME_SUFFIX +from dataclasses import dataclass +from opentelemetry import context as context_api +from opentelemetry.context import Context +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import ReadableSpan, SpanProcessor, TracerProvider as SDKTracerProvider +from opentelemetry.sdk.trace.id_generator import IdGenerator +from opentelemetry.trace import Link as Link, Span, SpanContext, SpanKind, Tracer, TracerProvider +from opentelemetry.trace.status import Status, StatusCode +from opentelemetry.util import types as otel_types +from threading import Lock +from typing import Any, Callable, Mapping, Sequence +from weakref import WeakKeyDictionary + +@dataclass +class ProxyTracerProvider(TracerProvider): + """A tracer provider that wraps another internal tracer provider allowing it to be re-assigned.""" + provider: TracerProvider + config: LogfireConfig + tracers: WeakKeyDictionary[_ProxyTracer, Callable[[], Tracer]] = ... + lock: Lock = ... + def set_provider(self, provider: SDKTracerProvider) -> None: ... + def get_tracer(self, instrumenting_module_name: str, instrumenting_library_version: str | None = None, schema_url: str | None = None, is_span_tracer: bool = True) -> _ProxyTracer: ... + def add_span_processor(self, span_processor: Any) -> None: ... + def shutdown(self) -> None: ... + @property + def resource(self) -> Resource: ... + def force_flush(self, timeout_millis: int = 30000) -> bool: ... + +@dataclass +class _MaybeDeterministicTimestampSpan(trace_api.Span, ReadableSpan): + """Span that overrides end() to use a timestamp generator if one was provided.""" + span: Span + ns_timestamp_generator: Callable[[], int] + def end(self, end_time: int | None = None) -> None: ... + def get_span_context(self) -> SpanContext: ... + def set_attributes(self, attributes: dict[str, otel_types.AttributeValue]) -> None: ... + def set_attribute(self, key: str, value: otel_types.AttributeValue) -> None: ... + def add_event(self, name: str, attributes: otel_types.Attributes = None, timestamp: int | None = None) -> None: ... + def update_name(self, name: str) -> None: ... + def is_recording(self) -> bool: ... + def set_status(self, status: Status | StatusCode, description: str | None = None) -> None: ... + def record_exception(self, exception: BaseException, attributes: otel_types.Attributes = None, timestamp: int | None = None, escaped: bool = False) -> None: ... + def __getattr__(self, name: str) -> Any: ... + +@dataclass +class _ProxyTracer(Tracer): + """A tracer that wraps another internal tracer allowing it to be re-assigned.""" + tracer: Tracer + provider: ProxyTracerProvider + is_span_tracer: bool + def __hash__(self) -> int: ... + def __eq__(self, other: object) -> bool: ... + def set_tracer(self, tracer: Tracer) -> None: ... 
+ def start_span(self, name: str, context: Context | None = None, kind: SpanKind = ..., attributes: otel_types.Attributes = None, links: Sequence[Link] | None = None, start_time: int | None = None, record_exception: bool = True, set_status_on_exception: bool = True) -> Span: ... + start_as_current_span = ... + +@dataclass +class PendingSpanProcessor(SpanProcessor): + """Span processor that emits an extra pending span for each span as it starts. + + The pending span is emitted by calling `on_end` on all other processors. + """ + id_generator: IdGenerator + other_processors: tuple[SpanProcessor, ...] + def on_start(self, span: Span, parent_context: context_api.Context | None = None) -> None: ... + +def should_sample(span_context: SpanContext, attributes: Mapping[str, otel_types.AttributeValue]) -> bool: + """Determine if a span should be sampled. + + This is used to sample spans that are not sampled by the OTEL sampler. + """ +def get_sample_rate_from_attributes(attributes: otel_types.Attributes) -> float | None: ... diff --git a/logfire-api/logfire_api/_internal/utils.pyi b/logfire-api/logfire_api/_internal/utils.pyi new file mode 100644 index 00000000..348281a3 --- /dev/null +++ b/logfire-api/logfire_api/_internal/utils.pyi @@ -0,0 +1,85 @@ +from _typeshed import Incomplete +from collections.abc import Generator +from opentelemetry import trace as trace_api +from opentelemetry.sdk.resources import Resource +from opentelemetry.sdk.trace import Event as Event, ReadableSpan +from opentelemetry.sdk.util.instrumentation import InstrumentationScope +from opentelemetry.trace.status import Status +from opentelemetry.util import types as otel_types +from packaging.version import Version +from pathlib import Path +from requests import RequestException, Response +from typing import Any, Mapping, Sequence, TypeVar, TypedDict + +T = TypeVar('T') +JsonValue: Incomplete +JsonDict = dict[str, JsonValue] + +def dump_json(obj: JsonValue) -> str: ... + +logger: Incomplete + +def uniquify_sequence(seq: Sequence[T]) -> tuple[T, ...]: + """Remove duplicates from a sequence preserving order.""" +def safe_repr(obj: Any) -> str: + """Return some kind of non-empty string representation of an object, catching exceptions.""" +Truncatable = TypeVar('Truncatable', str, bytes, 'list[Any]', 'tuple[Any, ...]') + +def truncate_string(s: str, *, max_length: int, middle: str = '...') -> str: + """Return a string at most max_length characters long, with `middle` in the middle if truncated.""" +def truncate_sequence(seq: Truncatable, *, max_length: int, middle: Truncatable) -> Truncatable: + """Return a sequence with `len()` at most `max_length`, with `middle` in the middle if truncated.""" +def read_toml_file(path: Path) -> dict[str, Any]: + """Read a TOML file and return the parsed data. + + It wraps the `tomllib.load` function from Python 3.11 or the `tomli.load` function from older versions. + """ + +class ReadableSpanDict(TypedDict): + """A dictionary representation of a ReadableSpan. + + ReadableSpan is immutable, so making modified versions of it is inconvenient and slow. + Converting a ReadableSpan to a ReadableSpanDict using span_to_dict makes it easier to modify. + See `SpanProcessorWrapper.on_end` for an example of how this is useful.
+ """ + name: str + context: trace_api.SpanContext | None + parent: trace_api.SpanContext | None + resource: Resource | None + attributes: Mapping[str, otel_types.AttributeValue] + events: Sequence[Event] + links: Sequence[trace_api.Link] + kind: trace_api.SpanKind + status: Status + start_time: int | None + end_time: int | None + instrumentation_scope: InstrumentationScope | None + +def span_to_dict(span: ReadableSpan) -> ReadableSpanDict: + """See ReadableSpanDict.""" + +class UnexpectedResponse(RequestException): + """An unexpected response was received from the server.""" + def __init__(self, response: Response) -> None: ... + @classmethod + def raise_for_status(cls, response: Response) -> None: + """Like the requests method, but raises a more informative exception.""" + +def ensure_data_dir_exists(data_dir: Path) -> None: ... +def get_version(version: str) -> Version: + """Return a packaging.version.Version object from a version string. + + We check if `packaging` is available, falling back to `setuptools._vendor.packaging` if it's not. + """ + +SUPPRESS_INSTRUMENTATION_CONTEXT_KEYS: Incomplete + +def is_instrumentation_suppressed() -> bool: + """Return True if the `suppress_instrumentation` context manager is currently active. + + This means that any logs/spans generated by logfire or OpenTelemetry will not be logged in any way. + """ +def suppress_instrumentation() -> Generator[None, None, None]: + """Context manager to suppress all logs/spans generated by logfire or OpenTelemetry.""" +def log_internal_error() -> None: ... +def handle_internal_errors() -> Generator[None, None, None]: ... diff --git a/logfire-api/logfire_api/cli.pyi b/logfire-api/logfire_api/cli.pyi new file mode 100644 index 00000000..911dad07 --- /dev/null +++ b/logfire-api/logfire_api/cli.pyi @@ -0,0 +1,3 @@ +from ._internal.cli import main as main + +__all__ = ['main'] diff --git a/logfire-api/logfire_api/exceptions.pyi b/logfire-api/logfire_api/exceptions.pyi new file mode 100644 index 00000000..c4d34738 --- /dev/null +++ b/logfire-api/logfire_api/exceptions.pyi @@ -0,0 +1,2 @@ +class LogfireConfigError(ValueError): + """Error raised when there is a problem with the Logfire configuration.""" diff --git a/logfire-api/logfire_api/integrations/__init__.pyi b/logfire-api/logfire_api/integrations/__init__.pyi new file mode 100644 index 00000000..e69de29b diff --git a/logfire-api/logfire_api/integrations/logging.pyi b/logfire-api/logfire_api/integrations/logging.pyi new file mode 100644 index 00000000..3a75b2b2 --- /dev/null +++ b/logfire-api/logfire_api/integrations/logging.pyi @@ -0,0 +1,30 @@ +from .._internal.constants import ATTRIBUTES_LOGGING_ARGS_KEY as ATTRIBUTES_LOGGING_ARGS_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, LOGGING_TO_OTEL_LEVEL_NUMBERS as LOGGING_TO_OTEL_LEVEL_NUMBERS +from .._internal.utils import is_instrumentation_suppressed as is_instrumentation_suppressed +from _typeshed import Incomplete +from logging import Handler as LoggingHandler, LogRecord +from typing import Any, ClassVar + +RESERVED_ATTRS: frozenset[str] + +class LogfireLoggingHandler(LoggingHandler): + """A logging handler that sends logs to **Logfire**.""" + custom_scope_suffix: ClassVar[str] + fallback: Incomplete + def __init__(self, level: int | str = ..., fallback: LoggingHandler = ...) -> None: ... + def emit(self, record: LogRecord) -> None: + """Send the log to Logfire. + + Args: + record: The log record to send. 
+ """ + def fill_attributes(self, record: LogRecord) -> dict[str, Any]: + """Fill the attributes to send to Logfire. + + This method can be overridden to add more attributes. + + Args: + record: The log record. + + Returns: + The attributes for the log record. + """ diff --git a/logfire-api/logfire_api/integrations/loguru.pyi b/logfire-api/logfire_api/integrations/loguru.pyi new file mode 100644 index 00000000..ce0a6152 --- /dev/null +++ b/logfire-api/logfire_api/integrations/loguru.pyi @@ -0,0 +1,19 @@ +from .._internal.constants import ATTRIBUTES_LOGGING_ARGS_KEY as ATTRIBUTES_LOGGING_ARGS_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY +from .logging import LogfireLoggingHandler as LogfireLoggingHandler +from logging import LogRecord +from typing import Any + +class LogfireHandler(LogfireLoggingHandler): + """A loguru handler that sends logs to **Logfire**.""" + custom_scope_suffix: str + def fill_attributes(self, record: LogRecord) -> dict[str, Any]: + """Fill attributes from a log record. + + It filters out the 'extra' attribute and adds it's content to the attributes. + + Args: + record: The log record. + + Returns: + The attributes for the log record. + """ diff --git a/logfire-api/logfire_api/integrations/pydantic.pyi b/logfire-api/logfire_api/integrations/pydantic.pyi new file mode 100644 index 00000000..ef163fa1 --- /dev/null +++ b/logfire-api/logfire_api/integrations/pydantic.pyi @@ -0,0 +1,96 @@ +from .._internal.config import GLOBAL_CONFIG as GLOBAL_CONFIG, PydanticPlugin as PydanticPlugin +from .._internal.config_params import default_param_manager as default_param_manager +from .._internal.utils import get_version as get_version +from _typeshed import Incomplete +from dataclasses import dataclass +from logfire import LogfireSpan as LogfireSpan +from pydantic.plugin import SchemaKind, SchemaTypePath +from pydantic_core import CoreConfig, CoreSchema +from typing import Any, Literal, TypeVar, TypedDict +from typing_extensions import ParamSpec + +METER: Incomplete +validation_counter: Incomplete + +class PluginSettings(TypedDict, total=False): + """A typed dict for the Pydantic plugin settings. + + This is how you can use the [`PluginSettings`][logfire.integrations.pydantic.PluginSettings] + with a Pydantic model: + + ```py + from logfire.integrations.pydantic import PluginSettings + from pydantic import BaseModel + + + class Model(BaseModel, plugin_settings=PluginSettings(logfire={'record': 'all'})): + a: int + ``` + """ + logfire: LogfireSettings + +class LogfireSettings(TypedDict, total=False): + """Settings for the logfire integration.""" + trace_sample_rate: float + tags: list[str] + record: Literal['all', 'failure', 'metrics'] + +class _ValidateWrapper: + """Decorator factory for one schema validator method.""" + validation_method: Incomplete + schema_name: Incomplete + def __init__(self, validation_method: Literal['validate_python', 'validate_json', 'validate_strings'], schema: CoreSchema, _config: CoreConfig | None, _plugin_settings: PluginSettings | dict[str, Any], schema_type_path: SchemaTypePath, record: Literal['all', 'failure', 'metrics']) -> None: ... + def __call__(self, validator: Any) -> Any: + """Decorator which wraps a schema validator method with instrumentation.""" + +def get_schema_name(schema: CoreSchema) -> str: + """Find the best name to use for a schema. + + The follow rules are used: + * If the schema represents a model or dataclass, use the name of the class. 
+ * If the root schema is a wrap/before/after validator, look at its `schema` property. + * Otherwise use the schema's `type` property. + + Args: + schema: The schema to get the name for. + + Returns: + The name of the schema. + """ + +@dataclass +class LogfirePydanticPlugin: + '''Implements a new API for pydantic plugins. + + Patches Pydantic to accept this new API shape. + + Set the `LOGFIRE_PYDANTIC_RECORD` environment variable to `"off"` to disable the plugin, or + `PYDANTIC_DISABLE_PLUGINS` to `true` to disable all Pydantic plugins. + ''' + def new_schema_validator(self, *_: Any, **__: Any) -> tuple[_ValidateWrapper, ...] | tuple[None, ...]: + """Backwards compatibility for Pydantic < 2.5.0. + + This method is called every time a new `SchemaValidator` is created, and is a NO-OP for Pydantic < 2.5.0. + """ + def new_schema_validator(self, schema: CoreSchema, schema_type: Any, schema_type_path: SchemaTypePath, schema_kind: SchemaKind, config: CoreConfig | None, plugin_settings: dict[str, Any]) -> tuple[_ValidateWrapper, ...] | tuple[None, ...]: + """This method is called every time a new `SchemaValidator` is created. + + Args: + schema: The schema to validate against. + schema_type: The original type which the schema was created from, e.g. the model class. + schema_type_path: Path defining where `schema_type` was defined, or where `TypeAdapter` was called. + schema_kind: The kind of schema to validate against. + config: The config to use for validation. + plugin_settings: The plugin settings. + + Returns: + A tuple of decorator factories for each of the three validation methods - + `validate_python`, `validate_json`, `validate_strings` or a tuple of + three `None` if recording is `off`. + """ + +plugin: Incomplete +IGNORED_MODULES: tuple[str, ...] +IGNORED_MODULE_PREFIXES: tuple[str, ...] +P = ParamSpec('P') +R = TypeVar('R') diff --git a/logfire-api/logfire_api/integrations/structlog.pyi b/logfire-api/logfire_api/integrations/structlog.pyi new file mode 100644 index 00000000..d3b97334 --- /dev/null +++ b/logfire-api/logfire_api/integrations/structlog.pyi @@ -0,0 +1,12 @@ +from .._internal.constants import ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY +from _typeshed import Incomplete +from structlog.types import EventDict, WrappedLogger + +RESERVED_ATTRS: Incomplete + +class LogfireProcessor: + """Logfire processor for structlog.""" + console_log: Incomplete + def __init__(self, *, console_log: bool = False) -> None: ... + def __call__(self, logger: WrappedLogger, name: str, event_dict: EventDict) -> EventDict: + """A middleware to process a structlog event and send it to **Logfire**.""" diff --git a/logfire-api/logfire_api/propagate.pyi b/logfire-api/logfire_api/propagate.pyi new file mode 100644 index 00000000..d005eb9b --- /dev/null +++ b/logfire-api/logfire_api/propagate.pyi @@ -0,0 +1,41 @@ +from typing import Any, Iterator, Mapping + +__all__ = ['get_context', 'attach_context', 'ContextCarrier'] + +ContextCarrier = Mapping[str, Any] + +def get_context() -> ContextCarrier: + """Create a new empty carrier dict and inject context into it. + + Returns: + A new dict with the context injected into it. + + Usage: + + ```py + from logfire.propagate import get_context, attach_context + + logfire_context = get_context() + + ... + + # later on in another thread, process or service + with attach_context(logfire_context): + ...
+ ``` + + You could also inject context into an existing mapping like headers with: + + ```py + from logfire.propagate import get_context + + existing_headers = {'X-Foobar': 'baz'} + existing_headers.update(get_context()) + ... + ``` + """ +def attach_context(carrier: ContextCarrier) -> Iterator[None]: + """Attach a context as generated by [`get_context`][logfire.propagate.get_context] to the current execution context. + + Since `attach_context` is a context manager, it restores the previous context when exiting. + """ diff --git a/logfire-api/logfire_api/version.pyi b/logfire-api/logfire_api/version.pyi new file mode 100644 index 00000000..d0ab90f3 --- /dev/null +++ b/logfire-api/logfire_api/version.pyi @@ -0,0 +1,3 @@ +from _typeshed import Incomplete + +VERSION: Incomplete diff --git a/logfire-api/pyproject.toml b/logfire-api/pyproject.toml new file mode 100644 index 00000000..3ff89e16 --- /dev/null +++ b/logfire-api/pyproject.toml @@ -0,0 +1,31 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "logfire-api" +version = "0.45.1" +description = "Shim for the Logfire SDK which does nothing unless Logfire is installed" +authors = [ + { name = "Pydantic Team", email = "engineering@pydantic.dev" }, + { name = "Samuel Colvin", email = "samuel@pydantic.dev" }, + { name = "Hasan Ramezani", email = "hasan@pydantic.dev" }, + { name = "Adrian Garcia Badaracco", email = "adrian@pydantic.dev" }, + { name = "David Montague", email = "david@pydantic.dev" }, + { name = "Marcelo Trylesinski", email = "marcelo@pydantic.dev" }, + { name = "David Hewitt", email = "david.hewitt@pydantic.dev" }, + { name = "Alex Hall", email = "alex@pydantic.dev" }, +] +dependencies = [] +readme = "README.md" +requires-python = ">= 3.8" + +[tool.rye] +managed = true +dev-dependencies = [] + +[tool.hatch.metadata] +allow-direct-references = true + +[tool.hatch.build.targets.wheel] +packages = ["logfire_api"] diff --git a/logfire/__init__.py b/logfire/__init__.py index c5265610..bca9870e 100644 --- a/logfire/__init__.py +++ b/logfire/__init__.py @@ -6,12 +6,7 @@ from ._internal.auto_trace import AutoTraceModule from ._internal.auto_trace.rewrite_ast import no_auto_trace -from ._internal.config import ( - METRICS_PREFERRED_TEMPORALITY, - ConsoleOptions, - PydanticPlugin, - configure, -) +from ._internal.config import METRICS_PREFERRED_TEMPORALITY, ConsoleOptions, PydanticPlugin, configure from ._internal.constants import LevelName from ._internal.exporters.file import load_file as load_spans_from_file from ._internal.exporters.tail_sampling import TailSamplingOptions @@ -91,9 +86,10 @@ def loguru_handler() -> dict[str, Any]: 'span', 'instrument', 'log', - 'info', + 'trace', 'debug', 'notice', + 'info', 'warn', 'error', 'fatal', @@ -101,9 +97,24 @@ def loguru_handler() -> dict[str, Any]: 'log_slow_async_callbacks', 'install_auto_tracing', 'instrument_fastapi', + 'instrument_openai', + 'instrument_anthropic', + 'instrument_asyncpg', + 'instrument_httpx', + 'instrument_requests', + 'instrument_psycopg', + 'instrument_django', + 'instrument_flask', + 'instrument_starlette', + 'instrument_aiohttp_client', + 'instrument_sqlalchemy', + 'instrument_redis', + 'instrument_pymongo', 'AutoTraceModule', 'with_tags', + 'with_settings', # 'with_trace_sample_rate', + 'shutdown', 'load_spans_from_file', 'no_auto_trace', 'METRICS_PREFERRED_TEMPORALITY', diff --git a/pyproject.toml b/pyproject.toml index c8af803c..44bd2b49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -133,6 +133,7 @@ 
dev-dependencies = [ "cloudpickle>=3.0.0", "anthropic>=0.27.0", "sqlmodel", + "mypy>=1.10.0", ] [tool.rye.scripts] @@ -140,6 +141,9 @@ typecheck = "pyright" docs = "mkdocs build" # no strict so you can build the docs without insiders packages docs-serve = "mkdocs serve --no-strict" +generate-stubs = { chain = ["generate-stubs:stubgen", "generate-stubs:sync"] } +"generate-stubs:stubgen" = "stubgen -p logfire --include-docstrings --no-analysis" +"generate-stubs:sync" = "rsync -a out/logfire/ logfire-api/logfire_api/" [tool.hatch.metadata] allow-direct-references = true @@ -153,6 +157,7 @@ include = ["/README.md", "CHANGELOG.md", "/Makefile", "/logfire", "/tests"] # https://beta.ruff.rs/docs/configuration/ [tool.ruff] line-length = 120 +extend-exclude = ["logfire-api/logfire_api/*"] [tool.ruff.lint] extend-select = [ @@ -165,7 +170,10 @@ extend-select = [ "DTZ005", # https://docs.astral.sh/ruff/rules/call-datetime-now-without-tzinfo/ ] # add "T" to disallow prints flake8-quotes = { inline-quotes = "single", multiline-quotes = "double" } -isort = { combine-as-imports = true, known-first-party = ["logfire"] } +isort = { combine-as-imports = true, known-first-party = [ + "logfire", + "logfire_api", +] } mccabe = { max-complexity = 14 } ignore = [ "D107", # ignore missing docstring in __init__ methods @@ -181,6 +189,7 @@ convention = "google" [tool.ruff.lint.per-file-ignores] "tests/**/*.py" = ["D"] "docs/**/*.py" = ["D"] +"logfire-api/logfire_api/**/*.py" = ["D"] [tool.ruff.format] docstring-code-format = true @@ -190,7 +199,7 @@ quote-style = "single" typeCheckingMode = "strict" reportUnnecessaryTypeIgnoreComment = true reportMissingTypeStubs = false -exclude = ["docs/**/*.py", "site/**/*.py", ".venv", "venv*", "ignoreme"] +exclude = ["docs/**/*.py", "site/**/*.py", ".venv", "venv*", "ignoreme", "out", "logfire-api"] venvPath = ".venv" [tool.pytest.ini_options] diff --git a/requirements-dev.lock b/requirements-dev.lock index a250c501..cdca55a6 100644 --- a/requirements-dev.lock +++ b/requirements-dev.lock @@ -6,7 +6,6 @@ # features: [] # all-features: false # with-sources: false -# generate-hashes: false -e file:. aiohttp==3.9.5 @@ -168,8 +167,10 @@ mkdocstrings-python==1.10.5 multidict==6.0.5 # via aiohttp # via yarl +mypy==1.10.0 mypy-extensions==1.0.0 # via black + # via mypy nodeenv==1.9.1 # via pre-commit # via pyright @@ -400,6 +401,7 @@ typing-extensions==4.12.2 # via fastapi # via huggingface-hub # via logfire + # via mypy # via openai # via opentelemetry-sdk # via psycopg diff --git a/requirements.lock b/requirements.lock index 443d44bf..c8b3672e 100644 --- a/requirements.lock +++ b/requirements.lock @@ -6,7 +6,6 @@ # features: [] # all-features: false # with-sources: false -# generate-hashes: false -e file:. 
certifi==2024.6.2 diff --git a/tests/test_logfire_api.py b/tests/test_logfire_api.py new file mode 100644 index 00000000..3ed932fd --- /dev/null +++ b/tests/test_logfire_api.py @@ -0,0 +1,226 @@ +from __future__ import annotations + +import importlib +import sys +from pathlib import Path +from types import ModuleType +from typing import Callable +from unittest.mock import MagicMock + +import pytest + + +def logfire_dunder_all() -> set[str]: + logfire = importlib.import_module('logfire') + return set(logfire.__all__) + + +def import_logfire_api_without_logfire() -> ModuleType: + logfire = sys.modules['logfire'] + try: + sys.modules['logfire'] = None # type: ignore + return importlib.import_module('logfire_api') + finally: + sys.modules['logfire'] = logfire + + +def import_logfire_api_with_logfire() -> ModuleType: + logfire_api = importlib.import_module('logfire_api') + return importlib.reload(logfire_api) + + +@pytest.mark.parametrize( + ['logfire_api_factory', 'module_name'], + [ + pytest.param(import_logfire_api_without_logfire, 'logfire_api.', id='without_logfire'), + pytest.param(import_logfire_api_with_logfire, 'logfire.', id='with_logfire'), + ], +) +def test_runtime(logfire_api_factory: Callable[[], ModuleType], module_name: str) -> None: + logfire__all__ = logfire_dunder_all() + + logfire_api = logfire_api_factory() + assert logfire_api is not None + + assert hasattr(logfire_api, 'Logfire') + assert module_name in str(logfire_api.Logfire()) + logfire__all__.remove('Logfire') + + assert hasattr(logfire_api, 'configure') + logfire_api.configure(send_to_logfire=False, console=False) + logfire__all__.remove('configure') + + assert hasattr(logfire_api, 'VERSION') + logfire__all__.remove('VERSION') + + assert hasattr(logfire_api, 'LevelName') + logfire__all__.remove('LevelName') + + with logfire_api.span('test span') as span: + assert isinstance(span, logfire_api.LogfireSpan) + span.set_attribute('foo', 'bar') + logfire__all__.remove('LogfireSpan') + logfire__all__.remove('span') + + assert hasattr(logfire_api, 'log') + logfire_api.log('info', 'test log') + logfire__all__.remove('log') + + for log_method in ['trace', 'debug', 'info', 'notice', 'warn', 'error', 'fatal']: + assert hasattr(logfire_api, log_method) + getattr(logfire_api, log_method)('test log') + logfire__all__.remove(log_method) + + assert hasattr(logfire_api, 'with_settings') + assert isinstance(logfire_api.with_settings(), logfire_api.Logfire) + logfire__all__.remove('with_settings') + + assert hasattr(logfire_api, 'with_tags') + logfire_api.with_tags('test tag') + logfire__all__.remove('with_tags') + + assert hasattr(logfire_api, 'force_flush') + logfire_api.force_flush() + logfire__all__.remove('force_flush') + + assert hasattr(logfire_api, 'no_auto_trace') + logfire_api.no_auto_trace(lambda: None) # pragma: no branch + logfire__all__.remove('no_auto_trace') + + assert hasattr(logfire_api, 'suppress_instrumentation') + with logfire_api.suppress_instrumentation(): + ... 
+ logfire__all__.remove('suppress_instrumentation') + + assert hasattr(logfire_api, 'ConsoleOptions') + logfire_api.ConsoleOptions(colors='auto') + logfire__all__.remove('ConsoleOptions') + + assert hasattr(logfire_api, 'PydanticPlugin') + logfire_api.PydanticPlugin() + logfire__all__.remove('PydanticPlugin') + + assert hasattr(logfire_api, 'ScrubMatch') + logfire_api.ScrubMatch(path='test', value='test', pattern_match='test') + logfire__all__.remove('ScrubMatch') + + assert hasattr(logfire_api, 'log_slow_async_callbacks') + # NOTE: We don't call log_slow_async_callbacks, to avoid side effects on the test suite. + logfire__all__.remove('log_slow_async_callbacks') + + assert hasattr(logfire_api, 'install_auto_tracing') + logfire_api.install_auto_tracing(modules=['all']) + logfire__all__.remove('install_auto_tracing') + + assert hasattr(logfire_api, 'instrument') + + @logfire_api.instrument() + def func() -> None: ... + + func() + logfire__all__.remove('instrument') + + for member in ('instrument_flask', 'instrument_fastapi', 'instrument_starlette'): + assert hasattr(logfire_api, member), member + getattr(logfire_api, member)(app=MagicMock()) + logfire__all__.remove(member) + + for member in ('instrument_openai', 'instrument_anthropic'): + assert hasattr(logfire_api, member), member + with getattr(logfire_api, member)(): + ... + logfire__all__.remove(member) + + for member in [m for m in logfire__all__ if m.startswith('instrument_')]: + assert hasattr(logfire_api, member), member + getattr(logfire_api, member)() + logfire__all__.remove(member) + + assert hasattr(logfire_api, 'shutdown') + logfire_api.shutdown() + logfire__all__.remove('shutdown') + + assert hasattr(logfire_api, 'AutoTraceModule') + logfire_api.AutoTraceModule(name='test', filename='test') + logfire__all__.remove('AutoTraceModule') + + assert hasattr(logfire_api, 'LogfireLoggingHandler') + logfire_api.LogfireLoggingHandler() + logfire__all__.remove('LogfireLoggingHandler') + + assert hasattr(logfire_api, 'StructlogProcessor') + logfire_api.StructlogProcessor() + logfire__all__.remove('StructlogProcessor') + + assert hasattr(logfire_api, 'TailSamplingOptions') + logfire_api.TailSamplingOptions() + logfire__all__.remove('TailSamplingOptions') + + assert hasattr(logfire_api, 'ScrubbingOptions') + logfire_api.ScrubbingOptions() + logfire__all__.remove('ScrubbingOptions') + + assert hasattr(logfire_api, 'METRICS_PREFERRED_TEMPORALITY') + logfire__all__.remove('METRICS_PREFERRED_TEMPORALITY') + + assert hasattr(logfire_api, 'load_spans_from_file') + logfire_api.load_spans_from_file(file_path='test') + logfire__all__.remove('load_spans_from_file') + + # If it's not empty, it means that some of the __all__ members are not tested. + assert logfire__all__ == set(), logfire__all__ + + +@pytest.mark.skipif(sys.version_info < (3, 11), reason='We only need this test for a single Python version.') +def test_match_version_on_pyproject() -> None: + import tomllib + + logfire_pyproject = (Path(__file__).parent.parent / 'pyproject.toml').read_text() + logfire_api_pyproject = (Path(__file__).parent.parent / 'logfire-api' / 'pyproject.toml').read_text() + + logfire_pyproject_content = tomllib.loads(logfire_pyproject) + logfire_api_pyproject_content = tomllib.loads(logfire_api_pyproject) + + assert logfire_pyproject_content['project']['version'] == logfire_api_pyproject_content['project']['version'] + + +def test_override_init_pyi() -> None: # pragma: no cover + """The logic here is: + + 1.
If `span: Incomplete` is present, we need to regenerate the `DEFAULT_LOGFIRE_INSTANCE` logic. + 2. If `span: Incomplete` is not present but other `: Incomplete` annotations remain in the file, we need to + rewrite them to the `DEFAULT_LOGFIRE_INSTANCE` logic. + 3. If none of the above is present, we skip the test. + """ + incomplete = ': Incomplete' + len_incomplete = len(incomplete) + + init_pyi = (Path(__file__).parent.parent / 'logfire-api' / 'logfire_api' / '__init__.pyi').read_text() + lines = init_pyi.splitlines() + + try: + span_index = lines.index('span: Incomplete') + except ValueError: + for i, line in enumerate(lines.copy()): + if line.endswith(incomplete): + prefix = line[: len(line) - len_incomplete] + lines[i] = f'{prefix} = DEFAULT_LOGFIRE_INSTANCE.{prefix}' + else: + default_logfire_instance = 'DEFAULT_LOGFIRE_INSTANCE' + + new_end_lines: list[str] = [f'{default_logfire_instance} = Logfire()'] + + for line in lines[span_index:]: + if line.endswith(incomplete): + prefix = line[: len(line) - len_incomplete] + new_end_lines.append(f'{prefix} = {default_logfire_instance}.{prefix}') + else: + new_end_lines.append(line) + lines.remove('from _typeshed import Incomplete') + lines[span_index - 1 :] = new_end_lines + + new_init_pyi = '\n'.join(lines) + '\n' + if new_init_pyi == init_pyi: + pytest.skip('No changes were made to the __init__.pyi file.') + (Path(__file__).parent.parent / 'logfire-api' / 'logfire_api' / '__init__.pyi').write_text(new_init_pyi) + pytest.fail('The __init__.pyi file was updated.')
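Taken together, the pattern `test_runtime` exercises above is what a third-party library would actually write: import the shim instead of the SDK, and get real telemetry only when the `logfire` package is installed. A minimal illustrative sketch (the `fetch` function is invented for the example, not part of this diff):

```py
import logfire_api as logfire


def fetch(url: str) -> None:
    # A real span when `logfire` is installed and configured;
    # a harmless no-op shim otherwise.
    with logfire.span('fetching {url}', url=url):
        ...  # perform the request
```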