Skip to content

Commit

Permalink
TailSamplingOptions added
Browse files Browse the repository at this point in the history
  • Loading branch information
Kludex committed Jun 21, 2024
1 parent 08e0d32 commit d42ad35
Show file tree
Hide file tree
Showing 7 changed files with 70 additions and 15 deletions.
3 changes: 3 additions & 0 deletions logfire-api/logfire_api/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,9 @@ def suppress_instrumentation():
class ConsoleOptions:
    """No-op stand-in for `logfire.ConsoleOptions`: accepts any arguments and stores nothing."""
    def __init__(self, *args, **kwargs) -> None: ...

class TailSamplingOptions:
    """No-op stand-in for `logfire.TailSamplingOptions`: accepts any arguments and stores nothing.

    Mirrors the real class's presence so code importing it from `logfire_api` keeps working
    when the actual `logfire` package is not installed.
    """
    def __init__(self, *args, **kwargs) -> None: ...

class PydanticPlugin:
    """No-op stand-in for `logfire.PydanticPlugin`: accepts any arguments and stores nothing."""
    def __init__(self, *args, **kwargs) -> None: ...

Expand Down
3 changes: 2 additions & 1 deletion logfire-api/logfire_api/__init__.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -3,14 +3,15 @@ from ._internal.auto_trace.rewrite_ast import no_auto_trace as no_auto_trace
from ._internal.config import ConsoleOptions as ConsoleOptions, METRICS_PREFERRED_TEMPORALITY as METRICS_PREFERRED_TEMPORALITY, PydanticPlugin as PydanticPlugin, configure as configure
from ._internal.constants import LevelName as LevelName
from ._internal.exporters.file import load_file as load_spans_from_file
from ._internal.exporters.tail_sampling import TailSamplingOptions as TailSamplingOptions
from ._internal.main import Logfire as Logfire, LogfireSpan as LogfireSpan
from ._internal.scrubbing import ScrubMatch as ScrubMatch
from ._internal.utils import suppress_instrumentation as suppress_instrumentation
from .integrations.logging import LogfireLoggingHandler as LogfireLoggingHandler
from .integrations.structlog import LogfireProcessor as StructlogProcessor
from .version import VERSION as VERSION

__all__ = ['Logfire', 'LogfireSpan', 'LevelName', 'ConsoleOptions', 'PydanticPlugin', 'configure', 'span', 'instrument', 'log', 'trace', 'debug', 'notice', 'info', 'warn', 'error', 'fatal', 'force_flush', 'log_slow_async_callbacks', 'install_auto_tracing', 'instrument_fastapi', 'instrument_openai', 'instrument_anthropic', 'instrument_asyncpg', 'instrument_httpx', 'instrument_requests', 'instrument_psycopg', 'instrument_django', 'instrument_flask', 'instrument_starlette', 'instrument_aiohttp_client', 'instrument_sqlalchemy', 'instrument_redis', 'instrument_pymongo', 'AutoTraceModule', 'with_tags', 'with_settings', 'shutdown', 'load_spans_from_file', 'no_auto_trace', 'METRICS_PREFERRED_TEMPORALITY', 'ScrubMatch', 'VERSION', 'suppress_instrumentation', 'StructlogProcessor', 'LogfireLoggingHandler']
__all__ = ['Logfire', 'LogfireSpan', 'LevelName', 'ConsoleOptions', 'PydanticPlugin', 'configure', 'span', 'instrument', 'log', 'trace', 'debug', 'notice', 'info', 'warn', 'error', 'fatal', 'force_flush', 'log_slow_async_callbacks', 'install_auto_tracing', 'instrument_fastapi', 'instrument_openai', 'instrument_anthropic', 'instrument_asyncpg', 'instrument_httpx', 'instrument_requests', 'instrument_psycopg', 'instrument_django', 'instrument_flask', 'instrument_starlette', 'instrument_aiohttp_client', 'instrument_sqlalchemy', 'instrument_redis', 'instrument_pymongo', 'AutoTraceModule', 'with_tags', 'with_settings', 'shutdown', 'load_spans_from_file', 'no_auto_trace', 'METRICS_PREFERRED_TEMPORALITY', 'ScrubMatch', 'VERSION', 'suppress_instrumentation', 'StructlogProcessor', 'LogfireLoggingHandler', 'TailSamplingOptions']

DEFAULT_LOGFIRE_INSTANCE = Logfire()
span = DEFAULT_LOGFIRE_INSTANCE.span
Expand Down
16 changes: 9 additions & 7 deletions logfire-api/logfire_api/_internal/config.pyi
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,10 @@ from .exporters.console import ConsoleColorsValues as ConsoleColorsValues, Inden
from .exporters.fallback import FallbackSpanExporter as FallbackSpanExporter
from .exporters.file import FileSpanExporter as FileSpanExporter
from .exporters.otlp import OTLPExporterHttpSession as OTLPExporterHttpSession, RetryFewerSpansSpanExporter as RetryFewerSpansSpanExporter
from .exporters.processor_wrapper import SpanProcessorWrapper as SpanProcessorWrapper
from .exporters.processor_wrapper import MainSpanProcessorWrapper as MainSpanProcessorWrapper
from .exporters.quiet_metrics import QuietMetricExporter as QuietMetricExporter
from .exporters.remove_pending import RemovePendingSpansExporter as RemovePendingSpansExporter
from .exporters.tail_sampling import TailSamplingOptions as TailSamplingOptions, TailSamplingProcessor as TailSamplingProcessor
from .integrations.executors import instrument_executors as instrument_executors
from .metrics import ProxyMeterProvider as ProxyMeterProvider, configure_metrics as configure_metrics
from .scrubbing import ScrubCallback as ScrubCallback, Scrubber as Scrubber
Expand Down Expand Up @@ -54,7 +55,7 @@ class PydanticPlugin:
exclude: set[str] = ...
def __init__(self, record=..., include=..., exclude=...) -> None: ...

def configure(*, send_to_logfire: bool | Literal['if-token-present'] | None = None, token: str | None = None, project_name: str | None = None, service_name: str | None = None, service_version: str | None = None, trace_sample_rate: float | None = None, console: ConsoleOptions | Literal[False] | None = None, show_summary: bool | None = None, config_dir: Path | str | None = None, data_dir: Path | str | None = None, base_url: str | None = None, collect_system_metrics: bool | None = None, id_generator: IdGenerator | None = None, ns_timestamp_generator: Callable[[], int] | None = None, processors: None = None, additional_span_processors: Sequence[SpanProcessor] | None = None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None, metric_readers: None = None, additional_metric_readers: Sequence[MetricReader] | None = None, logfire_api_session: requests.Session | None = None, pydantic_plugin: PydanticPlugin | None = None, fast_shutdown: bool = False, scrubbing_patterns: Sequence[str] | None = None, scrubbing_callback: ScrubCallback | None = None, inspect_arguments: bool | None = None) -> None:
def configure(*, send_to_logfire: bool | Literal['if-token-present'] | None = None, token: str | None = None, project_name: str | None = None, service_name: str | None = None, service_version: str | None = None, trace_sample_rate: float | None = None, console: ConsoleOptions | Literal[False] | None = None, show_summary: bool | None = None, config_dir: Path | str | None = None, data_dir: Path | str | None = None, base_url: str | None = None, collect_system_metrics: bool | None = None, id_generator: IdGenerator | None = None, ns_timestamp_generator: Callable[[], int] | None = None, processors: None = None, additional_span_processors: Sequence[SpanProcessor] | None = None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None, metric_readers: None = None, additional_metric_readers: Sequence[MetricReader] | None = None, pydantic_plugin: PydanticPlugin | None = None, fast_shutdown: bool = False, scrubbing_patterns: Sequence[str] | None = None, scrubbing_callback: ScrubCallback | None = None, inspect_arguments: bool | None = None, tail_sampling: TailSamplingOptions | None = None) -> None:
"""Configure the logfire SDK.
Args:
Expand Down Expand Up @@ -96,7 +97,6 @@ def configure(*, send_to_logfire: bool | Literal['if-token-present'] | None = No
which exports metrics to Logfire's API.
Ensure that `preferred_temporality=logfire.METRICS_PREFERRED_TEMPORALITY`
is passed to the constructor of metric readers/exporters that accept the `preferred_temporality` argument.
logfire_api_session: HTTP client session used to communicate with the Logfire API.
pydantic_plugin: Configuration for the Pydantic plugin. If `None` uses the `LOGFIRE_PYDANTIC_PLUGIN_*` environment
variables, otherwise defaults to `PydanticPlugin(record='off')`.
fast_shutdown: Whether to shut down exporters and providers quickly, mostly used for tests. Defaults to `False`.
Expand All @@ -111,6 +111,7 @@ def configure(*, send_to_logfire: bool | Literal['if-token-present'] | None = No
[f-string magic](https://docs.pydantic.dev/logfire/guides/onboarding_checklist/add_manual_tracing/#f-strings).
If `None` uses the `LOGFIRE_INSPECT_ARGUMENTS` environment variable.
Defaults to `True` if and only if the Python version is at least 3.11.
tail_sampling: Tail sampling options. Not ready for general use.
"""

@dataclasses.dataclass
Expand All @@ -135,25 +136,26 @@ class _LogfireConfigData:
data_dir: Path
collect_system_metrics: bool
id_generator: IdGenerator
logfire_api_session: requests.Session
ns_timestamp_generator: Callable[[], int]
additional_span_processors: Sequence[SpanProcessor] | None
pydantic_plugin: PydanticPlugin
default_span_processor: Callable[[SpanExporter], SpanProcessor]
fast_shutdown: bool
scrubbing_patterns: Sequence[str] | None
scrubbing_callback: ScrubCallback | None
def __init__(self, base_url, send_to_logfire, token, project_name, service_name, trace_sample_rate, console, show_summary, data_dir, collect_system_metrics, id_generator, logfire_api_session, ns_timestamp_generator, additional_span_processors, pydantic_plugin, default_span_processor, fast_shutdown, scrubbing_patterns, scrubbing_callback) -> None: ...
inspect_arguments: bool
tail_sampling: TailSamplingOptions | None
def __init__(self, base_url, send_to_logfire, token, project_name, service_name, trace_sample_rate, console, show_summary, data_dir, collect_system_metrics, id_generator, ns_timestamp_generator, additional_span_processors, pydantic_plugin, default_span_processor, fast_shutdown, scrubbing_patterns, scrubbing_callback, inspect_arguments, tail_sampling) -> None: ...

class LogfireConfig(_LogfireConfigData):
def __init__(self, base_url: str | None = None, send_to_logfire: bool | None = None, token: str | None = None, project_name: str | None = None, service_name: str | None = None, service_version: str | None = None, trace_sample_rate: float | None = None, console: ConsoleOptions | Literal[False] | None = None, show_summary: bool | None = None, config_dir: Path | None = None, data_dir: Path | None = None, collect_system_metrics: bool | None = None, id_generator: IdGenerator | None = None, ns_timestamp_generator: Callable[[], int] | None = None, additional_span_processors: Sequence[SpanProcessor] | None = None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None, additional_metric_readers: Sequence[MetricReader] | None = None, logfire_api_session: requests.Session | None = None, pydantic_plugin: PydanticPlugin | None = None, fast_shutdown: bool = False, scrubbing_patterns: Sequence[str] | None = None, scrubbing_callback: ScrubCallback | None = None, inspect_arguments: bool | None = None) -> None:
def __init__(self, base_url: str | None = None, send_to_logfire: bool | None = None, token: str | None = None, project_name: str | None = None, service_name: str | None = None, service_version: str | None = None, trace_sample_rate: float | None = None, console: ConsoleOptions | Literal[False] | None = None, show_summary: bool | None = None, config_dir: Path | None = None, data_dir: Path | None = None, collect_system_metrics: bool | None = None, id_generator: IdGenerator | None = None, ns_timestamp_generator: Callable[[], int] | None = None, additional_span_processors: Sequence[SpanProcessor] | None = None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None = None, additional_metric_readers: Sequence[MetricReader] | None = None, pydantic_plugin: PydanticPlugin | None = None, fast_shutdown: bool = False, scrubbing_patterns: Sequence[str] | None = None, scrubbing_callback: ScrubCallback | None = None, inspect_arguments: bool | None = None, tail_sampling: TailSamplingOptions | None = None) -> None:
"""Create a new LogfireConfig.
Users should never need to call this directly, instead use `logfire.configure`.
See `_LogfireConfigData` for parameter documentation.
"""
def configure(self, base_url: str | None, send_to_logfire: bool | Literal['if-token-present'] | None, token: str | None, project_name: str | None, service_name: str | None, service_version: str | None, trace_sample_rate: float | None, console: ConsoleOptions | Literal[False] | None, show_summary: bool | None, config_dir: Path | None, data_dir: Path | None, collect_system_metrics: bool | None, id_generator: IdGenerator | None, ns_timestamp_generator: Callable[[], int] | None, additional_span_processors: Sequence[SpanProcessor] | None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None, additional_metric_readers: Sequence[MetricReader] | None, logfire_api_session: requests.Session | None, pydantic_plugin: PydanticPlugin | None, fast_shutdown: bool, scrubbing_patterns: Sequence[str] | None, scrubbing_callback: ScrubCallback | None, inspect_arguments: bool | None) -> None: ...
def configure(self, base_url: str | None, send_to_logfire: bool | Literal['if-token-present'] | None, token: str | None, project_name: str | None, service_name: str | None, service_version: str | None, trace_sample_rate: float | None, console: ConsoleOptions | Literal[False] | None, show_summary: bool | None, config_dir: Path | None, data_dir: Path | None, collect_system_metrics: bool | None, id_generator: IdGenerator | None, ns_timestamp_generator: Callable[[], int] | None, additional_span_processors: Sequence[SpanProcessor] | None, default_span_processor: Callable[[SpanExporter], SpanProcessor] | None, additional_metric_readers: Sequence[MetricReader] | None, pydantic_plugin: PydanticPlugin | None, fast_shutdown: bool, scrubbing_patterns: Sequence[str] | None, scrubbing_callback: ScrubCallback | None, inspect_arguments: bool | None, tail_sampling: TailSamplingOptions | None) -> None: ...
def initialize(self) -> ProxyTracerProvider:
"""Configure internals to start exporting traces and metrics."""
def get_tracer_provider(self) -> ProxyTracerProvider:
Expand Down
Original file line number Diff line number Diff line change
@@ -1,21 +1,19 @@
from ..constants import ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, PENDING_SPAN_NAME_SUFFIX as PENDING_SPAN_NAME_SUFFIX, log_level_attributes as log_level_attributes
from ..constants import ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, ATTRIBUTES_MESSAGE_KEY as ATTRIBUTES_MESSAGE_KEY, ATTRIBUTES_MESSAGE_TEMPLATE_KEY as ATTRIBUTES_MESSAGE_TEMPLATE_KEY, ATTRIBUTES_SPAN_TYPE_KEY as ATTRIBUTES_SPAN_TYPE_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, PENDING_SPAN_NAME_SUFFIX as PENDING_SPAN_NAME_SUFFIX, log_level_attributes as log_level_attributes
from ..scrubbing import Scrubber as Scrubber
from ..utils import ReadableSpanDict as ReadableSpanDict, is_instrumentation_suppressed as is_instrumentation_suppressed, span_to_dict as span_to_dict, truncate_string as truncate_string
from .wrapper import WrapperSpanProcessor as WrapperSpanProcessor
from _typeshed import Incomplete
from opentelemetry import context as context
from opentelemetry.sdk.trace import ReadableSpan, Span as Span, SpanProcessor
from opentelemetry.sdk.trace import ReadableSpan, Span as Span, SpanProcessor as SpanProcessor
from opentelemetry.sdk.util.instrumentation import InstrumentationScope as InstrumentationScope

class SpanProcessorWrapper(SpanProcessor):
class MainSpanProcessorWrapper(WrapperSpanProcessor):
"""Wrapper around other processors to intercept starting and ending spans with our own global logic.
Suppresses starting/ending if the current context has a `suppress_instrumentation` value.
Tweaks the send/receive span names generated by the ASGI middleware.
"""
processor: Incomplete
scrubber: Incomplete
def __init__(self, processor: SpanProcessor, scrubber: Scrubber) -> None: ...
def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ...
def on_end(self, span: ReadableSpan) -> None: ...
def shutdown(self) -> None: ...
def force_flush(self, timeout_millis: int = 30000) -> bool: ...
37 changes: 37 additions & 0 deletions logfire-api/logfire_api/_internal/exporters/tail_sampling.pyi
Original file line number Diff line number Diff line change
@@ -0,0 +1,37 @@
from _typeshed import Incomplete
from dataclasses import dataclass
from functools import cached_property as cached_property
from logfire._internal.constants import ATTRIBUTES_LOG_LEVEL_NUM_KEY as ATTRIBUTES_LOG_LEVEL_NUM_KEY, LEVEL_NUMBERS as LEVEL_NUMBERS, LevelName as LevelName, ONE_SECOND_IN_NANOSECONDS as ONE_SECOND_IN_NANOSECONDS
from logfire._internal.exporters.wrapper import WrapperSpanProcessor as WrapperSpanProcessor
from opentelemetry import context as context
from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan, Span as Span, SpanProcessor as SpanProcessor

@dataclass
class TailSamplingOptions:
    """Options controlling tail sampling (described elsewhere in this commit as "not ready for general use")."""
    # Presumably the log level at/above which a trace is kept — TODO confirm against the implementation.
    level: LevelName | None = ...
    # Presumably a duration threshold (seconds, given ONE_SECOND_IN_NANOSECONDS is imported) at/above
    # which a trace is kept — TODO confirm against the implementation.
    duration: float | None = ...
    def __init__(self, level=..., duration=...) -> None: ...

@dataclass
class TraceBuffer:
    """Arguments of `on_start` and `on_end` for spans in a single trace."""
    # (span, parent_context) pairs as received by `on_start`, in call order.
    started: list[tuple[Span, context.Context | None]]
    # Spans as received by `on_end`, in call order.
    ended: list[ReadableSpan]
    @cached_property
    # NOTE(review): presumably started[0][0], i.e. the trace's first/root span — confirm in the implementation.
    def first_span(self) -> Span: ...
    def __init__(self, started, ended) -> None: ...

class TailSamplingProcessor(WrapperSpanProcessor):
    """Passes spans to the wrapped processor if any span in a trace meets the sampling criteria."""
    duration: Incomplete  # duration threshold derived from TailSamplingOptions — units not visible in this stub
    level: Incomplete  # level threshold derived from TailSamplingOptions
    random_rate: Incomplete  # presumably the probability of keeping a trace regardless of criteria — TODO confirm
    traces: Incomplete  # buffered per-trace span data (see TraceBuffer) awaiting a keep/drop decision
    lock: Incomplete  # guards `traces` against concurrent on_start/on_end — presumably a threading.Lock
    def __init__(self, processor: SpanProcessor, options: TailSamplingOptions, random_rate: float) -> None: ...
    def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ...
    def on_end(self, span: ReadableSpan) -> None: ...
    def check_span(self, span: ReadableSpan, buffer: TraceBuffer) -> bool:
        """If the span meets the sampling criteria, drop the buffer and return True. Otherwise, return False."""
    # Discard a buffered trace without exporting it.
    def drop_buffer(self, buffer: TraceBuffer) -> None: ...
    # Forward a buffered trace's spans to the wrapped processor — presumably replays started/ended; confirm.
    def push_buffer(self, buffer: TraceBuffer) -> None: ...
12 changes: 11 additions & 1 deletion logfire-api/logfire_api/_internal/exporters/wrapper.pyi
Original file line number Diff line number Diff line change
@@ -1,7 +1,8 @@
from _typeshed import Incomplete
from opentelemetry import context as context
from opentelemetry.sdk.metrics.export import AggregationTemporality as AggregationTemporality, MetricExportResult as MetricExportResult, MetricExporter, MetricsData as MetricsData
from opentelemetry.sdk.metrics.view import Aggregation as Aggregation
from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan
from opentelemetry.sdk.trace import ReadableSpan as ReadableSpan, Span as Span, SpanProcessor
from opentelemetry.sdk.trace.export import SpanExportResult as SpanExportResult, SpanExporter
from typing import Any, Sequence

Expand All @@ -20,3 +21,12 @@ class WrapperMetricExporter(MetricExporter):
def export(self, metrics_data: MetricsData, timeout_millis: float = 10000, **kwargs: Any) -> MetricExportResult: ...
def force_flush(self, timeout_millis: float = 10000) -> bool: ...
def shutdown(self, timeout_millis: float = 30000, **kwargs: Any) -> None: ...

class WrapperSpanProcessor(SpanProcessor):
    """A base class for SpanProcessors that wrap another processor."""
    processor: Incomplete  # the wrapped SpanProcessor instance
    def __init__(self, processor: SpanProcessor) -> None: ...
    # NOTE(review): the methods below presumably delegate to `self.processor`; bodies aren't visible in this stub.
    def on_start(self, span: Span, parent_context: context.Context | None = None) -> None: ...
    def on_end(self, span: ReadableSpan) -> None: ...
    def shutdown(self) -> None: ...
    def force_flush(self, timeout_millis: int = 30000) -> bool: ...
4 changes: 4 additions & 0 deletions tests/test_logfire_api.py
Original file line number Diff line number Diff line change
Expand Up @@ -142,6 +142,10 @@ def test_runtime(logfire_api_factory: Callable[[], ModuleType], module_name: str
logfire_api.StructlogProcessor()
logfire__all__.remove('StructlogProcessor')

assert hasattr(logfire_api, 'TailSamplingOptions')
logfire_api.TailSamplingOptions()
logfire__all__.remove('TailSamplingOptions')

assert hasattr(logfire_api, 'METRICS_PREFERRED_TEMPORALITY')
logfire__all__.remove('METRICS_PREFERRED_TEMPORALITY')

Expand Down

0 comments on commit d42ad35

Please sign in to comment.