chore: 添加虚拟环境到仓库

- 添加 backend_service/venv 虚拟环境
- 包含所有Python依赖包
- 注意:虚拟环境约393MB,包含12655个文件
This commit is contained in:
2025-12-03 10:19:25 +08:00
parent a6c2027caa
commit c4f851d387
12655 changed files with 3009376 additions and 0 deletions

View File

@@ -0,0 +1,224 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from abc import ABC, abstractmethod
from logging import getLogger
from os import environ
from typing import Optional, cast
from opentelemetry._logs import LogRecord
from opentelemetry._logs.severity import SeverityNumber
from opentelemetry.environment_variables import (
_OTEL_PYTHON_EVENT_LOGGER_PROVIDER,
)
from opentelemetry.trace.span import TraceFlags
from opentelemetry.util._once import Once
from opentelemetry.util._providers import _load_provider
from opentelemetry.util.types import AnyValue, _ExtendedAttributes
# Module-level logger used to report provider-configuration problems.
_logger = getLogger(__name__)
class Event(LogRecord):
    """A `LogRecord` specialized for events.

    Guarantees an ``event.name`` attribute on the record and exposes the
    event name as ``self.name``.
    """

    def __init__(
        self,
        name: str,
        timestamp: Optional[int] = None,
        trace_id: Optional[int] = None,
        span_id: Optional[int] = None,
        trace_flags: Optional["TraceFlags"] = None,
        body: Optional[AnyValue] = None,
        severity_number: Optional[SeverityNumber] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ):
        # Merge caller-supplied attributes with the mandatory event name;
        # the "event.name" entry always wins over a caller-supplied one.
        merged_attributes = dict(attributes) if attributes else {}
        merged_attributes["event.name"] = name
        super().__init__(
            timestamp=timestamp,
            trace_id=trace_id,
            span_id=span_id,
            trace_flags=trace_flags,
            body=body,
            severity_number=severity_number,
            attributes=merged_attributes,
        )
        self.name = name
class EventLogger(ABC):
    """Abstract base class for emitting :class:`Event` objects.

    Stores the instrumentation-scope identity (name, version, schema URL,
    scope attributes) supplied by the provider that created it.
    """

    def __init__(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ):
        # Instrumentation-scope identity; read by concrete implementations.
        self._name = name
        self._version = version
        self._schema_url = schema_url
        self._attributes = attributes

    @abstractmethod
    def emit(self, event: "Event") -> None:
        """Emits a :class:`Event` representing an event."""
class NoOpEventLogger(EventLogger):
    """EventLogger that silently discards every event (no-op fallback)."""

    def emit(self, event: Event) -> None:
        # Intentionally do nothing.
        pass
class ProxyEventLogger(EventLogger):
    """Stand-in EventLogger handed out before a real provider is configured.

    Delegates to the real provider's logger once a global provider is set;
    until then it falls back to a no-op logger.
    """

    def __init__(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ):
        super().__init__(
            name=name,
            version=version,
            schema_url=schema_url,
            attributes=attributes,
        )
        # Lazily-resolved delegate; stays None until a global provider exists.
        self._real_event_logger: Optional[EventLogger] = None
        self._noop_event_logger = NoOpEventLogger(name)

    @property
    def _event_logger(self) -> EventLogger:
        """Return the delegate: the real logger if resolvable, else the no-op."""
        if self._real_event_logger:
            return self._real_event_logger
        if _EVENT_LOGGER_PROVIDER:
            # A global provider appeared after this proxy was created:
            # resolve the real logger now and cache it.
            self._real_event_logger = _EVENT_LOGGER_PROVIDER.get_event_logger(
                self._name,
                self._version,
                self._schema_url,
                self._attributes,
            )
            return self._real_event_logger
        return self._noop_event_logger

    def emit(self, event: Event) -> None:
        # Forward to whichever delegate is currently active.
        self._event_logger.emit(event)
class EventLoggerProvider(ABC):
    """Entry point of the events API: hands out :class:`EventLogger` instances."""

    @abstractmethod
    def get_event_logger(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> EventLogger:
        """Returns an EventLogger for use by the given instrumentation scope."""
class NoOpEventLoggerProvider(EventLoggerProvider):
    """Provider that only ever produces no-op event loggers."""

    def get_event_logger(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> EventLogger:
        """Return a :class:`NoOpEventLogger` carrying the given scope info."""
        scope_kwargs = dict(
            version=version, schema_url=schema_url, attributes=attributes
        )
        return NoOpEventLogger(name, **scope_kwargs)
class ProxyEventLoggerProvider(EventLoggerProvider):
    """Provider used while no global provider is configured.

    Once a real provider is installed, requests are forwarded to it
    directly; before that, lazily-resolving proxies are handed out.
    """

    def get_event_logger(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> EventLogger:
        scope_kwargs = dict(
            version=version,
            schema_url=schema_url,
            attributes=attributes,
        )
        if _EVENT_LOGGER_PROVIDER:
            return _EVENT_LOGGER_PROVIDER.get_event_logger(
                name, **scope_kwargs
            )
        return ProxyEventLogger(name, **scope_kwargs)
# Guards the one-time installation of the global event-logger provider.
_EVENT_LOGGER_PROVIDER_SET_ONCE = Once()
# Global provider; None until set explicitly or loaded from the environment.
_EVENT_LOGGER_PROVIDER: Optional[EventLoggerProvider] = None
# Shared proxy returned while no global provider is configured.
_PROXY_EVENT_LOGGER_PROVIDER = ProxyEventLoggerProvider()
def get_event_logger_provider() -> EventLoggerProvider:
    """Return the global EventLoggerProvider, resolving it if needed.

    Resolution order: an already-set global provider, then one named by the
    ``_OTEL_PYTHON_EVENT_LOGGER_PROVIDER`` environment variable, then the
    shared proxy provider.
    """
    global _EVENT_LOGGER_PROVIDER  # pylint: disable=global-variable-not-assigned
    if _EVENT_LOGGER_PROVIDER is not None:
        return cast("EventLoggerProvider", _EVENT_LOGGER_PROVIDER)
    if _OTEL_PYTHON_EVENT_LOGGER_PROVIDER not in environ:
        # Nothing configured yet: hand out the proxy so callers still work.
        return _PROXY_EVENT_LOGGER_PROVIDER
    loaded_provider: EventLoggerProvider = _load_provider(  # type: ignore
        _OTEL_PYTHON_EVENT_LOGGER_PROVIDER, "event_logger_provider"
    )
    # log=False: this is first-time configuration, not an override attempt.
    _set_event_logger_provider(loaded_provider, log=False)
    return cast("EventLoggerProvider", _EVENT_LOGGER_PROVIDER)
def _set_event_logger_provider(
    event_logger_provider: EventLoggerProvider, log: bool
) -> None:
    """Install the global provider exactly once; optionally warn on repeats."""

    def _install() -> None:
        global _EVENT_LOGGER_PROVIDER  # pylint: disable=global-statement
        _EVENT_LOGGER_PROVIDER = event_logger_provider

    first_time = _EVENT_LOGGER_PROVIDER_SET_ONCE.do_once(_install)
    if log and not first_time:
        _logger.warning(
            "Overriding of current EventLoggerProvider is not allowed"
        )
def set_event_logger_provider(
    event_logger_provider: EventLoggerProvider,
) -> None:
    """Set the global EventLoggerProvider.

    May only succeed once; later attempts log a warning and are ignored.
    """
    _set_event_logger_provider(event_logger_provider, log=True)
def get_event_logger(
    name: str,
    version: Optional[str] = None,
    schema_url: Optional[str] = None,
    attributes: Optional[_ExtendedAttributes] = None,
    event_logger_provider: Optional[EventLoggerProvider] = None,
) -> "EventLogger":
    """Convenience wrapper: fetch an EventLogger from *event_logger_provider*,
    falling back to the globally-configured provider when none is given.
    """
    provider = (
        get_event_logger_provider()
        if event_logger_provider is None
        else event_logger_provider
    )
    return provider.get_event_logger(name, version, schema_url, attributes)

View File

@@ -0,0 +1,58 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The OpenTelemetry logging API describes the classes used to generate logs and events.
The :class:`.LoggerProvider` provides users access to the :class:`.Logger`.
This module provides abstract (i.e. unimplemented) classes required for
logging, and a concrete no-op implementation :class:`.NoOpLogger` that allows applications
to use the API package alone without a supporting implementation.
To get a logger, you need to provide the package name from which you are
calling the logging APIs to OpenTelemetry by calling `LoggerProvider.get_logger`
with the calling module name and the version of your package.
The following code shows how to obtain a logger using the global :class:`.LoggerProvider`::
from opentelemetry._logs import get_logger
logger = get_logger("example-logger")
.. versionadded:: 1.15.0
"""
from opentelemetry._logs._internal import (
Logger,
LoggerProvider,
LogRecord,
NoOpLogger,
NoOpLoggerProvider,
get_logger,
get_logger_provider,
set_logger_provider,
)
from opentelemetry._logs.severity import SeverityNumber
# Public names re-exported by this package from the internal implementation.
__all__ = [
    "Logger",
    "LoggerProvider",
    "LogRecord",
    "NoOpLogger",
    "NoOpLoggerProvider",
    "get_logger",
    "get_logger_provider",
    "set_logger_provider",
    "SeverityNumber",
]

View File

@@ -0,0 +1,448 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The OpenTelemetry logging API describes the classes used to generate logs and events.
The :class:`.LoggerProvider` provides users access to the :class:`.Logger`.
This module provides abstract (i.e. unimplemented) classes required for
logging, and a concrete no-op implementation :class:`.NoOpLogger` that allows applications
to use the API package alone without a supporting implementation.
To get a logger, you need to provide the package name from which you are
calling the logging APIs to OpenTelemetry by calling `LoggerProvider.get_logger`
with the calling module name and the version of your package.
The following code shows how to obtain a logger using the global :class:`.LoggerProvider`::
from opentelemetry._logs import get_logger
logger = get_logger("example-logger")
.. versionadded:: 1.15.0
"""
from __future__ import annotations
from abc import ABC, abstractmethod
from logging import getLogger
from os import environ
from time import time_ns
from typing import Optional, cast, overload
from typing_extensions import deprecated
from opentelemetry._logs.severity import SeverityNumber
from opentelemetry.context import get_current
from opentelemetry.context.context import Context
from opentelemetry.environment_variables import _OTEL_PYTHON_LOGGER_PROVIDER
from opentelemetry.trace import get_current_span
from opentelemetry.trace.span import TraceFlags
from opentelemetry.util._once import Once
from opentelemetry.util._providers import _load_provider
from opentelemetry.util.types import AnyValue, _ExtendedAttributes
# Module-level logger used to report provider-configuration problems.
_logger = getLogger(__name__)
class LogRecord(ABC):
    """A LogRecord instance represents an event being logged.

    LogRecord instances are created and emitted via `Logger`
    every time something is logged. They contain all the information
    pertinent to the event being logged.
    """

    # Preferred constructor form: derive trace identity from `context`.
    @overload
    def __init__(
        self,
        *,
        timestamp: Optional[int] = None,
        observed_timestamp: Optional[int] = None,
        context: Optional[Context] = None,
        severity_text: Optional[str] = None,
        severity_number: Optional[SeverityNumber] = None,
        body: AnyValue = None,
        attributes: Optional[_ExtendedAttributes] = None,
        event_name: Optional[str] = None,
    ) -> None: ...

    # Deprecated constructor form: explicit trace_id/span_id/trace_flags.
    @overload
    @deprecated(
        "LogRecord init with `trace_id`, `span_id`, and/or `trace_flags` is deprecated since 1.35.0. Use `context` instead."
    )
    def __init__(
        self,
        *,
        timestamp: Optional[int] = None,
        observed_timestamp: Optional[int] = None,
        trace_id: Optional[int] = None,
        span_id: Optional[int] = None,
        trace_flags: Optional["TraceFlags"] = None,
        severity_text: Optional[str] = None,
        severity_number: Optional[SeverityNumber] = None,
        body: AnyValue = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> None: ...

    def __init__(
        self,
        *,
        timestamp: Optional[int] = None,
        observed_timestamp: Optional[int] = None,
        context: Optional[Context] = None,
        trace_id: Optional[int] = None,
        span_id: Optional[int] = None,
        trace_flags: Optional["TraceFlags"] = None,
        severity_text: Optional[str] = None,
        severity_number: Optional[SeverityNumber] = None,
        body: AnyValue = None,
        attributes: Optional[_ExtendedAttributes] = None,
        event_name: Optional[str] = None,
    ) -> None:
        # Fall back to the current context when none was supplied.
        if not context:
            context = get_current()
        span_context = get_current_span(context).get_span_context()
        self.timestamp = timestamp
        # observed_timestamp defaults to "now" in nanoseconds.
        if observed_timestamp is None:
            observed_timestamp = time_ns()
        self.observed_timestamp = observed_timestamp
        self.context = context
        # Explicitly-passed ids (deprecated path) win over the ids taken
        # from the context's current span.
        self.trace_id = trace_id or span_context.trace_id
        self.span_id = span_id or span_context.span_id
        self.trace_flags = trace_flags or span_context.trace_flags
        self.severity_text = severity_text
        self.severity_number = severity_number
        self.body = body
        self.attributes = attributes
        self.event_name = event_name
class Logger(ABC):
    """Handles emitting events and logs via `LogRecord`."""

    def __init__(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> None:
        super().__init__()
        # Instrumentation-scope identity supplied by the provider.
        self._name = name
        self._version = version
        self._schema_url = schema_url
        self._attributes = attributes

    # Overload 1: build the LogRecord from individual keyword fields.
    @overload
    def emit(
        self,
        *,
        timestamp: int | None = None,
        observed_timestamp: int | None = None,
        context: Context | None = None,
        severity_number: SeverityNumber | None = None,
        severity_text: str | None = None,
        body: AnyValue | None = None,
        attributes: _ExtendedAttributes | None = None,
        event_name: str | None = None,
    ) -> None: ...

    # Overload 2: emit an already-constructed LogRecord.
    @overload
    def emit(
        self,
        record: LogRecord,
    ) -> None: ...

    @abstractmethod
    def emit(
        self,
        record: LogRecord | None = None,
        *,
        timestamp: int | None = None,
        observed_timestamp: int | None = None,
        context: Context | None = None,
        severity_number: SeverityNumber | None = None,
        severity_text: str | None = None,
        body: AnyValue | None = None,
        attributes: _ExtendedAttributes | None = None,
        event_name: str | None = None,
    ) -> None:
        """Emits a :class:`LogRecord` representing a log to the processing pipeline."""
class NoOpLogger(Logger):
    """The default Logger used when no Logger implementation is available.

    All operations are no-op.
    """

    @overload
    def emit(
        self,
        *,
        timestamp: int | None = None,
        observed_timestamp: int | None = None,
        context: Context | None = None,
        severity_number: SeverityNumber | None = None,
        severity_text: str | None = None,
        body: AnyValue | None = None,
        attributes: _ExtendedAttributes | None = None,
        event_name: str | None = None,
    ) -> None: ...

    @overload
    def emit(  # pylint:disable=arguments-differ
        self,
        record: LogRecord,
    ) -> None: ...

    def emit(
        self,
        record: LogRecord | None = None,
        *,
        timestamp: int | None = None,
        observed_timestamp: int | None = None,
        context: Context | None = None,
        severity_number: SeverityNumber | None = None,
        severity_text: str | None = None,
        body: AnyValue | None = None,
        attributes: _ExtendedAttributes | None = None,
        event_name: str | None = None,
    ) -> None:
        # Intentionally discards everything.
        pass
class ProxyLogger(Logger):
    """Logger placeholder returned before a global LoggerProvider is set.

    Resolves the real provider's logger lazily on first use; until a
    provider exists, all emits go to a no-op logger.
    """

    def __init__(  # pylint: disable=super-init-not-called
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ):
        # Mirrors Logger.__init__ without calling it (see pylint disable).
        self._name = name
        self._version = version
        self._schema_url = schema_url
        self._attributes = attributes
        # Cached real logger; None until the global provider is available.
        self._real_logger: Optional[Logger] = None
        self._noop_logger = NoOpLogger(name)

    @property
    def _logger(self) -> Logger:
        """Return the delegate: the real logger when resolvable, else the no-op."""
        if self._real_logger:
            return self._real_logger
        if _LOGGER_PROVIDER:
            # A global provider was installed after this proxy was created:
            # resolve the real logger now and cache it.
            self._real_logger = _LOGGER_PROVIDER.get_logger(
                self._name,
                self._version,
                self._schema_url,
                self._attributes,
            )
            return self._real_logger
        return self._noop_logger

    @overload
    def emit(
        self,
        *,
        timestamp: int | None = None,
        observed_timestamp: int | None = None,
        context: Context | None = None,
        severity_number: SeverityNumber | None = None,
        severity_text: str | None = None,
        body: AnyValue | None = None,
        attributes: _ExtendedAttributes | None = None,
        event_name: str | None = None,
    ) -> None: ...

    @overload
    def emit(  # pylint:disable=arguments-differ
        self,
        record: LogRecord,
    ) -> None: ...

    def emit(
        self,
        record: LogRecord | None = None,
        *,
        timestamp: int | None = None,
        observed_timestamp: int | None = None,
        context: Context | None = None,
        severity_number: SeverityNumber | None = None,
        severity_text: str | None = None,
        body: AnyValue | None = None,
        attributes: _ExtendedAttributes | None = None,
        event_name: str | None = None,
    ) -> None:
        # Forward the call, in whichever form it was made, to the delegate.
        if record:
            self._logger.emit(record)
        else:
            self._logger.emit(
                timestamp=timestamp,
                observed_timestamp=observed_timestamp,
                context=context,
                severity_number=severity_number,
                severity_text=severity_text,
                body=body,
                attributes=attributes,
                event_name=event_name,
            )
class LoggerProvider(ABC):
    """
    LoggerProvider is the entry point of the API. It provides access to Logger instances.
    """

    @abstractmethod
    def get_logger(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> Logger:
        """Returns a `Logger` for use by the given instrumentation library.

        For any two calls with identical parameters, it is undefined whether the same
        or different `Logger` instances are returned.

        This function may return different `Logger` types (e.g. a no-op logger
        vs. a functional logger).

        Args:
            name: The name of the instrumenting module, package or class.
                This should *not* be the name of the module, package or class that is
                instrumented but the name of the code doing the instrumentation.
                E.g., instead of ``"requests"``, use
                ``"opentelemetry.instrumentation.requests"``.
                For log sources which define a logger name (e.g. logging.Logger.name)
                the Logger Name should be recorded as the instrumentation scope name.
            version: Optional. The version string of the
                instrumenting library. Usually this should be the same as
                ``importlib.metadata.version(instrumenting_library_name)``.
            schema_url: Optional. Specifies the Schema URL of the emitted telemetry.
            attributes: Optional. Specifies the instrumentation scope attributes to
                associate with emitted telemetry.
        """
class NoOpLoggerProvider(LoggerProvider):
    """The default LoggerProvider used when no LoggerProvider implementation is available."""

    def get_logger(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> Logger:
        """Create and return a fresh :class:`NoOpLogger` for this scope."""
        scope_kwargs = dict(
            version=version, schema_url=schema_url, attributes=attributes
        )
        return NoOpLogger(name, **scope_kwargs)
class ProxyLoggerProvider(LoggerProvider):
    """Provider used while no global LoggerProvider is configured.

    Forwards to the real provider once one is installed; before that it
    hands out lazily-resolving proxy loggers.
    """

    def get_logger(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[_ExtendedAttributes] = None,
    ) -> Logger:
        scope_kwargs = dict(
            version=version,
            schema_url=schema_url,
            attributes=attributes,
        )
        if _LOGGER_PROVIDER:
            return _LOGGER_PROVIDER.get_logger(name, **scope_kwargs)
        return ProxyLogger(name, **scope_kwargs)
# Guards the one-time installation of the global logger provider.
_LOGGER_PROVIDER_SET_ONCE = Once()
# Global provider; None until set explicitly or loaded from the environment.
_LOGGER_PROVIDER: Optional[LoggerProvider] = None
# Shared proxy returned while no global provider is configured.
_PROXY_LOGGER_PROVIDER = ProxyLoggerProvider()
def get_logger_provider() -> LoggerProvider:
    """Gets the current global :class:`~.LoggerProvider` object.

    Resolution order: an already-set global provider, then one named by the
    ``_OTEL_PYTHON_LOGGER_PROVIDER`` environment variable, then the shared
    proxy provider.
    """
    global _LOGGER_PROVIDER  # pylint: disable=global-variable-not-assigned
    if _LOGGER_PROVIDER is not None:
        return cast("LoggerProvider", _LOGGER_PROVIDER)
    if _OTEL_PYTHON_LOGGER_PROVIDER not in environ:
        # Not configured: hand out the proxy, which resolves lazily later.
        return _PROXY_LOGGER_PROVIDER
    loaded_provider: LoggerProvider = _load_provider(  # type: ignore
        _OTEL_PYTHON_LOGGER_PROVIDER, "logger_provider"
    )
    # log=False: first-time configuration, not an override attempt.
    _set_logger_provider(loaded_provider, log=False)
    # _LOGGER_PROVIDER will have been set by one thread
    return cast("LoggerProvider", _LOGGER_PROVIDER)
def _set_logger_provider(logger_provider: LoggerProvider, log: bool) -> None:
    """Install the global LoggerProvider exactly once; optionally warn on repeats."""

    def _install() -> None:
        global _LOGGER_PROVIDER  # pylint: disable=global-statement
        _LOGGER_PROVIDER = logger_provider

    first_time = _LOGGER_PROVIDER_SET_ONCE.do_once(_install)
    if log and not first_time:
        _logger.warning("Overriding of current LoggerProvider is not allowed")
def set_logger_provider(logger_provider: LoggerProvider) -> None:
    """Sets the current global :class:`~.LoggerProvider` object.

    May only succeed once; any later attempt logs a warning and is ignored.
    """
    # log=True: an already-set provider should produce the override warning.
    _set_logger_provider(logger_provider, log=True)
def get_logger(
    instrumenting_module_name: str,
    instrumenting_library_version: str = "",
    logger_provider: Optional[LoggerProvider] = None,
    schema_url: Optional[str] = None,
    attributes: Optional[_ExtendedAttributes] = None,
) -> "Logger":
    """Returns a `Logger` for use within a python process.

    This function is a convenience wrapper for
    opentelemetry.sdk._logs.LoggerProvider.get_logger.

    If logger_provider param is omitted the current configured one is used.
    """
    provider = (
        get_logger_provider() if logger_provider is None else logger_provider
    )
    return provider.get_logger(
        instrumenting_module_name,
        instrumenting_library_version,
        schema_url,
        attributes,
    )

View File

@@ -0,0 +1,55 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import enum
class SeverityNumber(enum.Enum):
    """Numerical value of severity.

    Smaller numerical values correspond to less severe events
    (such as debug events), larger numerical values correspond
    to more severe events (such as errors and critical events).

    See the `Log Data Model`_ spec for more info and how to map the
    severity from source format to OTLP Model.

    .. _Log Data Model: https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/logs/data-model.md#field-severitynumber
    """

    # 0 means the severity was not specified by the producer.
    UNSPECIFIED = 0
    # 1-4: TRACE range (finest-grained diagnostic detail).
    TRACE = 1
    TRACE2 = 2
    TRACE3 = 3
    TRACE4 = 4
    # 5-8: DEBUG range.
    DEBUG = 5
    DEBUG2 = 6
    DEBUG3 = 7
    DEBUG4 = 8
    # 9-12: INFO range.
    INFO = 9
    INFO2 = 10
    INFO3 = 11
    INFO4 = 12
    # 13-16: WARN range.
    WARN = 13
    WARN2 = 14
    WARN3 = 15
    WARN4 = 16
    # 17-20: ERROR range.
    ERROR = 17
    ERROR2 = 18
    ERROR3 = 19
    ERROR4 = 20
    # 21-24: FATAL range (most severe).
    FATAL = 21
    FATAL2 = 22
    FATAL3 = 23
    FATAL4 = 24

View File

@@ -0,0 +1,314 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import threading
from collections import OrderedDict
from collections.abc import MutableMapping
from typing import Mapping, Optional, Sequence, Tuple, Union
from opentelemetry.util import types
# bytes are accepted as a user supplied value for attributes but
# decoded to strings internally.
_VALID_ATTR_VALUE_TYPES = (bool, str, bytes, int, float)
# AnyValue possible values
# (extended attributes also allow None, nested Sequences and Mappings).
_VALID_ANY_VALUE_TYPES = (
    type(None),
    bool,
    bytes,
    int,
    float,
    str,
    Sequence,
    Mapping,
)
# Module-level logger for attribute-validation warnings.
_logger = logging.getLogger(__name__)
def _clean_attribute(
    key: str, value: types.AttributeValue, max_len: Optional[int]
) -> Optional[Union[types.AttributeValue, Tuple[Union[str, int, float], ...]]]:
    """Checks if attribute value is valid and cleans it if required.

    The function returns the cleaned value or None if the value is not valid.

    An attribute value is valid if it is either:
        - A primitive type: string, boolean, double precision floating
          point (IEEE 754-1985) or integer.
        - An array of primitive type values. The array MUST be homogeneous,
          i.e. it MUST NOT contain values of different types.

    An attribute needs cleansing if:
        - Its length is greater than the maximum allowed length.
        - It needs to be encoded/decoded e.g, bytes to strings.
    """
    if not (key and isinstance(key, str)):
        _logger.warning("invalid key `%s`. must be non-empty string.", key)
        return None
    if isinstance(value, _VALID_ATTR_VALUE_TYPES):
        return _clean_attribute_value(value, max_len)
    if isinstance(value, Sequence):
        sequence_first_valid_type = None
        cleaned_seq = []
        for element in value:
            # Decode/truncate each element before the type checks below.
            element = _clean_attribute_value(element, max_len)  # type: ignore
            if element is None:
                # None elements are preserved in the cleaned sequence.
                cleaned_seq.append(element)
                continue
            element_type = type(element)
            # Reject attribute value if sequence contains a value with an incompatible type.
            if element_type not in _VALID_ATTR_VALUE_TYPES:
                _logger.warning(
                    "Invalid type %s in attribute '%s' value sequence. Expected one of "
                    "%s or None",
                    element_type.__name__,
                    key,
                    [
                        valid_type.__name__
                        for valid_type in _VALID_ATTR_VALUE_TYPES
                    ],
                )
                return None
            # The type of the sequence must be homogeneous. The first non-None
            # element determines the type of the sequence
            if sequence_first_valid_type is None:
                sequence_first_valid_type = element_type
            # use equality instead of isinstance as isinstance(True, int) evaluates to True
            elif element_type != sequence_first_valid_type:
                _logger.warning(
                    "Attribute %r mixes types %s and %s in attribute value sequence",
                    key,
                    sequence_first_valid_type.__name__,
                    type(element).__name__,
                )
                return None
            cleaned_seq.append(element)
        # Freeze mutable sequences defensively
        return tuple(cleaned_seq)
    _logger.warning(
        "Invalid type %s for attribute '%s' value. Expected one of %s or a "
        "sequence of those types",
        type(value).__name__,
        key,
        [valid_type.__name__ for valid_type in _VALID_ATTR_VALUE_TYPES],
    )
    return None
def _clean_extended_attribute_value(
    value: types.AnyValue, max_len: Optional[int]
) -> types.AnyValue:
    """Recursively clean an extended (AnyValue) attribute value.

    Strings are truncated to *max_len*; mappings and sequences are cleaned
    recursively. Raises TypeError for values outside _VALID_ANY_VALUE_TYPES.
    """
    # for primitive types just return the value and eventually shorten the string length
    if value is None or isinstance(value, _VALID_ATTR_VALUE_TYPES):
        if max_len is not None and isinstance(value, str):
            value = value[:max_len]
        return value
    if isinstance(value, Mapping):
        cleaned_dict: dict[str, types.AnyValue] = {}
        for key, element in value.items():
            # skip invalid keys
            if not (key and isinstance(key, str)):
                _logger.warning(
                    "invalid key `%s`. must be non-empty string.", key
                )
                continue
            cleaned_dict[key] = _clean_extended_attribute(
                key=key, value=element, max_len=max_len
            )
        return cleaned_dict
    if isinstance(value, Sequence):
        sequence_first_valid_type = None
        cleaned_seq: list[types.AnyValue] = []
        for element in value:
            if element is None:
                # None elements are preserved in the cleaned sequence.
                cleaned_seq.append(element)
                continue
            if max_len is not None and isinstance(element, str):
                element = element[:max_len]
            element_type = type(element)
            if element_type not in _VALID_ATTR_VALUE_TYPES:
                # Non-primitive element: clean it recursively first.
                element = _clean_extended_attribute_value(
                    element, max_len=max_len
                )
                element_type = type(element)  # type: ignore
            # The type of the sequence must be homogeneous. The first non-None
            # element determines the type of the sequence
            if sequence_first_valid_type is None:
                sequence_first_valid_type = element_type
            # use equality instead of isinstance as isinstance(True, int) evaluates to True
            elif element_type != sequence_first_valid_type:
                _logger.warning(
                    "Mixed types %s and %s in attribute value sequence",
                    sequence_first_valid_type.__name__,
                    type(element).__name__,
                )
                return None
            cleaned_seq.append(element)
        # Freeze mutable sequences defensively
        return tuple(cleaned_seq)
    raise TypeError(
        f"Invalid type {type(value).__name__} for attribute value. "
        f"Expected one of {[valid_type.__name__ for valid_type in _VALID_ANY_VALUE_TYPES]} or a "
        "sequence of those types",
    )
def _clean_extended_attribute(
    key: str, value: types.AnyValue, max_len: Optional[int]
) -> types.AnyValue:
    """Validate and clean an extended attribute.

    Returns the cleaned value, or None when the key is not a non-empty
    string or the value is not a valid AnyValue. Cleansing truncates
    strings longer than *max_len*.
    """
    # Guard clause: reject empty or non-string keys up front.
    if not key or not isinstance(key, str):
        _logger.warning("invalid key `%s`. must be non-empty string.", key)
        return None
    try:
        return _clean_extended_attribute_value(value, max_len=max_len)
    except TypeError as exception:
        # Invalid value type: warn and drop the attribute.
        _logger.warning("Attribute %s: %s", key, exception)
        return None
def _clean_attribute_value(
    value: types.AttributeValue, limit: Optional[int]
) -> Optional[types.AttributeValue]:
    """Decode bytes to str and truncate strings to *limit* characters.

    Returns None for a None input or for bytes that cannot be decoded.
    """
    if value is None:
        return None
    cleaned = value
    if isinstance(cleaned, bytes):
        # Bytes are accepted from users but stored as text internally.
        try:
            cleaned = cleaned.decode()
        except UnicodeDecodeError:
            _logger.warning("Byte attribute could not be decoded.")
            return None
    if isinstance(cleaned, str) and limit is not None:
        cleaned = cleaned[:limit]
    return cleaned
class BoundedAttributes(MutableMapping):  # type: ignore
    """An ordered dict with a fixed max capacity.

    Oldest elements are dropped when the dict is full and a new element is
    added.
    """

    def __init__(
        self,
        maxlen: Optional[int] = None,
        attributes: Optional[types._ExtendedAttributes] = None,
        immutable: bool = True,
        max_value_len: Optional[int] = None,
        extended_attributes: bool = False,
    ):
        if maxlen is not None:
            if not isinstance(maxlen, int) or maxlen < 0:
                raise ValueError(
                    "maxlen must be valid int greater or equal to 0"
                )
        self.maxlen = maxlen
        # Count of attributes discarded because of the capacity limit.
        self.dropped = 0
        # Maximum length applied to string values (None = unlimited).
        self.max_value_len = max_value_len
        # When True, values are cleaned as extended (AnyValue) attributes.
        self._extended_attributes = extended_attributes
        # OrderedDict is not used until the maxlen is reached for efficiency.
        self._dict: Union[
            MutableMapping[str, types.AnyValue],
            OrderedDict[str, types.AnyValue],
        ] = {}
        self._lock = threading.RLock()
        if attributes:
            for key, value in attributes.items():
                self[key] = value
        # Set last so the seeding loop above can still write via __setitem__.
        self._immutable = immutable

    def __repr__(self) -> str:
        return f"{dict(self._dict)}"

    def __getitem__(self, key: str) -> types.AnyValue:
        return self._dict[key]

    def __setitem__(self, key: str, value: types.AnyValue) -> None:
        # getattr with a default: during __init__, _immutable is not yet set.
        if getattr(self, "_immutable", False):  # type: ignore
            raise TypeError
        with self._lock:
            # A capacity of zero drops everything.
            if self.maxlen is not None and self.maxlen == 0:
                self.dropped += 1
                return
            if self._extended_attributes:
                value = _clean_extended_attribute(
                    key, value, self.max_value_len
                )
            else:
                value = _clean_attribute(key, value, self.max_value_len)  # type: ignore
                # Non-extended cleaning returns None for invalid values;
                # such values are silently discarded (warned in the cleaner).
                if value is None:
                    return
            if key in self._dict:
                # Re-inserting an existing key moves it to the newest slot.
                del self._dict[key]
            elif self.maxlen is not None and len(self._dict) == self.maxlen:
                # Switch to OrderedDict lazily, only once eviction starts.
                if not isinstance(self._dict, OrderedDict):
                    self._dict = OrderedDict(self._dict)
                self._dict.popitem(last=False)  # type: ignore
                self.dropped += 1
            self._dict[key] = value  # type: ignore

    def __delitem__(self, key: str) -> None:
        if getattr(self, "_immutable", False):  # type: ignore
            raise TypeError
        with self._lock:
            del self._dict[key]

    def __iter__(self):  # type: ignore
        with self._lock:
            # Iterate over a snapshot so concurrent mutation cannot break it.
            return iter(self._dict.copy())  # type: ignore

    def __len__(self) -> int:
        return len(self._dict)

    def copy(self):  # type: ignore
        return self._dict.copy()  # type: ignore

View File

@@ -0,0 +1,136 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from logging import getLogger
from re import compile
from types import MappingProxyType
from typing import Dict, Mapping, Optional
from opentelemetry.context import create_key, get_value, set_value
from opentelemetry.context.context import Context
from opentelemetry.util.re import (
_BAGGAGE_PROPERTY_FORMAT,
_KEY_FORMAT,
_VALUE_FORMAT,
)
# Context key under which the baggage mapping is stored.
_BAGGAGE_KEY = create_key("baggage")
_logger = getLogger(__name__)
# Pre-compiled validators for the W3C baggage key/value/property grammar.
_KEY_PATTERN = compile(_KEY_FORMAT)
_VALUE_PATTERN = compile(_VALUE_FORMAT)
# NOTE: "_PROPERT_PATTERN" (sic) — historical name kept so this edit stays
# self-contained; renaming would require touching every user of the name.
_PROPERT_PATTERN = compile(_BAGGAGE_PROPERTY_FORMAT)
def get_all(
    context: Optional[Context] = None,
) -> Mapping[str, object]:
    """Returns the name/value pairs in the Baggage

    Args:
        context: The Context to use. If not set, uses current Context

    Returns:
        A read-only view of the name/value pairs in the Baggage
    """
    baggage = _get_baggage_value(context=context)
    # Wrap in a read-only proxy so callers cannot mutate the stored dict.
    return MappingProxyType(baggage)
def get_baggage(
    name: str, context: Optional[Context] = None
) -> Optional[object]:
    """Provides access to the value for a name/value pair in the Baggage

    Args:
        name: The name of the value to retrieve
        context: The Context to use. If not set, uses current Context

    Returns:
        The value associated with the given name, or None if the given name is
        not present.
    """
    entries = _get_baggage_value(context=context)
    return entries.get(name)
def set_baggage(
    name: str, value: object, context: Optional[Context] = None
) -> Context:
    """Sets a value in the Baggage

    Args:
        name: The name of the value to set
        value: The value to set
        context: The Context to use. If not set, uses current Context

    Returns:
        A Context with the value updated
    """
    # Copy before mutating so the baggage stored in the source context
    # is never changed in place.
    updated = dict(_get_baggage_value(context=context))
    updated[name] = value
    return set_value(_BAGGAGE_KEY, updated, context=context)
def remove_baggage(name: str, context: Optional[Context] = None) -> Context:
    """Removes a value from the Baggage

    Args:
        name: The name of the value to remove
        context: The Context to use. If not set, uses current Context

    Returns:
        A Context with the name/value removed
    """
    # Work on a copy; removing a missing name is a no-op (pop with default).
    remaining = dict(_get_baggage_value(context=context))
    remaining.pop(name, None)
    return set_value(_BAGGAGE_KEY, remaining, context=context)
def clear(context: Optional[Context] = None) -> Context:
    """Removes all values from the Baggage

    Args:
        context: The Context to use. If not set, uses current Context

    Returns:
        A Context with all baggage entries removed
    """
    empty_baggage: Dict[str, object] = {}
    return set_value(_BAGGAGE_KEY, empty_baggage, context=context)
def _get_baggage_value(context: Optional[Context] = None) -> Dict[str, object]:
    """Return the raw baggage dict stored in *context*, or an empty dict.

    Anything stored under the baggage key that is not a dict is ignored.
    """
    stored = get_value(_BAGGAGE_KEY, context=context)
    if not isinstance(stored, dict):
        return {}
    return stored
def _is_valid_key(name: str) -> bool:
    """Check that *name* fully matches the baggage key grammar."""
    match = _KEY_PATTERN.fullmatch(str(name))
    return match is not None
def _is_valid_value(value: object) -> bool:
    """Check that *value* is a syntactically valid baggage value.

    The value may be followed by semicolon-separated properties metadata;
    the first segment must match the value grammar and every property
    segment must match the property grammar.
    """
    parts = str(value).split(";")
    if _VALUE_PATTERN.fullmatch(parts[0]) is None:
        return False
    # Remaining segments (if any) are properties metadata; all must be valid.
    # (Renamed loop variable: the original shadowed the builtin `property`.)
    return all(
        _PROPERT_PATTERN.fullmatch(prop) is not None for prop in parts[1:]
    )
def _is_valid_pair(key: str, value: str) -> bool:
    """A pair is valid when both its key and its value are valid."""
    if not _is_valid_key(key):
        return False
    return _is_valid_value(value)

View File

@@ -0,0 +1,146 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from logging import getLogger
from re import split
from typing import Iterable, List, Mapping, Optional, Set
from urllib.parse import quote_plus, unquote_plus
from opentelemetry.baggage import _is_valid_pair, get_all, set_baggage
from opentelemetry.context import get_current
from opentelemetry.context.context import Context
from opentelemetry.propagators import textmap
from opentelemetry.util.re import _DELIMITER_PATTERN
_logger = getLogger(__name__)
class W3CBaggagePropagator(textmap.TextMapPropagator):
    """Extracts and injects Baggage which is used to annotate telemetry.

    Implements W3C Baggage header propagation. The size limits below
    mirror the limits given by the W3C Baggage specification.
    """

    _MAX_HEADER_LENGTH = 8192  # max bytes for the whole baggage-string
    _MAX_PAIR_LENGTH = 4096  # max bytes for one list-member
    _MAX_PAIRS = 180  # max number of list-members
    _BAGGAGE_HEADER_NAME = "baggage"

    def extract(
        self,
        carrier: textmap.CarrierT,
        context: Optional[Context] = None,
        getter: textmap.Getter[textmap.CarrierT] = textmap.default_getter,
    ) -> Context:
        """Extract Baggage from the carrier.

        See
        `opentelemetry.propagators.textmap.TextMapPropagator.extract`
        """
        if context is None:
            context = get_current()

        header = _extract_first_element(
            getter.get(carrier, self._BAGGAGE_HEADER_NAME)
        )

        if not header:
            return context

        # An oversized header is dropped entirely.
        if len(header) > self._MAX_HEADER_LENGTH:
            _logger.warning(
                "Baggage header `%s` exceeded the maximum number of bytes per baggage-string",
                header,
            )
            return context

        baggage_entries: List[str] = split(_DELIMITER_PATTERN, header)
        total_baggage_entries = self._MAX_PAIRS

        # An oversized *list* is only truncated (first _MAX_PAIRS valid
        # entries win), not dropped.
        if len(baggage_entries) > self._MAX_PAIRS:
            _logger.warning(
                "Baggage header `%s` exceeded the maximum number of list-members",
                header,
            )

        for entry in baggage_entries:
            if len(entry) > self._MAX_PAIR_LENGTH:
                _logger.warning(
                    "Baggage entry `%s` exceeded the maximum number of bytes per list-member",
                    entry,
                )
                continue
            if not entry:  # empty string
                continue
            try:
                name, value = entry.split("=", 1)
            except ValueError:
                # str.split itself never raises here; unpacking fails with
                # ValueError when the list-member contains no "=".
                _logger.warning(
                    "Baggage list-member `%s` doesn't match the format", entry
                )
                continue

            if not _is_valid_pair(name, value):
                _logger.warning("Invalid baggage entry: `%s`", entry)
                continue

            # Percent-decoding happens only after validation.
            name = unquote_plus(name).strip()
            value = unquote_plus(value).strip()

            context = set_baggage(
                name,
                value,
                context=context,
            )
            total_baggage_entries -= 1
            if total_baggage_entries == 0:
                break

        return context

    def inject(
        self,
        carrier: textmap.CarrierT,
        context: Optional[Context] = None,
        setter: textmap.Setter[textmap.CarrierT] = textmap.default_setter,
    ) -> None:
        """Injects Baggage into the carrier.

        See
        `opentelemetry.propagators.textmap.TextMapPropagator.inject`
        """
        baggage_entries = get_all(context=context)
        if not baggage_entries:
            # Nothing to propagate; leave the carrier untouched.
            return

        baggage_string = _format_baggage(baggage_entries)
        setter.set(carrier, self._BAGGAGE_HEADER_NAME, baggage_string)

    @property
    def fields(self) -> Set[str]:
        """Returns a set with the fields set in `inject`."""
        return {self._BAGGAGE_HEADER_NAME}
def _format_baggage(baggage_entries: Mapping[str, object]) -> str:
return ",".join(
quote_plus(str(key)) + "=" + quote_plus(str(value))
for key, value in baggage_entries.items()
)
def _extract_first_element(
items: Optional[Iterable[textmap.CarrierT]],
) -> Optional[textmap.CarrierT]:
if items is None:
return None
return next(iter(items), None)

View File

@@ -0,0 +1,176 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
import typing
from contextvars import Token
from os import environ
from uuid import uuid4
# pylint: disable=wrong-import-position
from opentelemetry.context.context import Context, _RuntimeContext # noqa
from opentelemetry.environment_variables import OTEL_PYTHON_CONTEXT
from opentelemetry.util._importlib_metadata import entry_points
logger = logging.getLogger(__name__)
def _load_runtime_context() -> _RuntimeContext:
    """Initialize the RuntimeContext

    Resolves an "opentelemetry_context" entry point by the name given in
    the OTEL_PYTHON_CONTEXT environment variable and instantiates it,
    falling back to the contextvars-based implementation on any failure.

    Returns:
        An instance of RuntimeContext.
    """

    # FIXME use a better implementation of a configuration manager
    # to avoid having to get configuration values straight from
    # environment variables

    default_context = "contextvars_context"

    configured_context = environ.get(OTEL_PYTHON_CONTEXT, default_context)  # type: str
    try:
        # entry_points() returns an iterable; take the first (and only)
        # entry point with the configured name, load it, and call it.
        return next(  # type: ignore
            iter(  # type: ignore
                entry_points(  # type: ignore
                    group="opentelemetry_context",
                    name=configured_context,
                )
            )
        ).load()()
    except Exception:  # pylint: disable=broad-exception-caught
        # Any failure (missing entry point, import error, constructor
        # error) is logged and the default implementation is used instead.
        logger.exception(
            "Failed to load context: %s, fallback to %s",
            configured_context,
            default_context,
        )
        return next(  # type: ignore
            iter(  # type: ignore
                entry_points(  # type: ignore
                    group="opentelemetry_context",
                    name=default_context,
                )
            )
        ).load()()
_RUNTIME_CONTEXT = _load_runtime_context()
def create_key(keyname: str) -> str:
    """To allow cross-cutting concern to control access to their local state,
    the RuntimeContext API provides a function which takes a keyname as input,
    and returns a unique key.

    Args:
        keyname: The key name is for debugging purposes and is not required to be unique.

    Returns:
        A unique string representing the newly created key.
    """
    # Uniqueness comes from the random UUID suffix, not from keyname.
    return f"{keyname}-{uuid4()}"
def get_value(key: str, context: typing.Optional[Context] = None) -> "object":
    """To access the local state of a concern, the RuntimeContext API
    provides a function which takes a context and a key as input,
    and returns a value.

    Args:
        key: The key of the value to retrieve.
        context: The context from which to retrieve the value, if None, the current context is used.

    Returns:
        The value associated with the key.
    """
    source = get_current() if context is None else context
    return source.get(key)
def set_value(
    key: str, value: "object", context: typing.Optional[Context] = None
) -> Context:
    """To record the local state of a cross-cutting concern, the
    RuntimeContext API provides a function which takes a context, a
    key, and a value as input, and returns an updated context
    which contains the new value.

    Args:
        key: The key of the entry to set.
        value: The value of the entry to set.
        context: The context to copy, if None, the current context is used.

    Returns:
        A new `Context` containing the value set.
    """
    base = get_current() if context is None else context
    # Context blocks item assignment, so build the new mapping from a
    # plain copy and wrap it in a fresh Context.
    merged = base.copy()
    merged[key] = value
    return Context(merged)
def get_current() -> Context:
    """To access the context associated with program execution,
    the Context API provides a function which takes no arguments
    and returns a Context.

    Returns:
        The current `Context` object.
    """
    # Delegates to the runtime-context implementation chosen at import time.
    return _RUNTIME_CONTEXT.get_current()
def attach(context: Context) -> Token[Context]:
    """Associates a Context with the caller's current execution unit. Returns
    a token that can be used to restore the previous Context.

    Args:
        context: The Context to set as current.

    Returns:
        A token that can be used with `detach` to reset the context.
    """
    return _RUNTIME_CONTEXT.attach(context)
def detach(token: Token[Context]) -> None:
    """Resets the Context associated with the caller's current execution unit
    to the value it had before attaching a specified Context.

    Detach failures (e.g. a token used twice, or in the wrong execution
    unit) are logged rather than raised.

    Args:
        token: The Token that was returned by a previous call to attach a Context.
    """
    try:
        _RUNTIME_CONTEXT.detach(token)
    except Exception:  # pylint: disable=broad-exception-caught
        logger.exception("Failed to detach context")
# FIXME This is a temporary location for the suppress instrumentation key.
# Once the decision around how to suppress instrumentation is made in the
# spec, this key should be moved accordingly.
_SUPPRESS_INSTRUMENTATION_KEY = create_key("suppress_instrumentation")
_SUPPRESS_HTTP_INSTRUMENTATION_KEY = create_key(
"suppress_http_instrumentation"
)
__all__ = [
"Context",
"attach",
"create_key",
"detach",
"get_current",
"get_value",
"set_value",
]

View File

@@ -0,0 +1,56 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import typing
from abc import ABC, abstractmethod
from contextvars import Token
class Context(typing.Dict[str, object]):
    # A dict-backed mapping of propagated values. Direct item assignment is
    # blocked so a shared Context is not mutated in place; updates build a
    # new Context instead (see set_value()).
    # NOTE(review): only __setitem__ is overridden here — other inherited
    # dict mutators (update, pop, __delitem__, ...) are not blocked.
    def __setitem__(self, key: str, value: object) -> None:
        raise ValueError
class _RuntimeContext(ABC):
    """The RuntimeContext interface provides a wrapper for the different
    mechanisms that are used to propagate context in Python.

    Implementations can be made available via entry_points and
    selected through environment variables.
    """

    @abstractmethod
    def attach(self, context: Context) -> Token[Context]:
        """Sets the current `Context` object. Returns a
        token that can be used to reset to the previous `Context`.

        Args:
            context: The Context to set.
        """

    @abstractmethod
    def get_current(self) -> Context:
        """Returns the current `Context` object."""

    @abstractmethod
    def detach(self, token: Token[Context]) -> None:
        """Resets Context to a previous value

        Args:
            token: A reference to a previous Context.
        """
__all__ = ["Context"]

View File

@@ -0,0 +1,56 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from contextvars import ContextVar, Token
from opentelemetry.context.context import Context, _RuntimeContext
class ContextVarsRuntimeContext(_RuntimeContext):
    """An implementation of the RuntimeContext interface which wraps ContextVar under
    the hood. This is the preferred implementation for usage with Python 3.5+
    """

    # Name given to the underlying ContextVar (debugging aid only).
    _CONTEXT_KEY = "current_context"

    def __init__(self) -> None:
        # An empty Context is the default before anything is attached.
        self._current_context = ContextVar(
            self._CONTEXT_KEY, default=Context()
        )

    def attach(self, context: Context) -> Token[Context]:
        """Sets the current `Context` object. Returns a
        token that can be used to reset to the previous `Context`.

        Args:
            context: The Context to set.
        """
        return self._current_context.set(context)

    def get_current(self) -> Context:
        """Returns the current `Context` object."""
        return self._current_context.get()

    def detach(self, token: Token[Context]) -> None:
        """Resets Context to a previous value

        Args:
            token: A reference to a previous Context.
        """
        self._current_context.reset(token)
__all__ = ["ContextVarsRuntimeContext"]

View File

@@ -0,0 +1,88 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
OTEL_LOGS_EXPORTER = "OTEL_LOGS_EXPORTER"
"""
.. envvar:: OTEL_LOGS_EXPORTER
"""
OTEL_METRICS_EXPORTER = "OTEL_METRICS_EXPORTER"
"""
.. envvar:: OTEL_METRICS_EXPORTER
Specifies which exporter is used for metrics. See `General SDK Configuration
<https://opentelemetry.io/docs/concepts/sdk-configuration/general-sdk-configuration/#otel_metrics_exporter>`_.
**Default value:** ``"otlp"``
**Example:**
``export OTEL_METRICS_EXPORTER="prometheus"``
Accepted values for ``OTEL_METRICS_EXPORTER`` are:
- ``"otlp"``
- ``"prometheus"``
- ``"none"``: No automatically configured exporter for metrics.
.. note::
Exporter packages may add entry points for group ``opentelemetry_metrics_exporter`` which
can then be used with this environment variable by name. The entry point should point to
either a `opentelemetry.sdk.metrics.export.MetricExporter` (push exporter) or
`opentelemetry.sdk.metrics.export.MetricReader` (pull exporter) subclass; it must be
constructable without any required arguments. This mechanism is considered experimental and
may change in subsequent releases.
"""
OTEL_PROPAGATORS = "OTEL_PROPAGATORS"
"""
.. envvar:: OTEL_PROPAGATORS
"""
OTEL_PYTHON_CONTEXT = "OTEL_PYTHON_CONTEXT"
"""
.. envvar:: OTEL_PYTHON_CONTEXT
"""
OTEL_PYTHON_ID_GENERATOR = "OTEL_PYTHON_ID_GENERATOR"
"""
.. envvar:: OTEL_PYTHON_ID_GENERATOR
"""
OTEL_TRACES_EXPORTER = "OTEL_TRACES_EXPORTER"
"""
.. envvar:: OTEL_TRACES_EXPORTER
"""
OTEL_PYTHON_TRACER_PROVIDER = "OTEL_PYTHON_TRACER_PROVIDER"
"""
.. envvar:: OTEL_PYTHON_TRACER_PROVIDER
"""
OTEL_PYTHON_METER_PROVIDER = "OTEL_PYTHON_METER_PROVIDER"
"""
.. envvar:: OTEL_PYTHON_METER_PROVIDER
"""
_OTEL_PYTHON_LOGGER_PROVIDER = "OTEL_PYTHON_LOGGER_PROVIDER"
"""
.. envvar:: OTEL_PYTHON_LOGGER_PROVIDER
"""
_OTEL_PYTHON_EVENT_LOGGER_PROVIDER = "OTEL_PYTHON_EVENT_LOGGER_PROVIDER"
"""
.. envvar:: OTEL_PYTHON_EVENT_LOGGER_PROVIDER
"""

View File

@@ -0,0 +1,18 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from opentelemetry.exporter.otlp.proto.common.version import __version__
__all__ = ["__version__"]

View File

@@ -0,0 +1,177 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
from collections.abc import Sequence
from typing import (
Any,
Callable,
Dict,
List,
Mapping,
Optional,
TypeVar,
)
from opentelemetry.proto.common.v1.common_pb2 import AnyValue as PB2AnyValue
from opentelemetry.proto.common.v1.common_pb2 import (
ArrayValue as PB2ArrayValue,
)
from opentelemetry.proto.common.v1.common_pb2 import (
InstrumentationScope as PB2InstrumentationScope,
)
from opentelemetry.proto.common.v1.common_pb2 import KeyValue as PB2KeyValue
from opentelemetry.proto.common.v1.common_pb2 import (
KeyValueList as PB2KeyValueList,
)
from opentelemetry.proto.resource.v1.resource_pb2 import (
Resource as PB2Resource,
)
from opentelemetry.sdk.trace import Resource
from opentelemetry.sdk.util.instrumentation import InstrumentationScope
from opentelemetry.util.types import _ExtendedAttributes
_logger = logging.getLogger(__name__)
_TypingResourceT = TypeVar("_TypingResourceT")
_ResourceDataT = TypeVar("_ResourceDataT")
def _encode_instrumentation_scope(
    instrumentation_scope: InstrumentationScope,
) -> PB2InstrumentationScope:
    """Convert an SDK InstrumentationScope into its protobuf counterpart.

    A missing scope maps to an empty PB2InstrumentationScope message.
    """
    if instrumentation_scope is not None:
        return PB2InstrumentationScope(
            name=instrumentation_scope.name,
            version=instrumentation_scope.version,
            attributes=_encode_attributes(instrumentation_scope.attributes),
        )
    return PB2InstrumentationScope()
def _encode_resource(resource: Resource) -> PB2Resource:
    """Convert an SDK Resource into its protobuf counterpart."""
    return PB2Resource(attributes=_encode_attributes(resource.attributes))
def _encode_value(
    value: Any, allow_null: bool = False
) -> Optional[PB2AnyValue]:
    """Encode a Python value as a protobuf AnyValue.

    With ``allow_null=True`` a None input encodes as None (absent value);
    otherwise unsupported types raise.
    """
    if allow_null is True and value is None:
        return None
    # bool must be tested before int: bool is a subclass of int.
    if isinstance(value, bool):
        return PB2AnyValue(bool_value=value)
    if isinstance(value, str):
        return PB2AnyValue(string_value=value)
    if isinstance(value, int):
        return PB2AnyValue(int_value=value)
    if isinstance(value, float):
        return PB2AnyValue(double_value=value)
    if isinstance(value, bytes):
        return PB2AnyValue(bytes_value=value)
    if isinstance(value, Sequence):
        encoded_items = _encode_array(value, allow_null=allow_null)
        return PB2AnyValue(array_value=PB2ArrayValue(values=encoded_items))
    if isinstance(value, Mapping):
        encoded_pairs = [
            _encode_key_value(str(k), v, allow_null=allow_null)
            for k, v in value.items()
        ]
        return PB2AnyValue(kvlist_value=PB2KeyValueList(values=encoded_pairs))
    raise Exception(f"Invalid type {type(value)} of value {value}")
def _encode_key_value(
    key: str, value: Any, allow_null: bool = False
) -> PB2KeyValue:
    """Wrap *key* and the encoded *value* in a protobuf KeyValue."""
    return PB2KeyValue(
        key=key, value=_encode_value(value, allow_null=allow_null)
    )
def _encode_array(
    array: Sequence[Any], allow_null: bool = False
) -> Sequence[PB2AnyValue]:
    """Encode each element of *array* as a protobuf AnyValue.

    With ``allow_null=True``, None elements become empty AnyValue
    messages; otherwise None elements make _encode_value() raise.
    """
    if not allow_null:
        # Let the exception get raised by _encode_value()
        return [_encode_value(item, allow_null=allow_null) for item in array]
    # Use an empty AnyValue to represent None in an array. Behavior may
    # change pending
    # https://github.com/open-telemetry/opentelemetry-specification/issues/4392
    return [
        PB2AnyValue()
        if item is None
        else _encode_value(item, allow_null=allow_null)
        for item in array
    ]
def _encode_span_id(span_id: int) -> bytes:
return span_id.to_bytes(length=8, byteorder="big", signed=False)
def _encode_trace_id(trace_id: int) -> bytes:
return trace_id.to_bytes(length=16, byteorder="big", signed=False)
def _encode_attributes(
    attributes: _ExtendedAttributes,
    allow_null: bool = False,
) -> Optional[List[PB2KeyValue]]:
    """Encode an attribute mapping as a list of protobuf KeyValue.

    Empty/None input maps to None. Entries that fail to encode are
    logged and skipped rather than failing the whole mapping.
    """
    if not attributes:
        return None
    encoded: List[PB2KeyValue] = []
    for key, value in attributes.items():
        # pylint: disable=broad-exception-caught
        try:
            encoded.append(
                _encode_key_value(key, value, allow_null=allow_null)
            )
        except Exception as error:
            _logger.exception("Failed to encode key %s: %s", key, error)
    return encoded
def _get_resource_data(
    sdk_resource_scope_data: Dict[Resource, _ResourceDataT],
    resource_class: Callable[..., _TypingResourceT],
    name: str,
) -> List[_TypingResourceT]:
    """Build per-resource protobuf messages from grouped scope data.

    *name* selects the keyword (``scope_<name>``) under which the scope
    values are passed to *resource_class*.
    """
    resource_data = []
    for sdk_resource, scope_data in sdk_resource_scope_data.items():
        collector_resource = PB2Resource(
            attributes=_encode_attributes(sdk_resource.attributes)
        )
        kwargs = {
            "resource": collector_resource,
            f"scope_{name}": scope_data.values(),
        }
        resource_data.append(resource_class(**kwargs))
    return resource_data

View File

@@ -0,0 +1,103 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import defaultdict
from typing import List, Sequence
from opentelemetry.exporter.otlp.proto.common._internal import (
_encode_attributes,
_encode_instrumentation_scope,
_encode_resource,
_encode_span_id,
_encode_trace_id,
_encode_value,
)
from opentelemetry.proto.collector.logs.v1.logs_service_pb2 import (
ExportLogsServiceRequest,
)
from opentelemetry.proto.logs.v1.logs_pb2 import LogRecord as PB2LogRecord
from opentelemetry.proto.logs.v1.logs_pb2 import (
ResourceLogs,
ScopeLogs,
)
from opentelemetry.sdk._logs import LogData
def encode_logs(batch: Sequence[LogData]) -> ExportLogsServiceRequest:
    """Encode a batch of SDK LogData into an OTLP ExportLogsServiceRequest."""
    return ExportLogsServiceRequest(resource_logs=_encode_resource_logs(batch))
def _encode_log(log_data: LogData) -> PB2LogRecord:
    """Convert one SDK LogData into a protobuf LogRecord.

    Zero trace/span ids are encoded as absent (None) rather than as
    all-zero byte strings.
    """
    span_id = (
        None
        if log_data.log_record.span_id == 0
        else _encode_span_id(log_data.log_record.span_id)
    )
    trace_id = (
        None
        if log_data.log_record.trace_id == 0
        else _encode_trace_id(log_data.log_record.trace_id)
    )
    body = log_data.log_record.body
    return PB2LogRecord(
        time_unix_nano=log_data.log_record.timestamp,
        observed_time_unix_nano=log_data.log_record.observed_timestamp,
        span_id=span_id,
        trace_id=trace_id,
        flags=int(log_data.log_record.trace_flags),
        # allow_null=True: log bodies and attribute values may be None.
        body=_encode_value(body, allow_null=True),
        severity_text=log_data.log_record.severity_text,
        attributes=_encode_attributes(
            log_data.log_record.attributes, allow_null=True
        ),
        dropped_attributes_count=log_data.log_record.dropped_attributes,
        # severity_number may be None; getattr tolerates that case.
        severity_number=getattr(
            log_data.log_record.severity_number, "value", None
        ),
        event_name=log_data.log_record.event_name,
    )
def _encode_resource_logs(batch: Sequence[LogData]) -> List[ResourceLogs]:
    """Group encoded log records by resource, then by instrumentation scope.

    Produces one ResourceLogs per distinct SDK resource, each containing
    one ScopeLogs per distinct instrumentation scope.
    """
    # resource -> scope -> [encoded log records]
    sdk_resource_logs = defaultdict(lambda: defaultdict(list))

    for sdk_log in batch:
        sdk_resource = sdk_log.log_record.resource
        sdk_instrumentation = sdk_log.instrumentation_scope or None
        pb2_log = _encode_log(sdk_log)

        sdk_resource_logs[sdk_resource][sdk_instrumentation].append(pb2_log)

    pb2_resource_logs = []

    for sdk_resource, sdk_instrumentations in sdk_resource_logs.items():
        scope_logs = []
        for sdk_instrumentation, pb2_logs in sdk_instrumentations.items():
            scope_logs.append(
                ScopeLogs(
                    scope=(_encode_instrumentation_scope(sdk_instrumentation)),
                    log_records=pb2_logs,
                    # scope may be None (see `or None` above); guard the
                    # attribute access.
                    schema_url=sdk_instrumentation.schema_url
                    if sdk_instrumentation
                    else None,
                )
            )
        pb2_resource_logs.append(
            ResourceLogs(
                resource=_encode_resource(sdk_resource),
                scope_logs=scope_logs,
                schema_url=sdk_resource.schema_url,
            )
        )

    return pb2_resource_logs

View File

@@ -0,0 +1,388 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import logging
from os import environ
from typing import Dict, List
from opentelemetry.exporter.otlp.proto.common._internal import (
_encode_attributes,
_encode_instrumentation_scope,
_encode_span_id,
_encode_trace_id,
)
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import (
ExportMetricsServiceRequest,
)
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2
from opentelemetry.proto.resource.v1.resource_pb2 import (
Resource as PB2Resource,
)
from opentelemetry.sdk.environment_variables import (
OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION,
OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE,
)
from opentelemetry.sdk.metrics import (
Counter,
Exemplar,
Histogram,
ObservableCounter,
ObservableGauge,
ObservableUpDownCounter,
UpDownCounter,
)
from opentelemetry.sdk.metrics.export import (
AggregationTemporality,
Gauge,
MetricExporter,
MetricsData,
Sum,
)
from opentelemetry.sdk.metrics.export import (
ExponentialHistogram as ExponentialHistogramType,
)
from opentelemetry.sdk.metrics.export import (
Histogram as HistogramType,
)
from opentelemetry.sdk.metrics.view import (
Aggregation,
ExplicitBucketHistogramAggregation,
ExponentialBucketHistogramAggregation,
)
_logger = logging.getLogger(__name__)
class OTLPMetricExporterMixin:
    """Shared configuration logic for OTLP metric exporters.

    Resolves the preferred aggregation temporality and the default
    histogram aggregation from environment variables, overlaid with any
    explicit preferences supplied by the caller.
    """

    def _common_configuration(
        self,
        preferred_temporality: dict[type, AggregationTemporality]
        | None = None,
        preferred_aggregation: dict[type, Aggregation] | None = None,
    ) -> None:
        # Initialize the MetricExporter base with the resolved settings.
        MetricExporter.__init__(
            self,
            preferred_temporality=self._get_temporality(preferred_temporality),
            preferred_aggregation=self._get_aggregation(preferred_aggregation),
        )

    def _get_temporality(
        self,
        preferred_temporality: dict[type, AggregationTemporality] | None,
    ) -> Dict[type, AggregationTemporality]:
        """Resolve the per-instrument aggregation temporality map.

        Reads OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE
        ("CUMULATIVE", "DELTA" or "LOWMEMORY"; anything else falls back
        to "CUMULATIVE" with a warning), then overlays any
        caller-supplied preferences.
        """
        otel_exporter_otlp_metrics_temporality_preference = (
            environ.get(
                OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE,
                "CUMULATIVE",
            )
            .upper()
            .strip()
        )

        if otel_exporter_otlp_metrics_temporality_preference == "DELTA":
            instrument_class_temporality = {
                Counter: AggregationTemporality.DELTA,
                UpDownCounter: AggregationTemporality.CUMULATIVE,
                Histogram: AggregationTemporality.DELTA,
                ObservableCounter: AggregationTemporality.DELTA,
                ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
                ObservableGauge: AggregationTemporality.CUMULATIVE,
            }

        elif otel_exporter_otlp_metrics_temporality_preference == "LOWMEMORY":
            instrument_class_temporality = {
                Counter: AggregationTemporality.DELTA,
                UpDownCounter: AggregationTemporality.CUMULATIVE,
                Histogram: AggregationTemporality.DELTA,
                ObservableCounter: AggregationTemporality.CUMULATIVE,
                ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
                ObservableGauge: AggregationTemporality.CUMULATIVE,
            }

        else:
            if otel_exporter_otlp_metrics_temporality_preference != (
                "CUMULATIVE"
            ):
                # Fixed: the message previously named a nonexistent env var
                # ("OTEL_EXPORTER_METRICS_TEMPORALITY_PREFERENCE"); log the
                # actual constant, consistent with _get_aggregation().
                _logger.warning(
                    "Unrecognized %s value found: %s, using CUMULATIVE",
                    OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE,
                    otel_exporter_otlp_metrics_temporality_preference,
                )
            instrument_class_temporality = {
                Counter: AggregationTemporality.CUMULATIVE,
                UpDownCounter: AggregationTemporality.CUMULATIVE,
                Histogram: AggregationTemporality.CUMULATIVE,
                ObservableCounter: AggregationTemporality.CUMULATIVE,
                ObservableUpDownCounter: AggregationTemporality.CUMULATIVE,
                ObservableGauge: AggregationTemporality.CUMULATIVE,
            }

        instrument_class_temporality.update(preferred_temporality or {})
        return instrument_class_temporality

    def _get_aggregation(
        self,
        preferred_aggregation: dict[type, Aggregation] | None,
    ) -> Dict[type, Aggregation]:
        """Resolve the default histogram aggregation.

        Reads OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION
        ("explicit_bucket_histogram" or
        "base2_exponential_bucket_histogram"; anything else falls back
        to explicit buckets with a warning), then overlays any
        caller-supplied preferences.
        """
        otel_exporter_otlp_metrics_default_histogram_aggregation = environ.get(
            OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION,
            "explicit_bucket_histogram",
        )

        if otel_exporter_otlp_metrics_default_histogram_aggregation == (
            "base2_exponential_bucket_histogram"
        ):
            instrument_class_aggregation = {
                Histogram: ExponentialBucketHistogramAggregation(),
            }

        else:
            if otel_exporter_otlp_metrics_default_histogram_aggregation != (
                "explicit_bucket_histogram"
            ):
                _logger.warning(
                    (
                        "Invalid value for %s: %s, using explicit bucket "
                        "histogram aggregation"
                    ),
                    OTEL_EXPORTER_OTLP_METRICS_DEFAULT_HISTOGRAM_AGGREGATION,
                    otel_exporter_otlp_metrics_default_histogram_aggregation,
                )
            instrument_class_aggregation = {
                Histogram: ExplicitBucketHistogramAggregation(),
            }

        instrument_class_aggregation.update(preferred_aggregation or {})
        return instrument_class_aggregation
class EncodingException(Exception):
    """
    Raised by encode_metrics() when an exception is caught during encoding.
    Contains the problematic metric so the misbehaving metric name and
    details can be logged during exception handling.
    """

    def __init__(self, original_exception, metric):
        super().__init__()
        # Keep both the cause and the offending metric for later logging.
        self.original_exception = original_exception
        self.metric = metric

    def __str__(self):
        return "\n".join((str(self.metric), str(self.original_exception)))
def encode_metrics(data: MetricsData) -> ExportMetricsServiceRequest:
    """Encode SDK MetricsData into an OTLP ExportMetricsServiceRequest.

    Metrics are first grouped per resource (and per scope within each
    resource) by _encode_resource_metrics(), then wrapped in protobuf
    ResourceMetrics messages.
    """
    resource_metrics_dict = {}

    for resource_metrics in data.resource_metrics:
        _encode_resource_metrics(resource_metrics, resource_metrics_dict)

    resource_data = [
        pb2.ResourceMetrics(
            resource=PB2Resource(
                attributes=_encode_attributes(sdk_resource.attributes)
            ),
            scope_metrics=scope_data.values(),
            schema_url=sdk_resource.schema_url,
        )
        for sdk_resource, scope_data in resource_metrics_dict.items()
    ]
    return ExportMetricsServiceRequest(resource_metrics=resource_data)
def _encode_resource_metrics(resource_metrics, resource_metrics_dict):
    """Encode one ResourceMetrics entry into *resource_metrics_dict*.

    Populates ``resource_metrics_dict[resource]`` with a mapping of
    instrumentation scope -> pb2.ScopeMetrics whose ``metrics`` are the
    encoded metrics. Raises EncodingException if any metric fails to
    encode, carrying the offending metric.
    """
    resource = resource_metrics.resource
    # It is safe to assume that each entry in data.resource_metrics is
    # associated with an unique resource.
    scope_metrics_dict = {}
    resource_metrics_dict[resource] = scope_metrics_dict
    for scope_metrics in resource_metrics.scope_metrics:
        instrumentation_scope = scope_metrics.scope

        # The SDK groups metrics in instrumentation scopes already so
        # there is no need to check for existing instrumentation scopes
        # here.
        pb2_scope_metrics = pb2.ScopeMetrics(
            scope=_encode_instrumentation_scope(instrumentation_scope),
            schema_url=instrumentation_scope.schema_url,
        )

        scope_metrics_dict[instrumentation_scope] = pb2_scope_metrics

        for metric in scope_metrics.metrics:
            pb2_metric = pb2.Metric(
                name=metric.name,
                description=metric.description,
                unit=metric.unit,
            )

            try:
                _encode_metric(metric, pb2_metric)
            except Exception as ex:
                # `from None` so we don't get "During handling of the above exception, another exception occurred:"
                raise EncodingException(ex, metric) from None

            pb2_scope_metrics.metrics.append(pb2_metric)
def _encode_metric(metric, pb2_metric):
    """Populate *pb2_metric* with the encoded data points of *metric*.

    Dispatches on the SDK aggregation type (Gauge, Histogram, Sum,
    ExponentialHistogram); any other data type is logged as a warning
    and skipped.
    """
    if isinstance(metric.data, Gauge):
        for data_point in metric.data.data_points:
            pt = pb2.NumberDataPoint(
                attributes=_encode_attributes(data_point.attributes),
                time_unix_nano=data_point.time_unix_nano,
                exemplars=_encode_exemplars(data_point.exemplars),
            )
            # NumberDataPoint.value is a oneof: set the int or double slot.
            if isinstance(data_point.value, int):
                pt.as_int = data_point.value
            else:
                pt.as_double = data_point.value
            pb2_metric.gauge.data_points.append(pt)
    elif isinstance(metric.data, HistogramType):
        for data_point in metric.data.data_points:
            pt = pb2.HistogramDataPoint(
                attributes=_encode_attributes(data_point.attributes),
                time_unix_nano=data_point.time_unix_nano,
                start_time_unix_nano=data_point.start_time_unix_nano,
                exemplars=_encode_exemplars(data_point.exemplars),
                count=data_point.count,
                sum=data_point.sum,
                bucket_counts=data_point.bucket_counts,
                explicit_bounds=data_point.explicit_bounds,
                max=data_point.max,
                min=data_point.min,
            )
            # Temporality lives on the metric data, not on each data point.
            pb2_metric.histogram.aggregation_temporality = (
                metric.data.aggregation_temporality
            )
            pb2_metric.histogram.data_points.append(pt)
    elif isinstance(metric.data, Sum):
        for data_point in metric.data.data_points:
            pt = pb2.NumberDataPoint(
                attributes=_encode_attributes(data_point.attributes),
                start_time_unix_nano=data_point.start_time_unix_nano,
                time_unix_nano=data_point.time_unix_nano,
                exemplars=_encode_exemplars(data_point.exemplars),
            )
            # NumberDataPoint.value is a oneof: set the int or double slot.
            if isinstance(data_point.value, int):
                pt.as_int = data_point.value
            else:
                pt.as_double = data_point.value
            # note that because sum is a message type, the
            # fields must be set individually rather than
            # instantiating a pb2.Sum and setting it once
            pb2_metric.sum.aggregation_temporality = (
                metric.data.aggregation_temporality
            )
            pb2_metric.sum.is_monotonic = metric.data.is_monotonic
            pb2_metric.sum.data_points.append(pt)
    elif isinstance(metric.data, ExponentialHistogramType):
        for data_point in metric.data.data_points:
            # Empty bucket lists are encoded as an absent Buckets message.
            if data_point.positive.bucket_counts:
                positive = pb2.ExponentialHistogramDataPoint.Buckets(
                    offset=data_point.positive.offset,
                    bucket_counts=data_point.positive.bucket_counts,
                )
            else:
                positive = None
            if data_point.negative.bucket_counts:
                negative = pb2.ExponentialHistogramDataPoint.Buckets(
                    offset=data_point.negative.offset,
                    bucket_counts=data_point.negative.bucket_counts,
                )
            else:
                negative = None
            pt = pb2.ExponentialHistogramDataPoint(
                attributes=_encode_attributes(data_point.attributes),
                time_unix_nano=data_point.time_unix_nano,
                start_time_unix_nano=data_point.start_time_unix_nano,
                exemplars=_encode_exemplars(data_point.exemplars),
                count=data_point.count,
                sum=data_point.sum,
                scale=data_point.scale,
                zero_count=data_point.zero_count,
                positive=positive,
                negative=negative,
                flags=data_point.flags,
                max=data_point.max,
                min=data_point.min,
            )
            pb2_metric.exponential_histogram.aggregation_temporality = (
                metric.data.aggregation_temporality
            )
            pb2_metric.exponential_histogram.data_points.append(pt)
    else:
        _logger.warning(
            "unsupported data type %s",
            metric.data.__class__.__name__,
        )
def _encode_exemplars(sdk_exemplars: List[Exemplar]) -> List[pb2.Exemplar]:
    """
    Converts a list of SDK Exemplars into a list of protobuf Exemplars.

    Args:
        sdk_exemplars (list): The list of exemplars from the OpenTelemetry SDK.

    Returns:
        list: A list of protobuf exemplars.

    Raises:
        ValueError: if an exemplar value is neither int nor float.
    """
    pb_exemplars = []
    for sdk_exemplar in sdk_exemplars:
        kwargs = {
            "time_unix_nano": sdk_exemplar.time_unix_nano,
            "filtered_attributes": _encode_attributes(
                sdk_exemplar.filtered_attributes
            ),
        }
        # Trace context is attached only when both ids are present.
        if (
            sdk_exemplar.trace_id is not None
            and sdk_exemplar.span_id is not None
        ):
            kwargs["span_id"] = _encode_span_id(sdk_exemplar.span_id)
            kwargs["trace_id"] = _encode_trace_id(sdk_exemplar.trace_id)
        pb_exemplar = pb2.Exemplar(**kwargs)
        # float is tested before int so the correct oneof slot is chosen.
        if isinstance(sdk_exemplar.value, float):
            pb_exemplar.as_double = sdk_exemplar.value
        elif isinstance(sdk_exemplar.value, int):
            pb_exemplar.as_int = sdk_exemplar.value
        else:
            raise ValueError("Exemplar value must be an int or float")
        pb_exemplars.append(pb_exemplar)
    return pb_exemplars

View File

@@ -0,0 +1,192 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from collections import defaultdict
from typing import List, Optional, Sequence
from opentelemetry.exporter.otlp.proto.common._internal import (
_encode_attributes,
_encode_instrumentation_scope,
_encode_resource,
_encode_span_id,
_encode_trace_id,
)
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import (
ExportTraceServiceRequest as PB2ExportTraceServiceRequest,
)
from opentelemetry.proto.trace.v1.trace_pb2 import (
ResourceSpans as PB2ResourceSpans,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ScopeSpans as PB2ScopeSpans
from opentelemetry.proto.trace.v1.trace_pb2 import Span as PB2SPan
from opentelemetry.proto.trace.v1.trace_pb2 import SpanFlags as PB2SpanFlags
from opentelemetry.proto.trace.v1.trace_pb2 import Status as PB2Status
from opentelemetry.sdk.trace import Event, ReadableSpan
from opentelemetry.trace import Link, SpanKind
from opentelemetry.trace.span import SpanContext, Status, TraceState
# pylint: disable=E1101
# Maps each SDK SpanKind to its OTLP protobuf enum counterpart.
_SPAN_KIND_MAP = {
    SpanKind.INTERNAL: PB2SPan.SpanKind.SPAN_KIND_INTERNAL,
    SpanKind.SERVER: PB2SPan.SpanKind.SPAN_KIND_SERVER,
    SpanKind.CLIENT: PB2SPan.SpanKind.SPAN_KIND_CLIENT,
    SpanKind.PRODUCER: PB2SPan.SpanKind.SPAN_KIND_PRODUCER,
    SpanKind.CONSUMER: PB2SPan.SpanKind.SPAN_KIND_CONSUMER,
}

_logger = logging.getLogger(__name__)
def encode_spans(
    sdk_spans: Sequence[ReadableSpan],
) -> PB2ExportTraceServiceRequest:
    """Encode a batch of SDK spans into an OTLP ExportTraceServiceRequest."""
    encoded = _encode_resource_spans(sdk_spans)
    return PB2ExportTraceServiceRequest(resource_spans=encoded)
def _encode_resource_spans(
    sdk_spans: Sequence[ReadableSpan],
) -> List[PB2ResourceSpans]:
    """Group spans as Resource -> Instrumentation scope -> spans, then encode.

    The first pass groups using SDK objects as keys (protobuf messages are
    not hashable); the second pass emits the protobuf structures.
    """
    grouped = defaultdict(lambda: defaultdict(list))
    for sdk_span in sdk_spans:
        scope_key = sdk_span.instrumentation_scope or None
        grouped[sdk_span.resource][scope_key].append(_encode_span(sdk_span))

    pb2_resource_spans = []
    for sdk_resource, spans_by_scope in grouped.items():
        scope_spans = [
            PB2ScopeSpans(
                scope=(_encode_instrumentation_scope(sdk_scope)),
                spans=encoded_spans,
                schema_url=sdk_scope.schema_url if sdk_scope else None,
            )
            for sdk_scope, encoded_spans in spans_by_scope.items()
        ]
        pb2_resource_spans.append(
            PB2ResourceSpans(
                resource=_encode_resource(sdk_resource),
                scope_spans=scope_spans,
                schema_url=sdk_resource.schema_url,
            )
        )
    return pb2_resource_spans
def _span_flags(parent_span_context: Optional[SpanContext]) -> int:
    """Compute OTLP span flags from the (possibly absent) parent context."""
    # "has is_remote" is always known for spans produced by this SDK.
    remote = bool(parent_span_context and parent_span_context.is_remote)
    if remote:
        return (
            PB2SpanFlags.SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK
            | PB2SpanFlags.SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK
        )
    return PB2SpanFlags.SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK
def _encode_span(sdk_span: ReadableSpan) -> PB2SPan:
    """Encode one ReadableSpan into its protobuf Span representation."""
    ctx = sdk_span.get_span_context()
    parent = sdk_span.parent
    return PB2SPan(
        trace_id=_encode_trace_id(ctx.trace_id),
        span_id=_encode_span_id(ctx.span_id),
        trace_state=_encode_trace_state(ctx.trace_state),
        parent_span_id=_encode_parent_id(parent),
        name=sdk_span.name,
        kind=_SPAN_KIND_MAP[sdk_span.kind],
        start_time_unix_nano=sdk_span.start_time,
        end_time_unix_nano=sdk_span.end_time,
        attributes=_encode_attributes(sdk_span.attributes),
        events=_encode_events(sdk_span.events),
        links=_encode_links(sdk_span.links),
        status=_encode_status(sdk_span.status),
        dropped_attributes_count=sdk_span.dropped_attributes,
        dropped_events_count=sdk_span.dropped_events,
        dropped_links_count=sdk_span.dropped_links,
        flags=_span_flags(parent),
    )
def _encode_events(
    events: Sequence[Event],
) -> Optional[List[PB2SPan.Event]]:
    """Encode span events; an empty or missing sequence yields None."""
    if not events:
        return None
    return [
        PB2SPan.Event(
            name=event.name,
            time_unix_nano=event.timestamp,
            attributes=_encode_attributes(event.attributes),
            dropped_attributes_count=event.dropped_attributes,
        )
        for event in events
    ]
def _encode_links(links: Sequence[Link]) -> Sequence[PB2SPan.Link]:
    """Encode span links; an empty or missing sequence yields None."""
    if not links:
        return None
    return [
        PB2SPan.Link(
            trace_id=_encode_trace_id(link.context.trace_id),
            span_id=_encode_span_id(link.context.span_id),
            attributes=_encode_attributes(link.attributes),
            dropped_attributes_count=link.dropped_attributes,
            flags=_span_flags(link.context),
        )
        for link in links
    ]
def _encode_status(status: Status) -> Optional[PB2Status]:
    """Encode a span Status; None passes through unchanged."""
    if status is None:
        return None
    return PB2Status(
        code=status.status_code.value,
        message=status.description,
    )
def _encode_trace_state(trace_state: TraceState) -> Optional[str]:
pb2_trace_state = None
if trace_state is not None:
pb2_trace_state = ",".join(
[f"{key}={value}" for key, value in (trace_state.items())]
)
return pb2_trace_state
def _encode_parent_id(context: Optional[SpanContext]) -> Optional[bytes]:
    """Encode the parent span id, or None when the span has no parent."""
    return _encode_span_id(context.span_id) if context else None

View File

@@ -0,0 +1,20 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from opentelemetry.exporter.otlp.proto.common._internal._log_encoder import (
encode_logs,
)
# Explicit public API of this module: the shared OTLP log encoder.
__all__ = ["encode_logs"]

View File

@@ -0,0 +1,20 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import (
encode_metrics,
)
# Explicit public API of this module: the shared OTLP metrics encoder.
__all__ = ["encode_metrics"]

View File

@@ -0,0 +1,20 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import (
encode_spans,
)
# Explicit public API of this module: the shared OTLP span encoder.
__all__ = ["encode_spans"]

View File

@@ -0,0 +1,15 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Package version string (PEP 440).
__version__ = "1.38.0"

View File

@@ -0,0 +1,79 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This library allows to export tracing data to an OTLP collector.
Usage
-----
The **OTLP Span Exporter** allows to export `OpenTelemetry`_ traces to the
`OTLP`_ collector.
You can configure the exporter with the following environment variables:
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_TIMEOUT`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_PROTOCOL`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_HEADERS`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_ENDPOINT`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_COMPRESSION`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE`
- :envvar:`OTEL_EXPORTER_OTLP_TIMEOUT`
- :envvar:`OTEL_EXPORTER_OTLP_PROTOCOL`
- :envvar:`OTEL_EXPORTER_OTLP_HEADERS`
- :envvar:`OTEL_EXPORTER_OTLP_ENDPOINT`
- :envvar:`OTEL_EXPORTER_OTLP_COMPRESSION`
- :envvar:`OTEL_EXPORTER_OTLP_CERTIFICATE`
.. _OTLP: https://github.com/open-telemetry/opentelemetry-collector/
.. _OpenTelemetry: https://github.com/open-telemetry/opentelemetry-python/
.. code:: python
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.grpc.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
# Resource can be required for some backends, e.g. Jaeger
# If resource wouldn't be set - traces wouldn't appears in Jaeger
resource = Resource(attributes={
"service.name": "service"
})
trace.set_tracer_provider(TracerProvider(resource=resource))
tracer = trace.get_tracer(__name__)
otlp_exporter = OTLPSpanExporter(endpoint="http://localhost:4317", insecure=True)
span_processor = BatchSpanProcessor(otlp_exporter)
trace.get_tracer_provider().add_span_processor(span_processor)
with tracer.start_as_current_span("foo"):
print("Hello world!")
API
---
"""
from .version import __version__
# Identifies this exporter (and its version) to the collector.
_USER_AGENT_HEADER_VALUE = f"OTel-OTLP-Exporter-Python/{__version__}"
_OTLP_GRPC_CHANNEL_OPTIONS = [
    # this will appear in the http User-Agent header
    ("grpc.primary_user_agent", _USER_AGENT_HEADER_VALUE)
]

View File

@@ -0,0 +1,127 @@
# Copyright The OpenTelemetry Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import environ
from typing import Dict, Literal, Optional, Sequence, Tuple, Union
from typing import Sequence as TypingSequence
from grpc import ChannelCredentials, Compression
from opentelemetry.exporter.otlp.proto.common._log_encoder import encode_logs
from opentelemetry.exporter.otlp.proto.grpc.exporter import (
OTLPExporterMixin,
_get_credentials,
environ_to_compression,
)
from opentelemetry.proto.collector.logs.v1.logs_service_pb2 import (
ExportLogsServiceRequest,
)
from opentelemetry.proto.collector.logs.v1.logs_service_pb2_grpc import (
LogsServiceStub,
)
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs.export import LogExporter, LogExportResult
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_GRPC_LOGS_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
OTEL_EXPORTER_OTLP_LOGS_COMPRESSION,
OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
OTEL_EXPORTER_OTLP_LOGS_HEADERS,
OTEL_EXPORTER_OTLP_LOGS_INSECURE,
OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
)
class OTLPLogExporter(
    LogExporter,
    OTLPExporterMixin[
        Sequence[LogData],
        ExportLogsServiceRequest,
        LogExportResult,
        LogsServiceStub,
    ],
):
    """OTLP log exporter over gRPC.

    Each constructor argument left as ``None`` falls back to the
    corresponding ``OTEL_EXPORTER_OTLP_LOGS_*`` environment variable;
    remaining defaults are resolved by :class:`OTLPExporterMixin`.
    """

    def __init__(
        self,
        endpoint: Optional[str] = None,
        insecure: Optional[bool] = None,
        credentials: Optional[ChannelCredentials] = None,
        headers: Optional[
            Union[TypingSequence[Tuple[str, str]], Dict[str, str], str]
        ] = None,
        timeout: Optional[float] = None,
        compression: Optional[Compression] = None,
        channel_options: Optional[Tuple[Tuple[str, str]]] = None,
    ):
        # An explicit `insecure` argument wins over the environment variable.
        insecure_logs = environ.get(OTEL_EXPORTER_OTLP_LOGS_INSECURE)
        if insecure is None and insecure_logs is not None:
            insecure = insecure_logs.lower() == "true"
        # Build TLS credentials only for secure connections with a
        # certificate configured; otherwise the mixin resolves defaults.
        if (
            not insecure
            and environ.get(OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE) is not None
        ):
            credentials = _get_credentials(
                credentials,
                _OTEL_PYTHON_EXPORTER_OTLP_GRPC_LOGS_CREDENTIAL_PROVIDER,
                OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
                OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
                OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
            )
        environ_timeout = environ.get(OTEL_EXPORTER_OTLP_LOGS_TIMEOUT)
        environ_timeout = (
            float(environ_timeout) if environ_timeout is not None else None
        )
        compression = (
            environ_to_compression(OTEL_EXPORTER_OTLP_LOGS_COMPRESSION)
            if compression is None
            else compression
        )
        OTLPExporterMixin.__init__(
            self,
            endpoint=endpoint or environ.get(OTEL_EXPORTER_OTLP_LOGS_ENDPOINT),
            insecure=insecure,
            credentials=credentials,
            headers=headers or environ.get(OTEL_EXPORTER_OTLP_LOGS_HEADERS),
            timeout=timeout or environ_timeout,
            compression=compression,
            stub=LogsServiceStub,
            result=LogExportResult,
            channel_options=channel_options,
        )

    def _translate_data(
        self, data: Sequence[LogData]
    ) -> ExportLogsServiceRequest:
        """Encode a batch of LogData into the OTLP protobuf request."""
        return encode_logs(data)

    def export(  # type: ignore [reportIncompatibleMethodOverride]
        self,
        batch: Sequence[LogData],
    ) -> Literal[LogExportResult.SUCCESS, LogExportResult.FAILURE]:
        """Export a batch of logs; retry/backoff is handled by the mixin."""
        return OTLPExporterMixin._export(self, batch)

    def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
        """Shut down the exporter and close the underlying gRPC channel."""
        OTLPExporterMixin.shutdown(self, timeout_millis=timeout_millis)

    def force_flush(self, timeout_millis: float = 10_000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True

    @property
    def _exporting(self) -> str:
        # Label used by the mixin's log messages.
        return "logs"

View File

@@ -0,0 +1,453 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OTLP Exporter"""
import random
import threading
from abc import ABC, abstractmethod
from collections.abc import Sequence # noqa: F401
from logging import getLogger
from os import environ
from time import time
from typing import ( # noqa: F401
Any,
Callable,
Dict,
Generic,
List,
Literal,
NewType,
Optional,
Tuple,
Type,
TypeVar,
Union,
)
from typing import Sequence as TypingSequence
from urllib.parse import urlparse
from google.rpc.error_details_pb2 import RetryInfo
from typing_extensions import deprecated
from grpc import (
ChannelCredentials,
Compression,
RpcError,
StatusCode,
insecure_channel,
secure_channel,
ssl_channel_credentials,
)
from opentelemetry.exporter.otlp.proto.common._internal import (
_get_resource_data,
)
from opentelemetry.exporter.otlp.proto.grpc import (
_OTLP_GRPC_CHANNEL_OPTIONS,
)
from opentelemetry.proto.collector.logs.v1.logs_service_pb2 import (
ExportLogsServiceRequest,
)
from opentelemetry.proto.collector.logs.v1.logs_service_pb2_grpc import (
LogsServiceStub,
)
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import (
ExportMetricsServiceRequest,
)
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2_grpc import (
MetricsServiceStub,
)
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import (
ExportTraceServiceRequest,
)
from opentelemetry.proto.collector.trace.v1.trace_service_pb2_grpc import (
TraceServiceStub,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
AnyValue,
ArrayValue,
KeyValue,
)
from opentelemetry.proto.resource.v1.resource_pb2 import Resource # noqa: F401
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs.export import LogExportResult
from opentelemetry.sdk._shared_internal import DuplicateFilter
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_GRPC_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_KEY,
OTEL_EXPORTER_OTLP_COMPRESSION,
OTEL_EXPORTER_OTLP_ENDPOINT,
OTEL_EXPORTER_OTLP_HEADERS,
OTEL_EXPORTER_OTLP_INSECURE,
OTEL_EXPORTER_OTLP_TIMEOUT,
)
from opentelemetry.sdk.metrics.export import MetricExportResult, MetricsData
from opentelemetry.sdk.resources import Resource as SDKResource
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExportResult
from opentelemetry.util._importlib_metadata import entry_points
from opentelemetry.util.re import parse_env_headers
# gRPC status codes for which an export attempt is retried with backoff;
# any other code fails the export immediately.
_RETRYABLE_ERROR_CODES = frozenset(
    [
        StatusCode.CANCELLED,
        StatusCode.DEADLINE_EXCEEDED,
        StatusCode.RESOURCE_EXHAUSTED,
        StatusCode.ABORTED,
        StatusCode.OUT_OF_RANGE,
        StatusCode.UNAVAILABLE,
        StatusCode.DATA_LOSS,
    ]
)
# Maximum number of export attempts (initial attempt included).
_MAX_RETRYS = 6

logger = getLogger(__name__)
# This prevents logs generated when a log fails to be written to generate another log which fails to be written etc. etc.
logger.addFilter(DuplicateFilter())
# Constrained TypeVars tying together, per signal (logs/metrics/traces):
# the SDK input type, the protobuf request type, the export result enum
# and the gRPC service stub used by OTLPExporterMixin.
SDKDataT = TypeVar(
    "SDKDataT",
    TypingSequence[LogData],
    MetricsData,
    TypingSequence[ReadableSpan],
)
ResourceDataT = TypeVar("ResourceDataT")
TypingResourceT = TypeVar("TypingResourceT")
ExportServiceRequestT = TypeVar(
    "ExportServiceRequestT",
    ExportTraceServiceRequest,
    ExportMetricsServiceRequest,
    ExportLogsServiceRequest,
)
ExportResultT = TypeVar(
    "ExportResultT",
    LogExportResult,
    MetricExportResult,
    SpanExportResult,
)
ExportStubT = TypeVar(
    "ExportStubT", TraceServiceStub, MetricsServiceStub, LogsServiceStub
)

# Mapping of OTEL_EXPORTER_OTLP_*_COMPRESSION env values to grpc.Compression;
# only "gzip" (or unset) is supported.
_ENVIRON_TO_COMPRESSION = {
    None: None,
    "gzip": Compression.Gzip,
}
class InvalidCompressionValueException(Exception):
    """Raised when a compression environment variable holds an unsupported value."""

    def __init__(self, environ_key: str, environ_value: str):
        message = (
            f'Invalid value "{environ_value}" '
            f"for compression envvar {environ_key}"
        )
        super().__init__(message)
def environ_to_compression(environ_key: str) -> Optional[Compression]:
    """Read *environ_key* and map it to a grpc Compression value.

    Returns None when the variable is unset; raises
    InvalidCompressionValueException for unsupported values.
    """
    raw_value = environ.get(environ_key)
    environ_value = raw_value.lower().strip() if raw_value is not None else None
    if (
        environ_value is not None
        and environ_value not in _ENVIRON_TO_COMPRESSION
    ):
        raise InvalidCompressionValueException(environ_key, environ_value)
    return _ENVIRON_TO_COMPRESSION[environ_value]
@deprecated(
    "Use one of the encoders from opentelemetry-exporter-otlp-proto-common instead. Deprecated since version 1.18.0.",
)
def get_resource_data(
    sdk_resource_scope_data: Dict[SDKResource, ResourceDataT],
    resource_class: Callable[..., TypingResourceT],
    name: str,
) -> List[TypingResourceT]:
    """Deprecated passthrough to the shared encoder helper."""
    result = _get_resource_data(sdk_resource_scope_data, resource_class, name)
    return result
def _read_file(file_path: str) -> Optional[bytes]:
try:
with open(file_path, "rb") as file:
return file.read()
except FileNotFoundError as e:
logger.exception(
"Failed to read file: %s. Please check if the file exists and is accessible.",
e.filename,
)
return None
def _load_credentials(
    certificate_file: Optional[str],
    client_key_file: Optional[str],
    client_certificate_file: Optional[str],
) -> ChannelCredentials:
    """Build gRPC SSL channel credentials from optional PEM file paths."""

    def _maybe_read(path: Optional[str]) -> Optional[bytes]:
        # A missing path simply leaves the corresponding credential unset.
        return _read_file(path) if path else None

    return ssl_channel_credentials(
        root_certificates=_maybe_read(certificate_file),
        private_key=_maybe_read(client_key_file),
        certificate_chain=_maybe_read(client_certificate_file),
    )
def _get_credentials(
    creds: Optional[ChannelCredentials],
    credential_entry_point_env_key: str,
    certificate_file_env_key: str,
    client_key_file_env_key: str,
    client_certificate_file_env_key: str,
) -> ChannelCredentials:
    """Resolve gRPC channel credentials.

    Precedence: explicitly passed credentials, then a credential-provider
    entry point named via the environment, then certificate files from the
    environment, and finally default SSL credentials.

    Raises:
        RuntimeError: if the requested entry point is missing or returns an
            object that is not a ``grpc.ChannelCredentials``.
    """
    if creds is not None:
        return creds
    _credential_env = environ.get(credential_entry_point_env_key)
    if _credential_env:
        try:
            # .load() resolves the entry point; the trailing () calls the
            # provider factory to obtain the credentials object.
            maybe_channel_creds = next(
                iter(
                    entry_points(
                        group="opentelemetry_otlp_credential_provider",
                        name=_credential_env,
                    )
                )
            ).load()()
        except StopIteration:
            raise RuntimeError(
                f"Requested component '{_credential_env}' not found in "
                f"entry point 'opentelemetry_otlp_credential_provider'"
            )
        if isinstance(maybe_channel_creds, ChannelCredentials):
            return maybe_channel_creds
        else:
            raise RuntimeError(
                f"Requested component '{_credential_env}' is of type {type(maybe_channel_creds)}"
                f" must be of type `grpc.ChannelCredentials`."
            )
    certificate_file = environ.get(certificate_file_env_key)
    if certificate_file:
        client_key_file = environ.get(client_key_file_env_key)
        client_certificate_file = environ.get(client_certificate_file_env_key)
        return _load_credentials(
            certificate_file, client_key_file, client_certificate_file
        )
    # Fall back to the system's default trust store.
    return ssl_channel_credentials()
# pylint: disable=no-member
class OTLPExporterMixin(
    ABC, Generic[SDKDataT, ExportServiceRequestT, ExportResultT, ExportStubT]
):
    """OTLP span exporter

    Shared gRPC transport logic (channel setup, headers, retry with
    exponential backoff) for the log/metric/span OTLP exporters.

    Args:
        endpoint: OpenTelemetry Collector receiver endpoint
        insecure: Connection type
        credentials: ChannelCredentials object for server authentication
        headers: Headers to send when exporting
        timeout: Backend request timeout in seconds
        compression: gRPC compression method to use
        channel_options: gRPC channel options
    """

    def __init__(
        self,
        stub: ExportStubT,
        result: ExportResultT,
        endpoint: Optional[str] = None,
        insecure: Optional[bool] = None,
        credentials: Optional[ChannelCredentials] = None,
        headers: Optional[
            Union[TypingSequence[Tuple[str, str]], Dict[str, str], str]
        ] = None,
        timeout: Optional[float] = None,
        compression: Optional[Compression] = None,
        channel_options: Optional[Tuple[Tuple[str, str]]] = None,
    ):
        super().__init__()
        self._result = result
        self._stub = stub
        self._endpoint = endpoint or environ.get(
            OTEL_EXPORTER_OTLP_ENDPOINT, "http://localhost:4317"
        )
        parsed_url = urlparse(self._endpoint)
        # An https endpoint always forces a secure channel, regardless of
        # the `insecure` argument or environment variable.
        if parsed_url.scheme == "https":
            insecure = False
        insecure_exporter = environ.get(OTEL_EXPORTER_OTLP_INSECURE)
        if insecure is None:
            if insecure_exporter is not None:
                insecure = insecure_exporter.lower() == "true"
            else:
                # Nothing set anywhere: infer from the URL scheme.
                insecure = parsed_url.scheme == "http"
        # gRPC expects host:port, not a full URL.
        if parsed_url.netloc:
            self._endpoint = parsed_url.netloc
        # Headers may arrive as a W3C-style string, dict or sequence of
        # pairs; normalize everything to a tuple of (name, value) pairs.
        self._headers = headers or environ.get(OTEL_EXPORTER_OTLP_HEADERS)
        if isinstance(self._headers, str):
            temp_headers = parse_env_headers(self._headers, liberal=True)
            self._headers = tuple(temp_headers.items())
        elif isinstance(self._headers, dict):
            self._headers = tuple(self._headers.items())
        if self._headers is None:
            self._headers = tuple()
        if channel_options:
            # merge the default channel options with the one passed as parameter
            overridden_options = {
                opt_name for (opt_name, _) in channel_options
            }
            default_options = tuple(
                (opt_name, opt_value)
                for opt_name, opt_value in _OTLP_GRPC_CHANNEL_OPTIONS
                if opt_name not in overridden_options
            )
            self._channel_options = default_options + channel_options
        else:
            self._channel_options = tuple(_OTLP_GRPC_CHANNEL_OPTIONS)
        self._timeout = timeout or float(
            environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, 10)
        )
        self._collector_kwargs = None
        compression = (
            environ_to_compression(OTEL_EXPORTER_OTLP_COMPRESSION)
            if compression is None
            else compression
        ) or Compression.NoCompression
        if insecure:
            self._channel = insecure_channel(
                self._endpoint,
                compression=compression,
                options=self._channel_options,
            )
        else:
            # Credentials are resolved only for secure channels.
            self._credentials = _get_credentials(
                credentials,
                _OTEL_PYTHON_EXPORTER_OTLP_GRPC_CREDENTIAL_PROVIDER,
                OTEL_EXPORTER_OTLP_CERTIFICATE,
                OTEL_EXPORTER_OTLP_CLIENT_KEY,
                OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
            )
            self._channel = secure_channel(
                self._endpoint,
                self._credentials,
                compression=compression,
                options=self._channel_options,
            )
        self._client = self._stub(self._channel)  # type: ignore [reportCallIssue]
        self._shutdown_in_progress = threading.Event()
        self._shutdown = False

    @abstractmethod
    def _translate_data(
        self,
        data: SDKDataT,
    ) -> ExportServiceRequestT:
        """Encode a batch of SDK data into the protobuf export request."""
        pass

    def _export(
        self,
        data: SDKDataT,
    ) -> ExportResultT:
        """Export *data*, retrying retryable gRPC errors with backoff.

        At most _MAX_RETRYS attempts are made, bounded by the exporter
        timeout; a shutdown in progress aborts any pending retry wait.
        """
        if self._shutdown:
            logger.warning("Exporter already shutdown, ignoring batch")
            return self._result.FAILURE  # type: ignore [reportReturnType]
        # FIXME remove this check if the export type for traces
        # gets updated to a class that represents the proto
        # TracesData and use the code below instead.
        deadline_sec = time() + self._timeout
        for retry_num in range(_MAX_RETRYS):
            try:
                self._client.Export(
                    request=self._translate_data(data),
                    metadata=self._headers,
                    timeout=deadline_sec - time(),
                )
                return self._result.SUCCESS  # type: ignore [reportReturnType]
            except RpcError as error:
                # The server may supply an explicit retry delay via the
                # google.rpc RetryInfo trailing metadata.
                retry_info_bin = dict(error.trailing_metadata()).get(  # type: ignore [reportAttributeAccessIssue]
                    "google.rpc.retryinfo-bin"  # type: ignore [reportArgumentType]
                )
                # multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
                backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
                if retry_info_bin is not None:
                    retry_info = RetryInfo()
                    retry_info.ParseFromString(retry_info_bin)
                    backoff_seconds = (
                        retry_info.retry_delay.seconds
                        + retry_info.retry_delay.nanos / 1.0e9
                    )
                # Give up on non-retryable codes, exhausted attempts,
                # a backoff that would overshoot the deadline, or shutdown.
                if (
                    error.code() not in _RETRYABLE_ERROR_CODES  # type: ignore [reportAttributeAccessIssue]
                    or retry_num + 1 == _MAX_RETRYS
                    or backoff_seconds > (deadline_sec - time())
                    or self._shutdown
                ):
                    logger.error(
                        "Failed to export %s to %s, error code: %s",
                        self._exporting,
                        self._endpoint,
                        error.code(),  # type: ignore [reportAttributeAccessIssue]
                        exc_info=error.code() == StatusCode.UNKNOWN,  # type: ignore [reportAttributeAccessIssue]
                    )
                    return self._result.FAILURE  # type: ignore [reportReturnType]
                logger.warning(
                    "Transient error %s encountered while exporting %s to %s, retrying in %.2fs.",
                    error.code(),  # type: ignore [reportAttributeAccessIssue]
                    self._exporting,
                    self._endpoint,
                    backoff_seconds,
                )
                # Sleep for the backoff, but wake immediately on shutdown.
                shutdown = self._shutdown_in_progress.wait(backoff_seconds)
                if shutdown:
                    logger.warning("Shutdown in progress, aborting retry.")
                    break
        # Not possible to reach here but the linter is complaining.
        return self._result.FAILURE  # type: ignore [reportReturnType]

    def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
        """Mark the exporter shut down, wake pending retries, close the channel."""
        if self._shutdown:
            logger.warning("Exporter already shutdown, ignoring call")
            return
        self._shutdown = True
        self._shutdown_in_progress.set()
        self._channel.close()

    @property
    @abstractmethod
    def _exporting(self) -> str:
        """
        Returns a string that describes the overall exporter, to be used in
        warning messages.
        """
        pass

View File

@@ -0,0 +1,277 @@
# Copyright The OpenTelemetry Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
from dataclasses import replace
from logging import getLogger
from os import environ
from typing import Iterable, List, Tuple, Union
from typing import Sequence as TypingSequence
from grpc import ChannelCredentials, Compression
from opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import (
OTLPMetricExporterMixin,
)
from opentelemetry.exporter.otlp.proto.common.metrics_encoder import (
encode_metrics,
)
from opentelemetry.exporter.otlp.proto.grpc.exporter import ( # noqa: F401
OTLPExporterMixin,
_get_credentials,
environ_to_compression,
get_resource_data,
)
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import (
ExportMetricsServiceRequest,
)
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2_grpc import (
MetricsServiceStub,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
InstrumentationScope,
)
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2 # noqa: F401
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_GRPC_METRICS_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
OTEL_EXPORTER_OTLP_METRICS_COMPRESSION,
OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
OTEL_EXPORTER_OTLP_METRICS_HEADERS,
OTEL_EXPORTER_OTLP_METRICS_INSECURE,
OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
)
from opentelemetry.sdk.metrics._internal.aggregation import Aggregation
from opentelemetry.sdk.metrics.export import ( # noqa: F401
AggregationTemporality,
DataPointT,
Gauge,
Metric,
MetricExporter,
MetricExportResult,
MetricsData,
ResourceMetrics,
ScopeMetrics,
Sum,
)
from opentelemetry.sdk.metrics.export import ( # noqa: F401
ExponentialHistogram as ExponentialHistogramType,
)
from opentelemetry.sdk.metrics.export import ( # noqa: F401
Histogram as HistogramType,
)
# Module-level logger for this exporter module.
_logger = getLogger(__name__)
class OTLPMetricExporter(
    MetricExporter,
    OTLPExporterMixin[
        MetricsData,
        ExportMetricsServiceRequest,
        MetricExportResult,
        MetricsServiceStub,
    ],
    OTLPMetricExporterMixin,
):
    """OTLP metric exporter over gRPC.

    Args:
        endpoint: Target URL to which the exporter is going to send metrics
        max_export_batch_size: Maximum number of data points to export in a single request. This is to deal with
            gRPC's 4MB message size limit. If not set there is no limit to the number of data points in a request.
            If it is set and the number of data points exceeds the max, the request will be split.
    """

    def __init__(
        self,
        endpoint: str | None = None,
        insecure: bool | None = None,
        credentials: ChannelCredentials | None = None,
        headers: Union[TypingSequence[Tuple[str, str]], dict[str, str], str]
        | None = None,
        timeout: float | None = None,
        compression: Compression | None = None,
        preferred_temporality: dict[type, AggregationTemporality]
        | None = None,
        preferred_aggregation: dict[type, Aggregation] | None = None,
        max_export_batch_size: int | None = None,
        channel_options: Tuple[Tuple[str, str]] | None = None,
    ):
        # Explicit constructor arguments take precedence over the
        # metrics-specific environment variables consulted below.
        insecure_metrics = environ.get(OTEL_EXPORTER_OTLP_METRICS_INSECURE)
        if insecure is None and insecure_metrics is not None:
            insecure = insecure_metrics.lower() == "true"
        # Only build TLS credentials when the connection is secure and a
        # certificate is configured via the environment.
        if (
            not insecure
            and environ.get(OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE) is not None
        ):
            credentials = _get_credentials(
                credentials,
                _OTEL_PYTHON_EXPORTER_OTLP_GRPC_METRICS_CREDENTIAL_PROVIDER,
                OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
                OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
                OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
            )
        environ_timeout = environ.get(OTEL_EXPORTER_OTLP_METRICS_TIMEOUT)
        environ_timeout = (
            float(environ_timeout) if environ_timeout is not None else None
        )
        compression = (
            environ_to_compression(OTEL_EXPORTER_OTLP_METRICS_COMPRESSION)
            if compression is None
            else compression
        )
        # Configure temporality/aggregation preferences on the SDK mixin.
        self._common_configuration(
            preferred_temporality, preferred_aggregation
        )
        OTLPExporterMixin.__init__(
            self,
            stub=MetricsServiceStub,
            result=MetricExportResult,
            endpoint=endpoint
            or environ.get(OTEL_EXPORTER_OTLP_METRICS_ENDPOINT),
            insecure=insecure,
            credentials=credentials,
            headers=headers or environ.get(OTEL_EXPORTER_OTLP_METRICS_HEADERS),
            timeout=timeout or environ_timeout,
            compression=compression,
            channel_options=channel_options,
        )
        # None disables request splitting entirely.
        self._max_export_batch_size: int | None = max_export_batch_size

    def _translate_data(  # type: ignore [reportIncompatibleMethodOverride]
        self, data: MetricsData
    ) -> ExportMetricsServiceRequest:
        # Encode SDK metrics into the OTLP protobuf request message.
        return encode_metrics(data)

    def export(
        self,
        metrics_data: MetricsData,
        timeout_millis: float = 10_000,
        **kwargs,
    ) -> MetricExportResult:
        """Export *metrics_data*, splitting it into several requests when a
        max_export_batch_size was configured.

        Returns FAILURE if any of the split requests fails.
        """
        # TODO(#2663): OTLPExporterMixin should pass timeout to gRPC
        if self._max_export_batch_size is None:
            return self._export(data=metrics_data)
        export_result = MetricExportResult.SUCCESS
        for split_metrics_data in self._split_metrics_data(metrics_data):
            split_export_result = self._export(data=split_metrics_data)
            if split_export_result is MetricExportResult.FAILURE:
                export_result = MetricExportResult.FAILURE
        return export_result

    def _split_metrics_data(
        self,
        metrics_data: MetricsData,
    ) -> Iterable[MetricsData]:
        """Yield MetricsData chunks holding at most
        ``self._max_export_batch_size`` data points each.

        The resource_metrics/scope_metrics/metric wrappers are shallow-copied
        with dataclasses.replace so every chunk preserves the original
        nesting structure while the chunks partition the data points.
        """
        assert self._max_export_batch_size is not None
        batch_size: int = 0
        split_resource_metrics: List[ResourceMetrics] = []
        for resource_metrics in metrics_data.resource_metrics:
            split_scope_metrics: List[ScopeMetrics] = []
            # Clone the resource wrapper; its scope_metrics list is filled in
            # incrementally below (the lists are shared by reference).
            split_resource_metrics.append(
                replace(
                    resource_metrics,
                    scope_metrics=split_scope_metrics,
                )
            )
            for scope_metrics in resource_metrics.scope_metrics:
                split_metrics: List[Metric] = []
                split_scope_metrics.append(
                    replace(
                        scope_metrics,
                        metrics=split_metrics,
                    )
                )
                for metric in scope_metrics.metrics:
                    split_data_points: List[DataPointT] = []
                    split_metrics.append(
                        replace(
                            metric,
                            data=replace(
                                metric.data,
                                data_points=split_data_points,
                            ),
                        )
                    )
                    for data_point in metric.data.data_points:
                        split_data_points.append(data_point)
                        batch_size += 1
                        if batch_size >= self._max_export_batch_size:
                            # Flush the current chunk and start a new one
                            # rooted at the current resource/scope/metric.
                            yield MetricsData(
                                resource_metrics=split_resource_metrics
                            )
                            # Reset all the variables
                            batch_size = 0
                            split_data_points = []
                            split_metrics = [
                                replace(
                                    metric,
                                    data=replace(
                                        metric.data,
                                        data_points=split_data_points,
                                    ),
                                )
                            ]
                            split_scope_metrics = [
                                replace(
                                    scope_metrics,
                                    metrics=split_metrics,
                                )
                            ]
                            split_resource_metrics = [
                                replace(
                                    resource_metrics,
                                    scope_metrics=split_scope_metrics,
                                )
                            ]
                    if not split_data_points:
                        # If data_points is empty remove the whole metric
                        split_metrics.pop()
                if not split_metrics:
                    # If metrics is empty remove the whole scope_metrics
                    split_scope_metrics.pop()
            if not split_scope_metrics:
                # If scope_metrics is empty remove the whole resource_metrics
                split_resource_metrics.pop()
        # Emit whatever remains after the loops finish.
        if batch_size > 0:
            yield MetricsData(resource_metrics=split_resource_metrics)

    def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
        # Delegate channel teardown to the shared gRPC mixin.
        OTLPExporterMixin.shutdown(self, timeout_millis=timeout_millis)

    @property
    def _exporting(self) -> str:
        # Name used in the mixin's warning/error messages.
        return "metrics"

    def force_flush(self, timeout_millis: float = 10_000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True

View File

@@ -0,0 +1,157 @@
# Copyright The OpenTelemetry Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OTLP Span Exporter"""
import logging
from os import environ
from typing import Dict, Optional, Sequence, Tuple, Union
from typing import Sequence as TypingSequence
from grpc import ChannelCredentials, Compression
from opentelemetry.exporter.otlp.proto.common.trace_encoder import (
encode_spans,
)
from opentelemetry.exporter.otlp.proto.grpc.exporter import ( # noqa: F401
OTLPExporterMixin,
_get_credentials,
environ_to_compression,
get_resource_data,
)
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import (
ExportTraceServiceRequest,
)
from opentelemetry.proto.collector.trace.v1.trace_service_pb2_grpc import (
TraceServiceStub,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
InstrumentationScope,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
ResourceSpans,
ScopeSpans,
Status,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
Span as CollectorSpan,
)
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_GRPC_TRACES_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
OTEL_EXPORTER_OTLP_TRACES_COMPRESSION,
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT,
OTEL_EXPORTER_OTLP_TRACES_HEADERS,
OTEL_EXPORTER_OTLP_TRACES_INSECURE,
OTEL_EXPORTER_OTLP_TRACES_TIMEOUT,
)
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
# Module-level logger for this exporter module.
logger = logging.getLogger(__name__)
# pylint: disable=no-member
class OTLPSpanExporter(
    SpanExporter,
    OTLPExporterMixin[
        Sequence[ReadableSpan],
        ExportTraceServiceRequest,
        SpanExportResult,
        TraceServiceStub,
    ],
):
    # pylint: disable=unsubscriptable-object
    """Exports spans to an OTLP collector over gRPC.

    Args:
        endpoint: OpenTelemetry Collector receiver endpoint
        insecure: Connection type
        credentials: Credentials object for server authentication
        headers: Headers to send when exporting
        timeout: Backend request timeout in seconds
        compression: gRPC compression method to use
    """

    def __init__(
        self,
        endpoint: Optional[str] = None,
        insecure: Optional[bool] = None,
        credentials: Optional[ChannelCredentials] = None,
        headers: Optional[
            Union[TypingSequence[Tuple[str, str]], Dict[str, str], str]
        ] = None,
        timeout: Optional[float] = None,
        compression: Optional[Compression] = None,
        channel_options: Optional[Tuple[Tuple[str, str]]] = None,
    ):
        # Arguments omitted by the caller fall back to the traces-specific
        # environment variables.
        if insecure is None:
            env_insecure = environ.get(OTEL_EXPORTER_OTLP_TRACES_INSECURE)
            if env_insecure is not None:
                insecure = env_insecure.lower() == "true"

        # TLS credentials only apply to secure connections with a
        # certificate configured in the environment.
        if not insecure and (
            environ.get(OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE) is not None
        ):
            credentials = _get_credentials(
                credentials,
                _OTEL_PYTHON_EXPORTER_OTLP_GRPC_TRACES_CREDENTIAL_PROVIDER,
                OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
                OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
                OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
            )

        env_timeout = environ.get(OTEL_EXPORTER_OTLP_TRACES_TIMEOUT)
        parsed_timeout = None if env_timeout is None else float(env_timeout)

        if compression is None:
            compression = environ_to_compression(
                OTEL_EXPORTER_OTLP_TRACES_COMPRESSION
            )

        OTLPExporterMixin.__init__(
            self,
            stub=TraceServiceStub,
            result=SpanExportResult,
            endpoint=endpoint
            or environ.get(OTEL_EXPORTER_OTLP_TRACES_ENDPOINT),
            insecure=insecure,
            credentials=credentials,
            headers=headers or environ.get(OTEL_EXPORTER_OTLP_TRACES_HEADERS),
            timeout=timeout or parsed_timeout,
            compression=compression,
            channel_options=channel_options,
        )

    def _translate_data(
        self, data: Sequence[ReadableSpan]
    ) -> ExportTraceServiceRequest:
        # Delegate to the shared protobuf encoder.
        return encode_spans(data)

    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        """Encode *spans* and send them to the collector."""
        return self._export(spans)

    def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
        """Close the underlying channel via the shared gRPC mixin."""
        OTLPExporterMixin.shutdown(self, timeout_millis=timeout_millis)

    def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True

    @property
    def _exporting(self):
        # Name used in the mixin's warning/error messages.
        return "traces"

View File

@@ -0,0 +1,15 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Package version, kept in sync with the opentelemetry-python release train.
__version__ = "1.38.0"

View File

@@ -0,0 +1,86 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This library allows exporting tracing data to an OTLP collector.
Usage
-----
The **OTLP Span Exporter** allows to export `OpenTelemetry`_ traces to the
`OTLP`_ collector.
You can configure the exporter with the following environment variables:
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_TIMEOUT`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_PROTOCOL`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_HEADERS`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_ENDPOINT`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_COMPRESSION`
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE`
- :envvar:`OTEL_EXPORTER_OTLP_TIMEOUT`
- :envvar:`OTEL_EXPORTER_OTLP_PROTOCOL`
- :envvar:`OTEL_EXPORTER_OTLP_HEADERS`
- :envvar:`OTEL_EXPORTER_OTLP_ENDPOINT`
- :envvar:`OTEL_EXPORTER_OTLP_COMPRESSION`
- :envvar:`OTEL_EXPORTER_OTLP_CERTIFICATE`
.. _OTLP: https://github.com/open-telemetry/opentelemetry-collector/
.. _OpenTelemetry: https://github.com/open-telemetry/opentelemetry-python/
.. code:: python
from opentelemetry import trace
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
from opentelemetry.sdk.resources import Resource
from opentelemetry.sdk.trace import TracerProvider
from opentelemetry.sdk.trace.export import BatchSpanProcessor
# Resource can be required for some backends, e.g. Jaeger
# If the resource is not set, traces will not appear in Jaeger
resource = Resource(attributes={
"service.name": "service"
})
trace.set_tracer_provider(TracerProvider(resource=resource))
tracer = trace.get_tracer(__name__)
otlp_exporter = OTLPSpanExporter()
span_processor = BatchSpanProcessor(otlp_exporter)
trace.get_tracer_provider().add_span_processor(span_processor)
with tracer.start_as_current_span("foo"):
print("Hello world!")
API
---
"""
import enum
from .version import __version__
# Default HTTP headers sent with every OTLP/HTTP request; user-configured
# headers may override individual entries.
_OTLP_HTTP_HEADERS = {
    "Content-Type": "application/x-protobuf",
    "User-Agent": "OTel-OTLP-Exporter-Python/" + __version__,
}
class Compression(enum.Enum):
    """Payload compression schemes supported by the OTLP/HTTP exporters."""

    NoCompression = "none"
    Deflate = "deflate"
    Gzip = "gzip"

View File

@@ -0,0 +1,69 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from os import environ
from typing import Literal, Optional
import requests
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_CREDENTIAL_PROVIDER,
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
)
from opentelemetry.util._importlib_metadata import entry_points
def _is_retryable(resp: requests.Response) -> bool:
if resp.status_code == 408:
return True
if resp.status_code >= 500 and resp.status_code <= 599:
return True
return False
def _load_session_from_envvar(
    cred_envvar: str,
) -> Optional[requests.Session]:
    """Load a ``requests.Session`` from a configured credential provider.

    The generic credential-provider environment variable is consulted first,
    then the signal-specific one named by *cred_envvar*. When either is set,
    it must name an entry point in the
    ``opentelemetry_otlp_credential_provider`` group; that entry point is
    loaded and called to produce the session.

    Args:
        cred_envvar: name of the signal-specific credential-provider
            environment variable (logs, traces or metrics variant).

    Returns:
        The provider-supplied session, or None when no provider is
        configured.

    Raises:
        RuntimeError: if the configured provider is not found, or returns
            something that is not a ``requests.Session``.
    """
    _credential_env = environ.get(
        _OTEL_PYTHON_EXPORTER_OTLP_HTTP_CREDENTIAL_PROVIDER
    ) or environ.get(cred_envvar)
    if _credential_env:
        try:
            maybe_session = next(
                iter(
                    entry_points(
                        group="opentelemetry_otlp_credential_provider",
                        name=_credential_env,
                    )
                )
            ).load()()
        except StopIteration:
            # Suppress the uninformative StopIteration context.
            raise RuntimeError(
                f"Requested component '{_credential_env}' not found in "
                f"entry point 'opentelemetry_otlp_credential_provider'"
            ) from None
        if isinstance(maybe_session, requests.Session):
            return maybe_session
        raise RuntimeError(
            f"Requested component '{_credential_env}' is of type {type(maybe_session)}"
            f" must be of type `requests.Session`."
        )
    return None

View File

@@ -0,0 +1,243 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gzip
import logging
import random
import threading
import zlib
from io import BytesIO
from os import environ
from time import time
from typing import Dict, Optional, Sequence
import requests
from requests.exceptions import ConnectionError
from opentelemetry.exporter.otlp.proto.common._log_encoder import encode_logs
from opentelemetry.exporter.otlp.proto.http import (
_OTLP_HTTP_HEADERS,
Compression,
)
from opentelemetry.exporter.otlp.proto.http._common import (
_is_retryable,
_load_session_from_envvar,
)
from opentelemetry.sdk._logs import LogData
from opentelemetry.sdk._logs.export import (
LogExporter,
LogExportResult,
)
from opentelemetry.sdk._shared_internal import DuplicateFilter
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_KEY,
OTEL_EXPORTER_OTLP_COMPRESSION,
OTEL_EXPORTER_OTLP_ENDPOINT,
OTEL_EXPORTER_OTLP_HEADERS,
OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
OTEL_EXPORTER_OTLP_LOGS_COMPRESSION,
OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
OTEL_EXPORTER_OTLP_LOGS_HEADERS,
OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
OTEL_EXPORTER_OTLP_TIMEOUT,
)
from opentelemetry.util.re import parse_env_headers
_logger = logging.getLogger(__name__)
# This prevents logs generated when a log fails to be written to generate another log which fails to be written etc. etc.
_logger.addFilter(DuplicateFilter())

# Defaults used when no explicit argument or environment variable is set.
DEFAULT_COMPRESSION = Compression.NoCompression
DEFAULT_ENDPOINT = "http://localhost:4318/"
DEFAULT_LOGS_EXPORT_PATH = "v1/logs"
DEFAULT_TIMEOUT = 10  # in seconds
# Maximum number of export attempts (initial try plus retries).
_MAX_RETRYS = 6
class OTLPLogExporter(LogExporter):
    """OTLP/HTTP log exporter.

    Sends protobuf-encoded log batches over HTTP, retrying transient
    failures with exponential backoff and jitter.

    Any argument left as None falls back to the logs-specific environment
    variable, then the generic OTLP one, then a default.

    Args:
        endpoint: OTLP logs endpoint URL.
        certificate_file: CA bundle used to verify the server certificate.
        client_key_file: private key for client (mTLS) authentication.
        client_certificate_file: certificate for client (mTLS)
            authentication.
        headers: HTTP headers added to each request.
        timeout: per-export deadline in seconds.
        compression: payload compression scheme.
        session: pre-configured ``requests.Session`` to use.
    """

    def __init__(
        self,
        endpoint: Optional[str] = None,
        certificate_file: Optional[str] = None,
        client_key_file: Optional[str] = None,
        client_certificate_file: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
        timeout: Optional[float] = None,
        compression: Optional[Compression] = None,
        session: Optional[requests.Session] = None,
    ):
        # Event used to cut retry backoffs short when shutdown() is called.
        self._shutdown_is_occuring = threading.Event()
        self._endpoint = endpoint or environ.get(
            OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
            _append_logs_path(
                environ.get(OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_ENDPOINT)
            ),
        )
        # Keeping these as instance variables because they are used in tests.
        # NOTE: the fallback default True means "verify against the system
        # CA store" when passed as requests' `verify` argument.
        self._certificate_file = certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CERTIFICATE, True),
        )
        self._client_key_file = client_key_file or environ.get(
            OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_KEY, None),
        )
        self._client_certificate_file = client_certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, None),
        )
        # requests accepts either a single cert file or a (cert, key) pair.
        self._client_cert = (
            (self._client_certificate_file, self._client_key_file)
            if self._client_certificate_file and self._client_key_file
            else self._client_certificate_file
        )
        headers_string = environ.get(
            OTEL_EXPORTER_OTLP_LOGS_HEADERS,
            environ.get(OTEL_EXPORTER_OTLP_HEADERS, ""),
        )
        self._headers = headers or parse_env_headers(
            headers_string, liberal=True
        )
        self._timeout = timeout or float(
            environ.get(
                OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
                environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT),
            )
        )
        self._compression = compression or _compression_from_env()
        self._session = (
            session
            or _load_session_from_envvar(
                _OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER
            )
            or requests.Session()
        )
        self._session.headers.update(_OTLP_HTTP_HEADERS)
        # Apply user headers after the defaults so users can override them.
        self._session.headers.update(self._headers)
        if self._compression is not Compression.NoCompression:
            self._session.headers.update(
                {"Content-Encoding": self._compression.value}
            )
        self._shutdown = False

    def _export(
        self, serialized_data: bytes, timeout_sec: Optional[float] = None
    ):
        """POST *serialized_data* to the endpoint, compressing if configured.

        Returns the ``requests.Response``.
        """
        data = serialized_data
        if self._compression == Compression.Gzip:
            gzip_data = BytesIO()
            with gzip.GzipFile(fileobj=gzip_data, mode="w") as gzip_stream:
                gzip_stream.write(serialized_data)
            data = gzip_data.getvalue()
        elif self._compression == Compression.Deflate:
            data = zlib.compress(serialized_data)

        if timeout_sec is None:
            timeout_sec = self._timeout

        # By default, keep-alive is enabled in Session's request
        # headers. Backends may choose to close the connection
        # while a post happens which causes an unhandled
        # exception. This try/except will retry the post on such exceptions
        try:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        except ConnectionError:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        return resp

    def export(self, batch: Sequence[LogData]) -> LogExportResult:
        """Encode and send *batch*, retrying transient failures with
        exponential backoff and jitter until the deadline expires."""
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring batch")
            return LogExportResult.FAILURE
        serialized_data = encode_logs(batch).SerializeToString()
        deadline_sec = time() + self._timeout
        for retry_num in range(_MAX_RETRYS):
            resp = self._export(serialized_data, deadline_sec - time())
            if resp.ok:
                return LogExportResult.SUCCESS
            # multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
            backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
            if (
                not _is_retryable(resp)
                or retry_num + 1 == _MAX_RETRYS
                or backoff_seconds > (deadline_sec - time())
                or self._shutdown
            ):
                _logger.error(
                    "Failed to export logs batch code: %s, reason: %s",
                    resp.status_code,
                    resp.text,
                )
                return LogExportResult.FAILURE
            _logger.warning(
                "Transient error %s encountered while exporting logs batch, retrying in %.2fs.",
                resp.reason,
                backoff_seconds,
            )
            # Wait on the shutdown event rather than sleeping so shutdown()
            # can interrupt the backoff early.
            shutdown = self._shutdown_is_occuring.wait(backoff_seconds)
            if shutdown:
                _logger.warning("Shutdown in progress, aborting retry.")
                break
        return LogExportResult.FAILURE

    def force_flush(self, timeout_millis: float = 10_000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True

    def shutdown(self):
        """Mark the exporter shut down, wake any retry wait, and close the
        HTTP session. Repeated calls are ignored with a warning."""
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring call")
            return
        self._shutdown = True
        self._shutdown_is_occuring.set()
        self._session.close()
def _compression_from_env() -> Compression:
    """Resolve compression from the logs-specific env var, then the generic
    OTLP one, defaulting to "none"."""
    raw_value = environ.get(
        OTEL_EXPORTER_OTLP_LOGS_COMPRESSION,
        environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
    )
    return Compression(raw_value.lower().strip())
def _append_logs_path(endpoint: str) -> str:
    """Join the default logs path onto *endpoint*, avoiding a double slash."""
    separator = "" if endpoint.endswith("/") else "/"
    return f"{endpoint}{separator}{DEFAULT_LOGS_EXPORT_PATH}"

View File

@@ -0,0 +1,305 @@
# Copyright The OpenTelemetry Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import annotations
import gzip
import logging
import random
import threading
import zlib
from io import BytesIO
from os import environ
from time import time
from typing import ( # noqa: F401
Any,
Callable,
Dict,
List,
Mapping,
Optional,
Sequence,
)
import requests
from requests.exceptions import ConnectionError
from typing_extensions import deprecated
from opentelemetry.exporter.otlp.proto.common._internal import (
_get_resource_data,
)
from opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import (
OTLPMetricExporterMixin,
)
from opentelemetry.exporter.otlp.proto.common.metrics_encoder import (
encode_metrics,
)
from opentelemetry.exporter.otlp.proto.http import (
_OTLP_HTTP_HEADERS,
Compression,
)
from opentelemetry.exporter.otlp.proto.http._common import (
_is_retryable,
_load_session_from_envvar,
)
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ( # noqa: F401
ExportMetricsServiceRequest,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
AnyValue,
ArrayValue,
InstrumentationScope,
KeyValue,
KeyValueList,
)
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2 # noqa: F401
from opentelemetry.proto.resource.v1.resource_pb2 import Resource # noqa: F401
from opentelemetry.proto.resource.v1.resource_pb2 import (
Resource as PB2Resource,
)
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_KEY,
OTEL_EXPORTER_OTLP_COMPRESSION,
OTEL_EXPORTER_OTLP_ENDPOINT,
OTEL_EXPORTER_OTLP_HEADERS,
OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
OTEL_EXPORTER_OTLP_METRICS_COMPRESSION,
OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
OTEL_EXPORTER_OTLP_METRICS_HEADERS,
OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
OTEL_EXPORTER_OTLP_TIMEOUT,
)
from opentelemetry.sdk.metrics._internal.aggregation import Aggregation
from opentelemetry.sdk.metrics.export import ( # noqa: F401
AggregationTemporality,
Gauge,
MetricExporter,
MetricExportResult,
MetricsData,
Sum,
)
from opentelemetry.sdk.metrics.export import ( # noqa: F401
Histogram as HistogramType,
)
from opentelemetry.sdk.resources import Resource as SDKResource
from opentelemetry.util.re import parse_env_headers
# Module-level logger for this exporter module.
_logger = logging.getLogger(__name__)

# Defaults used when no explicit argument or environment variable is set.
DEFAULT_COMPRESSION = Compression.NoCompression
DEFAULT_ENDPOINT = "http://localhost:4318/"
DEFAULT_METRICS_EXPORT_PATH = "v1/metrics"
DEFAULT_TIMEOUT = 10  # in seconds
# Maximum number of export attempts (initial try plus retries).
_MAX_RETRYS = 6
class OTLPMetricExporter(MetricExporter, OTLPMetricExporterMixin):
def __init__(
self,
endpoint: str | None = None,
certificate_file: str | None = None,
client_key_file: str | None = None,
client_certificate_file: str | None = None,
headers: dict[str, str] | None = None,
timeout: float | None = None,
compression: Compression | None = None,
session: requests.Session | None = None,
preferred_temporality: dict[type, AggregationTemporality]
| None = None,
preferred_aggregation: dict[type, Aggregation] | None = None,
):
self._shutdown_in_progress = threading.Event()
self._endpoint = endpoint or environ.get(
OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
_append_metrics_path(
environ.get(OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_ENDPOINT)
),
)
self._certificate_file = certificate_file or environ.get(
OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
environ.get(OTEL_EXPORTER_OTLP_CERTIFICATE, True),
)
self._client_key_file = client_key_file or environ.get(
OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
environ.get(OTEL_EXPORTER_OTLP_CLIENT_KEY, None),
)
self._client_certificate_file = client_certificate_file or environ.get(
OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
environ.get(OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, None),
)
self._client_cert = (
(self._client_certificate_file, self._client_key_file)
if self._client_certificate_file and self._client_key_file
else self._client_certificate_file
)
headers_string = environ.get(
OTEL_EXPORTER_OTLP_METRICS_HEADERS,
environ.get(OTEL_EXPORTER_OTLP_HEADERS, ""),
)
self._headers = headers or parse_env_headers(
headers_string, liberal=True
)
self._timeout = timeout or float(
environ.get(
OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT),
)
)
self._compression = compression or _compression_from_env()
self._session = (
session
or _load_session_from_envvar(
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER
)
or requests.Session()
)
self._session.headers.update(self._headers)
self._session.headers.update(_OTLP_HTTP_HEADERS)
# let users override our defaults
self._session.headers.update(self._headers)
if self._compression is not Compression.NoCompression:
self._session.headers.update(
{"Content-Encoding": self._compression.value}
)
self._common_configuration(
preferred_temporality, preferred_aggregation
)
self._shutdown = False
def _export(
self, serialized_data: bytes, timeout_sec: Optional[float] = None
):
data = serialized_data
if self._compression == Compression.Gzip:
gzip_data = BytesIO()
with gzip.GzipFile(fileobj=gzip_data, mode="w") as gzip_stream:
gzip_stream.write(serialized_data)
data = gzip_data.getvalue()
elif self._compression == Compression.Deflate:
data = zlib.compress(serialized_data)
if timeout_sec is None:
timeout_sec = self._timeout
# By default, keep-alive is enabled in Session's request
# headers. Backends may choose to close the connection
# while a post happens which causes an unhandled
# exception. This try/except will retry the post on such exceptions
try:
resp = self._session.post(
url=self._endpoint,
data=data,
verify=self._certificate_file,
timeout=timeout_sec,
cert=self._client_cert,
)
except ConnectionError:
resp = self._session.post(
url=self._endpoint,
data=data,
verify=self._certificate_file,
timeout=timeout_sec,
cert=self._client_cert,
)
return resp
def export(
    self,
    metrics_data: MetricsData,
    timeout_millis: Optional[float] = 10000,
    **kwargs,
) -> MetricExportResult:
    """Serialize and export *metrics_data*, retrying transient failures.

    Retries up to ``_MAX_RETRYS`` times with exponential backoff and
    jitter, bounded by an overall deadline of ``self._timeout`` seconds.

    NOTE(review): ``timeout_millis`` is accepted but never read — the
    deadline is derived from ``self._timeout``; confirm against the
    ``MetricExporter`` interface contract.
    """
    if self._shutdown:
        _logger.warning("Exporter already shutdown, ignoring batch")
        return MetricExportResult.FAILURE
    serialized_data = encode_metrics(metrics_data).SerializeToString()
    deadline_sec = time() + self._timeout
    for retry_num in range(_MAX_RETRYS):
        resp = self._export(serialized_data, deadline_sec - time())
        if resp.ok:
            return MetricExportResult.SUCCESS
        # multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
        backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
        # Give up on non-retryable status codes, on the last attempt,
        # when the backoff would overshoot the deadline, or on shutdown.
        if (
            not _is_retryable(resp)
            or retry_num + 1 == _MAX_RETRYS
            or backoff_seconds > (deadline_sec - time())
            or self._shutdown
        ):
            _logger.error(
                "Failed to export metrics batch code: %s, reason: %s",
                resp.status_code,
                resp.text,
            )
            return MetricExportResult.FAILURE
        _logger.warning(
            "Transient error %s encountered while exporting metrics batch, retrying in %.2fs.",
            resp.reason,
            backoff_seconds,
        )
        # Sleep for the backoff, but wake immediately if shutdown() is
        # called concurrently.
        shutdown = self._shutdown_in_progress.wait(backoff_seconds)
        if shutdown:
            _logger.warning("Shutdown in progress, aborting retry.")
            break
    return MetricExportResult.FAILURE
def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
    """Mark the exporter as shut down and close the HTTP session.

    Idempotent: repeated calls only log a warning. Also wakes any export
    currently sleeping in a retry backoff.
    """
    if not self._shutdown:
        self._shutdown = True
        self._shutdown_in_progress.set()
        self._session.close()
    else:
        _logger.warning("Exporter already shutdown, ignoring call")
@property
def _exporting(self) -> str:
    # Names the telemetry signal this exporter handles; presumably used
    # by shared OTLP exporter helpers for log messages — confirm.
    return "metrics"
def force_flush(self, timeout_millis: float = 10_000) -> bool:
    """Report immediate success; this exporter buffers nothing.

    The timeout is ignored because there is never anything to wait for.
    """
    return True
@deprecated(
    "Use one of the encoders from opentelemetry-exporter-otlp-proto-common instead. Deprecated since version 1.18.0.",
)
def get_resource_data(
    sdk_resource_scope_data: Dict[SDKResource, Any],  # ResourceDataT?
    resource_class: Callable[..., PB2Resource],
    name: str,
) -> List[PB2Resource]:
    """Deprecated backward-compatibility shim.

    Delegates directly to the shared ``_get_resource_data`` encoder from
    ``opentelemetry-exporter-otlp-proto-common``.
    """
    return _get_resource_data(sdk_resource_scope_data, resource_class, name)
def _compression_from_env() -> Compression:
    """Resolve payload compression from the environment.

    The metrics-specific variable wins over the generic OTLP one; the
    value defaults to ``"none"`` and is normalized (lower-cased,
    stripped) before being mapped onto the ``Compression`` enum.
    """
    raw_value = environ.get(
        OTEL_EXPORTER_OTLP_METRICS_COMPRESSION,
        environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
    )
    return Compression(raw_value.lower().strip())
def _append_metrics_path(endpoint: str) -> str:
    """Join *endpoint* and the metrics export path with exactly one slash."""
    separator = "" if endpoint.endswith("/") else "/"
    return f"{endpoint}{separator}{DEFAULT_METRICS_EXPORT_PATH}"

View File

@@ -0,0 +1,238 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gzip
import logging
import random
import threading
import zlib
from io import BytesIO
from os import environ
from time import time
from typing import Dict, Optional, Sequence
import requests
from requests.exceptions import ConnectionError
from opentelemetry.exporter.otlp.proto.common.trace_encoder import (
encode_spans,
)
from opentelemetry.exporter.otlp.proto.http import (
_OTLP_HTTP_HEADERS,
Compression,
)
from opentelemetry.exporter.otlp.proto.http._common import (
_is_retryable,
_load_session_from_envvar,
)
from opentelemetry.sdk.environment_variables import (
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
OTEL_EXPORTER_OTLP_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_CLIENT_KEY,
OTEL_EXPORTER_OTLP_COMPRESSION,
OTEL_EXPORTER_OTLP_ENDPOINT,
OTEL_EXPORTER_OTLP_HEADERS,
OTEL_EXPORTER_OTLP_TIMEOUT,
OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
OTEL_EXPORTER_OTLP_TRACES_COMPRESSION,
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT,
OTEL_EXPORTER_OTLP_TRACES_HEADERS,
OTEL_EXPORTER_OTLP_TRACES_TIMEOUT,
)
from opentelemetry.sdk.trace import ReadableSpan
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
from opentelemetry.util.re import parse_env_headers
_logger = logging.getLogger(__name__)

# Fallbacks used when neither constructor arguments nor environment
# variables provide a value.
DEFAULT_COMPRESSION = Compression.NoCompression
DEFAULT_ENDPOINT = "http://localhost:4318/"
DEFAULT_TRACES_EXPORT_PATH = "v1/traces"
DEFAULT_TIMEOUT = 10  # in seconds

# Maximum number of export attempts (first try plus backoff retries).
_MAX_RETRYS = 6
class OTLPSpanExporter(SpanExporter):
    """OTLP span exporter over HTTP/protobuf.

    Every setting falls back from the constructor argument, to the
    trace-specific environment variable, to the generic OTLP environment
    variable, to a hard-coded default.
    """

    def __init__(
        self,
        endpoint: Optional[str] = None,
        certificate_file: Optional[str] = None,
        client_key_file: Optional[str] = None,
        client_certificate_file: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
        timeout: Optional[float] = None,
        compression: Optional[Compression] = None,
        session: Optional[requests.Session] = None,
    ):
        # Set by shutdown() so an export sleeping in a retry backoff can
        # abort early.
        self._shutdown_in_progress = threading.Event()
        self._endpoint = endpoint or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_ENDPOINT,
            _append_trace_path(
                environ.get(OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_ENDPOINT)
            ),
        )
        # Defaults to True (verify against system CAs) when no CA bundle
        # is configured; passed straight to requests' ``verify``.
        self._certificate_file = certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CERTIFICATE, True),
        )
        self._client_key_file = client_key_file or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_KEY, None),
        )
        self._client_certificate_file = client_certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, None),
        )
        # requests' ``cert`` accepts either a (cert, key) pair or a single
        # combined PEM file path.
        self._client_cert = (
            (self._client_certificate_file, self._client_key_file)
            if self._client_certificate_file and self._client_key_file
            else self._client_certificate_file
        )
        headers_string = environ.get(
            OTEL_EXPORTER_OTLP_TRACES_HEADERS,
            environ.get(OTEL_EXPORTER_OTLP_HEADERS, ""),
        )
        self._headers = headers or parse_env_headers(
            headers_string, liberal=True
        )
        self._timeout = timeout or float(
            environ.get(
                OTEL_EXPORTER_OTLP_TRACES_TIMEOUT,
                environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT),
            )
        )
        self._compression = compression or _compression_from_env()
        self._session = (
            session
            or _load_session_from_envvar(
                _OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER
            )
            or requests.Session()
        )
        self._session.headers.update(self._headers)
        self._session.headers.update(_OTLP_HTTP_HEADERS)
        # let users override our defaults
        self._session.headers.update(self._headers)
        if self._compression is not Compression.NoCompression:
            self._session.headers.update(
                {"Content-Encoding": self._compression.value}
            )
        self._shutdown = False

    def _export(
        self, serialized_data: bytes, timeout_sec: Optional[float] = None
    ):
        """POST *serialized_data* to the endpoint, compressing if configured.

        When *timeout_sec* is ``None``, ``self._timeout`` is used.
        Returns the ``requests.Response``.
        """
        data = serialized_data
        if self._compression == Compression.Gzip:
            gzip_data = BytesIO()
            with gzip.GzipFile(fileobj=gzip_data, mode="w") as gzip_stream:
                gzip_stream.write(serialized_data)
            data = gzip_data.getvalue()
        elif self._compression == Compression.Deflate:
            data = zlib.compress(serialized_data)
        if timeout_sec is None:
            timeout_sec = self._timeout
        # By default, keep-alive is enabled in Session's request
        # headers. Backends may choose to close the connection
        # while a post happens which causes an unhandled
        # exception. This try/except will retry the post on such exceptions
        try:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        except ConnectionError:
            # Single immediate retry for a dropped kept-alive connection.
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        return resp

    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        """Serialize and export *spans*, retrying transient failures.

        Retries up to ``_MAX_RETRYS`` times with jittered exponential
        backoff, bounded by a deadline of ``self._timeout`` seconds.
        """
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring batch")
            return SpanExportResult.FAILURE
        serialized_data = encode_spans(spans).SerializePartialToString()
        deadline_sec = time() + self._timeout
        for retry_num in range(_MAX_RETRYS):
            resp = self._export(serialized_data, deadline_sec - time())
            if resp.ok:
                return SpanExportResult.SUCCESS
            # multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
            backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
            # Give up on non-retryable status codes, on the last attempt,
            # when the backoff would overshoot the deadline, or on shutdown.
            if (
                not _is_retryable(resp)
                or retry_num + 1 == _MAX_RETRYS
                or backoff_seconds > (deadline_sec - time())
                or self._shutdown
            ):
                _logger.error(
                    "Failed to export span batch code: %s, reason: %s",
                    resp.status_code,
                    resp.text,
                )
                return SpanExportResult.FAILURE
            _logger.warning(
                "Transient error %s encountered while exporting span batch, retrying in %.2fs.",
                resp.reason,
                backoff_seconds,
            )
            # Sleep for the backoff, waking immediately on shutdown().
            shutdown = self._shutdown_in_progress.wait(backoff_seconds)
            if shutdown:
                _logger.warning("Shutdown in progress, aborting retry.")
                break
        return SpanExportResult.FAILURE

    def shutdown(self):
        """Mark the exporter shut down and close the HTTP session.

        Idempotent; also wakes any export sleeping in a retry backoff.
        """
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring call")
            return
        self._shutdown = True
        self._shutdown_in_progress.set()
        self._session.close()

    def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True
def _compression_from_env() -> Compression:
    """Resolve payload compression from the environment.

    The trace-specific variable wins over the generic OTLP one; the
    value defaults to ``"none"`` and is normalized (lower-cased,
    stripped) before being mapped onto the ``Compression`` enum.
    """
    raw_value = environ.get(
        OTEL_EXPORTER_OTLP_TRACES_COMPRESSION,
        environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
    )
    return Compression(raw_value.lower().strip())
def _append_trace_path(endpoint: str) -> str:
    """Join *endpoint* and the traces export path with exactly one slash."""
    separator = "" if endpoint.endswith("/") else "/"
    return f"{endpoint}{separator}{DEFAULT_TRACES_EXPORT_PATH}"

View File

@@ -0,0 +1,66 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging # noqa: F401
from collections import abc # noqa: F401
from typing import Any, List, Optional, Sequence # noqa: F401
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ( # noqa: F401
ExportTraceServiceRequest as PB2ExportTraceServiceRequest,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
AnyValue as PB2AnyValue,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
ArrayValue as PB2ArrayValue,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
InstrumentationScope as PB2InstrumentationScope,
)
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
KeyValue as PB2KeyValue,
)
from opentelemetry.proto.resource.v1.resource_pb2 import ( # noqa: F401
Resource as PB2Resource,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
ResourceSpans as PB2ResourceSpans,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
ScopeSpans as PB2ScopeSpans,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
Span as PB2SPan,
)
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
Status as PB2Status,
)
from opentelemetry.sdk.trace import (
Event, # noqa: F401
Resource, # noqa: F401
)
from opentelemetry.sdk.trace import Span as SDKSpan # noqa: F401
from opentelemetry.sdk.util.instrumentation import ( # noqa: F401
InstrumentationScope,
)
from opentelemetry.trace import (
Link, # noqa: F401
SpanKind, # noqa: F401
)
from opentelemetry.trace.span import ( # noqa: F401
SpanContext,
Status,
TraceState,
)
from opentelemetry.util.types import Attributes # noqa: F401

View File

@@ -0,0 +1,15 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "1.38.0"

View File

@@ -0,0 +1,15 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__version__ = "1.38.0"

View File

@@ -0,0 +1,132 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
The OpenTelemetry metrics API describes the classes used to generate
metrics.
The :class:`.MeterProvider` provides users access to the :class:`.Meter` which in
turn is used to create :class:`.Instrument` objects. The :class:`.Instrument` objects are
used to record measurements.
This module provides abstract (i.e. unimplemented) classes required for
metrics, and a concrete no-op implementation :class:`.NoOpMeter` that allows applications
to use the API package alone without a supporting implementation.
To get a meter, you need to provide the package name from which you are
calling the meter APIs to OpenTelemetry by calling `MeterProvider.get_meter`
with the calling instrumentation name and the version of your package.
The following code shows how to obtain a meter using the global :class:`.MeterProvider`::
from opentelemetry.metrics import get_meter
meter = get_meter("example-meter")
counter = meter.create_counter("example-counter")
.. versionadded:: 1.10.0
.. versionchanged:: 1.12.0rc
"""
from opentelemetry.metrics._internal import (
Meter,
MeterProvider,
NoOpMeter,
NoOpMeterProvider,
get_meter,
get_meter_provider,
set_meter_provider,
)
from opentelemetry.metrics._internal.instrument import (
Asynchronous,
CallbackOptions,
CallbackT,
Counter,
Histogram,
Instrument,
NoOpCounter,
NoOpHistogram,
NoOpObservableCounter,
NoOpObservableGauge,
NoOpObservableUpDownCounter,
NoOpUpDownCounter,
ObservableCounter,
ObservableGauge,
ObservableUpDownCounter,
Synchronous,
UpDownCounter,
)
from opentelemetry.metrics._internal.instrument import Gauge as _Gauge
from opentelemetry.metrics._internal.instrument import NoOpGauge as _NoOpGauge
from opentelemetry.metrics._internal.observation import Observation
# Re-parent the re-exported names so their ``__module__`` points at this
# public package instead of the private ``_internal`` modules they are
# defined in (keeps reprs and docs pointing at the public API).
for obj in [
    Counter,
    Synchronous,
    Asynchronous,
    CallbackOptions,
    _Gauge,
    _NoOpGauge,
    get_meter_provider,
    get_meter,
    Histogram,
    Meter,
    MeterProvider,
    Instrument,
    NoOpCounter,
    NoOpHistogram,
    NoOpMeter,
    NoOpMeterProvider,
    NoOpObservableCounter,
    NoOpObservableGauge,
    NoOpObservableUpDownCounter,
    NoOpUpDownCounter,
    ObservableCounter,
    ObservableGauge,
    ObservableUpDownCounter,
    Observation,
    set_meter_provider,
    UpDownCounter,
]:
    obj.__module__ = __name__

# Public API of ``opentelemetry.metrics``.
__all__ = [
    "CallbackOptions",
    "MeterProvider",
    "NoOpMeterProvider",
    "Meter",
    "Counter",
    "_Gauge",
    "_NoOpGauge",
    "NoOpCounter",
    "UpDownCounter",
    "NoOpUpDownCounter",
    "Histogram",
    "NoOpHistogram",
    "ObservableCounter",
    "NoOpObservableCounter",
    "ObservableUpDownCounter",
    "Instrument",
    "Synchronous",
    "Asynchronous",
    "NoOpObservableGauge",
    "ObservableGauge",
    "NoOpObservableUpDownCounter",
    "get_meter",
    "get_meter_provider",
    "set_meter_provider",
    "Observation",
    "CallbackT",
    "NoOpMeter",
]

View File

@@ -0,0 +1,889 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-ancestors
"""
The OpenTelemetry metrics API describes the classes used to generate
metrics.
The :class:`.MeterProvider` provides users access to the :class:`.Meter` which in
turn is used to create :class:`.Instrument` objects. The :class:`.Instrument` objects are
used to record measurements.
This module provides abstract (i.e. unimplemented) classes required for
metrics, and a concrete no-op implementation :class:`.NoOpMeter` that allows applications
to use the API package alone without a supporting implementation.
To get a meter, you need to provide the package name from which you are
calling the meter APIs to OpenTelemetry by calling `MeterProvider.get_meter`
with the calling instrumentation name and the version of your package.
The following code shows how to obtain a meter using the global :class:`.MeterProvider`::
from opentelemetry.metrics import get_meter
meter = get_meter("example-meter")
counter = meter.create_counter("example-counter")
.. versionadded:: 1.10.0
"""
import warnings
from abc import ABC, abstractmethod
from dataclasses import dataclass
from logging import getLogger
from os import environ
from threading import Lock
from typing import Dict, List, Optional, Sequence, Union, cast
from opentelemetry.environment_variables import OTEL_PYTHON_METER_PROVIDER
from opentelemetry.metrics._internal.instrument import (
CallbackT,
Counter,
Gauge,
Histogram,
NoOpCounter,
NoOpGauge,
NoOpHistogram,
NoOpObservableCounter,
NoOpObservableGauge,
NoOpObservableUpDownCounter,
NoOpUpDownCounter,
ObservableCounter,
ObservableGauge,
ObservableUpDownCounter,
UpDownCounter,
_MetricsHistogramAdvisory,
_ProxyCounter,
_ProxyGauge,
_ProxyHistogram,
_ProxyObservableCounter,
_ProxyObservableGauge,
_ProxyObservableUpDownCounter,
_ProxyUpDownCounter,
)
from opentelemetry.util._once import Once
from opentelemetry.util._providers import _load_provider
from opentelemetry.util.types import (
Attributes,
)
_logger = getLogger(__name__)

# pylint: disable=invalid-name
# Union of all proxy instrument types that can be created before a real
# MeterProvider is installed.
_ProxyInstrumentT = Union[
    _ProxyCounter,
    _ProxyHistogram,
    _ProxyGauge,
    _ProxyObservableCounter,
    _ProxyObservableGauge,
    _ProxyObservableUpDownCounter,
    _ProxyUpDownCounter,
]
class MeterProvider(ABC):
    """
    MeterProvider is the entry point of the API. It provides access to `Meter` instances.
    """

    @abstractmethod
    def get_meter(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[Attributes] = None,
    ) -> "Meter":
        """Returns a `Meter` for use by the given instrumentation library.

        For any two calls it is undefined whether the same or different
        `Meter` instances are returned, even for different library names.

        This function may return different `Meter` types (e.g. a no-op meter
        vs. a functional meter).

        Args:
            name: The name of the instrumenting module.
                ``__name__`` may not be used as this can result in
                different meter names if the meters are in different files.
                It is better to use a fixed string that can be imported where
                needed and used consistently as the name of the meter.

                This should *not* be the name of the module that is
                instrumented but the name of the module doing the instrumentation.
                E.g., instead of ``"requests"``, use
                ``"opentelemetry.instrumentation.requests"``.
            version: Optional. The version string of the
                instrumenting library. Usually this should be the same as
                ``importlib.metadata.version(instrumenting_library_name)``.
            schema_url: Optional. Specifies the Schema URL of the emitted telemetry.
            attributes: Optional. Attributes that are associated with the emitted telemetry.
        """
class NoOpMeterProvider(MeterProvider):
    """The default MeterProvider used when no MeterProvider implementation is available."""

    def get_meter(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[Attributes] = None,
    ) -> "Meter":
        """Returns a NoOpMeter."""
        # Note: ``attributes`` is intentionally dropped, mirroring the
        # no-op contract of this provider.
        meter_kwargs = {"version": version, "schema_url": schema_url}
        return NoOpMeter(name, **meter_kwargs)
class _ProxyMeterProvider(MeterProvider):
    """Stand-in provider used before a real MeterProvider is configured.

    Hands out `_ProxyMeter` instances and, once a real provider is set,
    re-points every previously created meter at it.
    """

    def __init__(self) -> None:
        self._lock = Lock()
        self._meters: List[_ProxyMeter] = []
        self._real_meter_provider: Optional[MeterProvider] = None

    def get_meter(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
        attributes: Optional[Attributes] = None,
    ) -> "Meter":
        # NOTE(review): ``attributes`` is not forwarded to the real
        # provider or the proxy meter — confirm this is intended.
        with self._lock:
            if self._real_meter_provider is not None:
                return self._real_meter_provider.get_meter(
                    name, version, schema_url
                )
            meter = _ProxyMeter(name, version=version, schema_url=schema_url)
            self._meters.append(meter)
            return meter

    def on_set_meter_provider(self, meter_provider: MeterProvider) -> None:
        # Record the real provider and migrate all proxy meters to it.
        with self._lock:
            self._real_meter_provider = meter_provider
            for meter in self._meters:
                meter.on_set_meter_provider(meter_provider)
@dataclass
class _InstrumentRegistrationStatus:
    """Outcome of registering an instrument with a `Meter`."""

    # Identity string built from name, type, unit and description.
    instrument_id: str
    # True if an instrument with the same identity was registered before.
    already_registered: bool
    # True if the previously registered advisory differs from this one.
    conflict: bool
    # The advisory stored at first registration (None if no conflict info).
    current_advisory: Optional[_MetricsHistogramAdvisory]
class Meter(ABC):
    """Handles instrument creation.

    This class provides methods for creating instruments which are then
    used to produce measurements.
    """

    def __init__(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
    ) -> None:
        super().__init__()
        self._name = name
        self._version = version
        self._schema_url = schema_url
        # Maps an instrument-identity string to the advisory it was first
        # registered with; used to detect duplicate/conflicting creation.
        self._instrument_ids: Dict[
            str, Optional[_MetricsHistogramAdvisory]
        ] = {}
        self._instrument_ids_lock = Lock()

    @property
    def name(self) -> str:
        """
        The name of the instrumenting module.
        """
        return self._name

    @property
    def version(self) -> Optional[str]:
        """
        The version string of the instrumenting library.
        """
        return self._version

    @property
    def schema_url(self) -> Optional[str]:
        """
        Specifies the Schema URL of the emitted telemetry
        """
        return self._schema_url

    def _register_instrument(
        self,
        name: str,
        type_: type,
        unit: str,
        description: str,
        advisory: Optional[_MetricsHistogramAdvisory] = None,
    ) -> _InstrumentRegistrationStatus:
        """
        Register an instrument with the name, type, unit and description as
        identifying keys and the advisory as value.

        Returns an `_InstrumentRegistrationStatus` where
        `already_registered` is `True` if the instrument has been registered
        already.

        If `conflict` is set to True the `current_advisory` attribute contains
        the registered instrument advisory.
        """
        instrument_id = ",".join(
            [name.strip().lower(), type_.__name__, unit, description]
        )
        already_registered = False
        conflict = False
        current_advisory = None
        with self._instrument_ids_lock:
            # we are not using get because None is a valid value
            already_registered = instrument_id in self._instrument_ids
            if already_registered:
                current_advisory = self._instrument_ids[instrument_id]
                conflict = current_advisory != advisory
            else:
                self._instrument_ids[instrument_id] = advisory
        return _InstrumentRegistrationStatus(
            instrument_id=instrument_id,
            already_registered=already_registered,
            conflict=conflict,
            current_advisory=current_advisory,
        )

    @staticmethod
    def _log_instrument_registration_conflict(
        name: str,
        instrumentation_type: str,
        unit: str,
        description: str,
        status: _InstrumentRegistrationStatus,
    ) -> None:
        # Warn that an identical instrument already exists with a different
        # advisory; the first registration wins.
        _logger.warning(
            "An instrument with name %s, type %s, unit %s and "
            "description %s has been created already with a "
            "different advisory value %s and will be used instead.",
            name,
            instrumentation_type,
            unit,
            description,
            status.current_advisory,
        )

    @abstractmethod
    def create_counter(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> Counter:
        """Creates a `Counter` instrument

        Args:
            name: The name of the instrument to be created
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
        """

    @abstractmethod
    def create_up_down_counter(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> UpDownCounter:
        """Creates an `UpDownCounter` instrument

        Args:
            name: The name of the instrument to be created
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
        """

    @abstractmethod
    def create_observable_counter(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableCounter:
        """Creates an `ObservableCounter` instrument

        An observable counter observes a monotonically increasing count by calling provided
        callbacks which accept a :class:`~opentelemetry.metrics.CallbackOptions` and return
        multiple :class:`~opentelemetry.metrics.Observation`.

        For example, an observable counter could be used to report system CPU
        time periodically. Here is a basic implementation::

            def cpu_time_callback(options: CallbackOptions) -> Iterable[Observation]:
                observations = []
                with open("/proc/stat") as procstat:
                    procstat.readline()  # skip the first line
                    for line in procstat:
                        if not line.startswith("cpu"): break
                        cpu, *states = line.split()
                        observations.append(Observation(int(states[0]) // 100, {"cpu": cpu, "state": "user"}))
                        observations.append(Observation(int(states[1]) // 100, {"cpu": cpu, "state": "nice"}))
                        observations.append(Observation(int(states[2]) // 100, {"cpu": cpu, "state": "system"}))
                        # ... other states
                return observations

            meter.create_observable_counter(
                "system.cpu.time",
                callbacks=[cpu_time_callback],
                unit="s",
                description="CPU time"
            )

        To reduce memory usage, you can use generator callbacks instead of
        building the full list::

            def cpu_time_callback(options: CallbackOptions) -> Iterable[Observation]:
                with open("/proc/stat") as procstat:
                    procstat.readline()  # skip the first line
                    for line in procstat:
                        if not line.startswith("cpu"): break
                        cpu, *states = line.split()
                        yield Observation(int(states[0]) // 100, {"cpu": cpu, "state": "user"})
                        yield Observation(int(states[1]) // 100, {"cpu": cpu, "state": "nice"})
                        # ... other states

        Alternatively, you can pass a sequence of generators directly instead of a sequence of
        callbacks, which each should return iterables of :class:`~opentelemetry.metrics.Observation`::

            def cpu_time_callback(states_to_include: set[str]) -> Iterable[Iterable[Observation]]:
                # accept options sent in from OpenTelemetry
                options = yield
                while True:
                    observations = []
                    with open("/proc/stat") as procstat:
                        procstat.readline()  # skip the first line
                        for line in procstat:
                            if not line.startswith("cpu"): break
                            cpu, *states = line.split()
                            if "user" in states_to_include:
                                observations.append(Observation(int(states[0]) // 100, {"cpu": cpu, "state": "user"}))
                            if "nice" in states_to_include:
                                observations.append(Observation(int(states[1]) // 100, {"cpu": cpu, "state": "nice"}))
                            # ... other states
                    # yield the observations and receive the options for next iteration
                    options = yield observations

            meter.create_observable_counter(
                "system.cpu.time",
                callbacks=[cpu_time_callback({"user", "system"})],
                unit="s",
                description="CPU time"
            )

        The :class:`~opentelemetry.metrics.CallbackOptions` contain a timeout which the
        callback should respect. For example if the callback does asynchronous work, like
        making HTTP requests, it should respect the timeout::

            def scrape_http_callback(options: CallbackOptions) -> Iterable[Observation]:
                r = requests.get('http://scrapethis.com', timeout=options.timeout_millis / 10**3)
                for value in r.json():
                    yield Observation(value)

        Args:
            name: The name of the instrument to be created
            callbacks: A sequence of callbacks that return an iterable of
                :class:`~opentelemetry.metrics.Observation`. Alternatively, can be a sequence of generators that each
                yields iterables of :class:`~opentelemetry.metrics.Observation`.
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
        """

    @abstractmethod
    def create_histogram(
        self,
        name: str,
        unit: str = "",
        description: str = "",
        *,
        explicit_bucket_boundaries_advisory: Optional[Sequence[float]] = None,
    ) -> Histogram:
        """Creates a :class:`~opentelemetry.metrics.Histogram` instrument

        Args:
            name: The name of the instrument to be created
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
            explicit_bucket_boundaries_advisory: Optional. A sequence of
                bucket boundaries the SDK may use for this histogram.
        """

    def create_gauge(  # type: ignore # pylint: disable=no-self-use
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> Gauge:  # pyright: ignore[reportReturnType]
        """Creates a ``Gauge`` instrument

        Args:
            name: The name of the instrument to be created
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
        """
        # Deliberately not abstract for backward compatibility: subclasses
        # that predate Gauge still instantiate; the base just warns.
        warnings.warn("create_gauge() is not implemented and will be a no-op")

    @abstractmethod
    def create_observable_gauge(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableGauge:
        """Creates an `ObservableGauge` instrument

        Args:
            name: The name of the instrument to be created
            callbacks: A sequence of callbacks that return an iterable of
                :class:`~opentelemetry.metrics.Observation`. Alternatively, can be a generator that yields iterables
                of :class:`~opentelemetry.metrics.Observation`.
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
        """

    @abstractmethod
    def create_observable_up_down_counter(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableUpDownCounter:
        """Creates an `ObservableUpDownCounter` instrument

        Args:
            name: The name of the instrument to be created
            callbacks: A sequence of callbacks that return an iterable of
                :class:`~opentelemetry.metrics.Observation`. Alternatively, can be a generator that yields iterables
                of :class:`~opentelemetry.metrics.Observation`.
            unit: The unit for observations this instrument reports. For
                example, ``By`` for bytes. UCUM units are recommended.
            description: A description for this instrument and what it measures.
        """
class _ProxyMeter(Meter):
    """Placeholder `Meter` handed out before a real ``MeterProvider`` is set.

    Instruments created through it are proxy instruments that record their
    creation arguments; `on_set_meter_provider` later upgrades them to real
    instruments backed by the configured provider.
    """

    def __init__(
        self,
        name: str,
        version: Optional[str] = None,
        schema_url: Optional[str] = None,
    ) -> None:
        super().__init__(name, version=version, schema_url=schema_url)
        # Protects _instruments/_real_meter: instrument creation may race
        # with on_set_meter_provider on another thread.
        self._lock = Lock()
        self._instruments: List[_ProxyInstrumentT] = []
        self._real_meter: Optional[Meter] = None

    def on_set_meter_provider(self, meter_provider: MeterProvider) -> None:
        """Called when a real meter provider is set on the creating _ProxyMeterProvider

        Creates a real backing meter for this instance and notifies all created
        instruments so they can create real backing instruments.
        """
        real_meter = meter_provider.get_meter(
            self._name, self._version, self._schema_url
        )
        with self._lock:
            self._real_meter = real_meter
            # notify all proxy instruments of the new meter so they can create
            # real instruments to back themselves
            for instrument in self._instruments:
                instrument.on_meter_set(real_meter)

    def create_counter(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> Counter:
        """Create a `Counter`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_counter(name, unit, description)
            proxy = _ProxyCounter(name, unit, description)
            self._instruments.append(proxy)
            return proxy

    def create_up_down_counter(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> UpDownCounter:
        """Create an `UpDownCounter`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_up_down_counter(
                    name, unit, description
                )
            proxy = _ProxyUpDownCounter(name, unit, description)
            self._instruments.append(proxy)
            return proxy

    def create_observable_counter(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableCounter:
        """Create an `ObservableCounter`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_observable_counter(
                    name, callbacks, unit, description
                )
            proxy = _ProxyObservableCounter(
                name, callbacks, unit=unit, description=description
            )
            self._instruments.append(proxy)
            return proxy

    def create_histogram(
        self,
        name: str,
        unit: str = "",
        description: str = "",
        *,
        explicit_bucket_boundaries_advisory: Optional[Sequence[float]] = None,
    ) -> Histogram:
        """Create a `Histogram`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_histogram(
                    name,
                    unit,
                    description,
                    explicit_bucket_boundaries_advisory=explicit_bucket_boundaries_advisory,
                )
            proxy = _ProxyHistogram(
                name, unit, description, explicit_bucket_boundaries_advisory
            )
            self._instruments.append(proxy)
            return proxy

    def create_gauge(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> Gauge:
        """Create a `Gauge`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_gauge(name, unit, description)
            proxy = _ProxyGauge(name, unit, description)
            self._instruments.append(proxy)
            return proxy

    def create_observable_gauge(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableGauge:
        """Create an `ObservableGauge`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_observable_gauge(
                    name, callbacks, unit, description
                )
            proxy = _ProxyObservableGauge(
                name, callbacks, unit=unit, description=description
            )
            self._instruments.append(proxy)
            return proxy

    def create_observable_up_down_counter(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableUpDownCounter:
        """Create an `ObservableUpDownCounter`; proxied until a real meter is available."""
        with self._lock:
            if self._real_meter:
                return self._real_meter.create_observable_up_down_counter(
                    name,
                    callbacks,
                    unit,
                    description,
                )
            proxy = _ProxyObservableUpDownCounter(
                name, callbacks, unit=unit, description=description
            )
            self._instruments.append(proxy)
            return proxy
class NoOpMeter(Meter):
    """The default Meter used when no Meter implementation is available.

    All operations are no-op. Each create_* method still registers the
    instrument (via the base class) so that duplicate registrations with a
    conflicting unit/description can be detected and logged.
    """

    def create_counter(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> Counter:
        """Returns a no-op Counter."""
        status = self._register_instrument(
            name, NoOpCounter, unit, description
        )
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                Counter.__name__,
                unit,
                description,
                status,
            )
        return NoOpCounter(name, unit=unit, description=description)

    def create_gauge(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> Gauge:
        """Returns a no-op Gauge."""
        status = self._register_instrument(name, NoOpGauge, unit, description)
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                Gauge.__name__,
                unit,
                description,
                status,
            )
        return NoOpGauge(name, unit=unit, description=description)

    def create_up_down_counter(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> UpDownCounter:
        """Returns a no-op UpDownCounter."""
        status = self._register_instrument(
            name, NoOpUpDownCounter, unit, description
        )
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                UpDownCounter.__name__,
                unit,
                description,
                status,
            )
        return NoOpUpDownCounter(name, unit=unit, description=description)

    def create_observable_counter(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableCounter:
        """Returns a no-op ObservableCounter."""
        status = self._register_instrument(
            name, NoOpObservableCounter, unit, description
        )
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                ObservableCounter.__name__,
                unit,
                description,
                status,
            )
        return NoOpObservableCounter(
            name,
            callbacks,
            unit=unit,
            description=description,
        )

    def create_histogram(
        self,
        name: str,
        unit: str = "",
        description: str = "",
        *,
        explicit_bucket_boundaries_advisory: Optional[Sequence[float]] = None,
    ) -> Histogram:
        """Returns a no-op Histogram."""
        # Histograms additionally carry the bucket-boundary advisory through
        # registration so conflicting advisories can be detected.
        status = self._register_instrument(
            name,
            NoOpHistogram,
            unit,
            description,
            _MetricsHistogramAdvisory(
                explicit_bucket_boundaries=explicit_bucket_boundaries_advisory
            ),
        )
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                Histogram.__name__,
                unit,
                description,
                status,
            )
        return NoOpHistogram(
            name,
            unit=unit,
            description=description,
            explicit_bucket_boundaries_advisory=explicit_bucket_boundaries_advisory,
        )

    def create_observable_gauge(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableGauge:
        """Returns a no-op ObservableGauge."""
        status = self._register_instrument(
            name, NoOpObservableGauge, unit, description
        )
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                ObservableGauge.__name__,
                unit,
                description,
                status,
            )
        return NoOpObservableGauge(
            name,
            callbacks,
            unit=unit,
            description=description,
        )

    def create_observable_up_down_counter(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> ObservableUpDownCounter:
        """Returns a no-op ObservableUpDownCounter."""
        status = self._register_instrument(
            name, NoOpObservableUpDownCounter, unit, description
        )
        if status.conflict:
            self._log_instrument_registration_conflict(
                name,
                ObservableUpDownCounter.__name__,
                unit,
                description,
                status,
            )
        return NoOpObservableUpDownCounter(
            name,
            callbacks,
            unit=unit,
            description=description,
        )
# Module-level singletons backing the global meter-provider API below.
_METER_PROVIDER_SET_ONCE = Once()  # guarantees the global provider is set at most once
_METER_PROVIDER: Optional[MeterProvider] = None
_PROXY_METER_PROVIDER = _ProxyMeterProvider()  # served until a real provider is set
def get_meter(
    name: str,
    version: str = "",
    meter_provider: Optional[MeterProvider] = None,
    schema_url: Optional[str] = None,
    attributes: Optional[Attributes] = None,
) -> "Meter":
    """Return a `Meter` for use by the given instrumentation library.

    Convenience wrapper for
    `opentelemetry.metrics.MeterProvider.get_meter`. When *meter_provider*
    is ``None`` the currently configured global provider is used.
    """
    provider = (
        meter_provider if meter_provider is not None else get_meter_provider()
    )
    return provider.get_meter(name, version, schema_url, attributes)
def _set_meter_provider(meter_provider: MeterProvider, log: bool) -> None:
    """Install *meter_provider* as the global provider, at most once.

    Args:
        meter_provider: the provider to install.
        log: when True, warn if a provider was already set and this call
            therefore had no effect.
    """
    def set_mp() -> None:
        global _METER_PROVIDER  # pylint: disable=global-statement
        _METER_PROVIDER = meter_provider
        # gives all proxies real instruments off the newly set meter provider
        _PROXY_METER_PROVIDER.on_set_meter_provider(meter_provider)

    # Once.do_once returns False when the callback already ran earlier.
    did_set = _METER_PROVIDER_SET_ONCE.do_once(set_mp)
    if log and not did_set:
        _logger.warning("Overriding of current MeterProvider is not allowed")
def set_meter_provider(meter_provider: MeterProvider) -> None:
    """Set the current global :class:`~.MeterProvider` object.

    Only the first call takes effect; any further attempt is ignored and a
    warning is logged.
    """
    _set_meter_provider(meter_provider, log=True)
def get_meter_provider() -> MeterProvider:
    """Gets the current global :class:`~.MeterProvider` object."""
    if _METER_PROVIDER is not None:
        # Already configured by a previous call (possibly on another thread).
        return cast("MeterProvider", _METER_PROVIDER)
    if OTEL_PYTHON_METER_PROVIDER not in environ:
        # Nothing configured yet: hand out the proxy so instruments created
        # now start working once a real provider is set later.
        return _PROXY_METER_PROVIDER
    meter_provider: MeterProvider = _load_provider(  # type: ignore
        OTEL_PYTHON_METER_PROVIDER, "meter_provider"
    )
    _set_meter_provider(meter_provider, log=False)
    # _METER_PROVIDER will have been set by one thread
    return cast("MeterProvider", _METER_PROVIDER)

View File

@@ -0,0 +1,530 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=too-many-ancestors
from abc import ABC, abstractmethod
from dataclasses import dataclass
from logging import getLogger
from re import compile as re_compile
from typing import (
Callable,
Dict,
Generator,
Generic,
Iterable,
Optional,
Sequence,
TypeVar,
Union,
)
# pylint: disable=unused-import; needed for typing and sphinx
from opentelemetry import metrics
from opentelemetry.context import Context
from opentelemetry.metrics._internal.observation import Observation
from opentelemetry.util.types import (
Attributes,
)
_logger = getLogger(__name__)
# Instrument validation patterns: a name starts with a letter and is at most
# 255 characters of [-_./a-zA-Z0-9]; a unit is at most 63 ASCII characters.
_name_regex = re_compile(r"[a-zA-Z][-_./a-zA-Z0-9]{0,254}")
_unit_regex = re_compile(r"[\x00-\x7F]{0,63}")
@dataclass(frozen=True)
class _MetricsHistogramAdvisory:
    # Advisory (hint) carried by histogram instruments: suggested explicit
    # bucket boundaries, or None to leave the choice to the SDK.
    explicit_bucket_boundaries: Optional[Sequence[float]] = None
@dataclass(frozen=True)
class CallbackOptions:
    """Options for the callback

    Args:
        timeout_millis: Timeout for the callback's execution. If the callback does asynchronous
            work (e.g. HTTP requests), it should respect this timeout.
    """
    # Default allows callbacks 10 seconds before collection should move on.
    timeout_millis: float = 10_000
# Type variable bound to Instrument; used by the proxy helpers below.
InstrumentT = TypeVar("InstrumentT", bound="Instrument")
# pylint: disable=invalid-name
# A callback is either a plain callable returning an iterable of Observations,
# or a generator yielding iterables of Observations (sent CallbackOptions).
CallbackT = Union[
    Callable[[CallbackOptions], Iterable[Observation]],
    Generator[Iterable[Observation], CallbackOptions, None],
]
class Instrument(ABC):
    """Abstract class that serves as base for all instruments."""

    @abstractmethod
    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> None:
        pass

    @staticmethod
    def _check_name_unit_description(
        name: str, unit: str, description: str
    ) -> Dict[str, Optional[str]]:
        """Validate an instrument name, unit and description against the spec.

        Returns a dict with keys "name", "unit" and "description"; each value
        is the checked string, or ``None`` if the input was invalid. Callers
        should use the checked values instead of the originals.
        """
        checked: Dict[str, Optional[str]] = {}
        # Name must fully match the spec pattern to be accepted.
        checked["name"] = (
            name if _name_regex.fullmatch(name) is not None else None
        )
        # A missing unit is normalized to the empty string before checking.
        if unit is None:
            unit = ""
        checked["unit"] = (
            unit if _unit_regex.fullmatch(unit) is not None else None
        )
        # Descriptions are free-form; only None needs normalizing.
        checked["description"] = "" if description is None else description
        return checked
class _ProxyInstrument(ABC, Generic[InstrumentT]):
    """Base for placeholder instruments created before a real meter exists.

    Remembers the creation arguments so an equivalent real instrument can be
    built once a meter provider is configured.
    """

    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> None:
        self._name = name
        self._unit = unit
        self._description = description
        # Populated by on_meter_set once a real meter becomes available.
        self._real_instrument: Optional[InstrumentT] = None

    def on_meter_set(self, meter: "metrics.Meter") -> None:
        """Called when a real meter is set on the creating _ProxyMeter"""
        # We don't need any locking on proxy instruments because it's OK if some
        # measurements get dropped while a real backing instrument is being
        # created.
        self._real_instrument = self._create_real_instrument(meter)

    @abstractmethod
    def _create_real_instrument(self, meter: "metrics.Meter") -> InstrumentT:
        """Create an instance of the real instrument. Implement this."""
class _ProxyAsynchronousInstrument(_ProxyInstrument[InstrumentT]):
    """Proxy base for asynchronous instruments; also remembers callbacks."""

    def __init__(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> None:
        super().__init__(name, unit, description)
        # Kept so the callbacks can be re-registered on the real instrument.
        self._callbacks = callbacks
class Synchronous(Instrument):
    """Base class for all synchronous instruments"""
    # Marker base: measurements are reported by direct method calls
    # (see Counter.add, Histogram.record, Gauge.set below).
class Asynchronous(Instrument):
    """Base class for all asynchronous instruments"""

    @abstractmethod
    def __init__(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> None:
        """Asynchronous instruments additionally accept observation callbacks."""
        super().__init__(name, unit=unit, description=description)
class Counter(Synchronous):
    """A Counter is a synchronous `Instrument` which supports non-negative increments."""

    @abstractmethod
    def add(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        """Record an increment of *amount* with optional attributes and context."""
        pass
class NoOpCounter(Counter):
    """No-op implementation of `Counter`."""

    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> None:
        super().__init__(name, unit=unit, description=description)

    def add(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Delegates to the abstract base's body, which does nothing.
        return super().add(amount, attributes=attributes, context=context)
class _ProxyCounter(_ProxyInstrument[Counter], Counter):
    """Counter placeholder that forwards to the real counter once created."""

    def add(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Measurements made before a real instrument exists are dropped.
        if self._real_instrument:
            self._real_instrument.add(amount, attributes, context)

    def _create_real_instrument(self, meter: "metrics.Meter") -> Counter:
        return meter.create_counter(
            self._name,
            self._unit,
            self._description,
        )
class UpDownCounter(Synchronous):
    """An UpDownCounter is a synchronous `Instrument` which supports increments and decrements."""

    @abstractmethod
    def add(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        """Record a positive or negative change of *amount*."""
        pass
class NoOpUpDownCounter(UpDownCounter):
    """No-op implementation of `UpDownCounter`."""

    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> None:
        super().__init__(name, unit=unit, description=description)

    def add(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Delegates to the abstract base's body, which does nothing.
        return super().add(amount, attributes=attributes, context=context)
class _ProxyUpDownCounter(_ProxyInstrument[UpDownCounter], UpDownCounter):
    """UpDownCounter placeholder that forwards once the real instrument exists."""

    def add(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Measurements made before a real instrument exists are dropped.
        if self._real_instrument:
            self._real_instrument.add(amount, attributes, context)

    def _create_real_instrument(self, meter: "metrics.Meter") -> UpDownCounter:
        return meter.create_up_down_counter(
            self._name,
            self._unit,
            self._description,
        )
class ObservableCounter(Asynchronous):
    """An ObservableCounter is an asynchronous `Instrument` which reports monotonically
    increasing value(s) when the instrument is being observed.
    """
class NoOpObservableCounter(ObservableCounter):
    """No-op implementation of `ObservableCounter`."""

    def __init__(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> None:
        # Callbacks are accepted but never invoked by the no-op instrument.
        super().__init__(
            name,
            callbacks,
            unit=unit,
            description=description,
        )
class _ProxyObservableCounter(
    _ProxyAsynchronousInstrument[ObservableCounter], ObservableCounter
):
    """ObservableCounter placeholder upgraded when a real meter appears."""

    def _create_real_instrument(
        self, meter: "metrics.Meter"
    ) -> ObservableCounter:
        return meter.create_observable_counter(
            self._name,
            self._callbacks,
            self._unit,
            self._description,
        )
class ObservableUpDownCounter(Asynchronous):
    """An ObservableUpDownCounter is an asynchronous `Instrument` which reports additive value(s) (e.g.
    the process heap size - it makes sense to report the heap size from multiple processes and sum them
    up, so we get the total heap usage) when the instrument is being observed.
    """
class NoOpObservableUpDownCounter(ObservableUpDownCounter):
    """No-op implementation of `ObservableUpDownCounter`."""

    def __init__(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> None:
        # Callbacks are accepted but never invoked by the no-op instrument.
        super().__init__(
            name,
            callbacks,
            unit=unit,
            description=description,
        )
class _ProxyObservableUpDownCounter(
    _ProxyAsynchronousInstrument[ObservableUpDownCounter],
    ObservableUpDownCounter,
):
    """ObservableUpDownCounter placeholder upgraded when a real meter appears."""

    def _create_real_instrument(
        self, meter: "metrics.Meter"
    ) -> ObservableUpDownCounter:
        return meter.create_observable_up_down_counter(
            self._name,
            self._callbacks,
            self._unit,
            self._description,
        )
class Histogram(Synchronous):
    """Histogram is a synchronous `Instrument` which can be used to report arbitrary values
    that are likely to be statistically meaningful. It is intended for statistics such as
    histograms, summaries, and percentile.
    """

    @abstractmethod
    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
        explicit_bucket_boundaries_advisory: Optional[Sequence[float]] = None,
    ) -> None:
        pass

    @abstractmethod
    def record(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        """Record a single measurement of *amount*."""
        pass
class NoOpHistogram(Histogram):
    """No-op implementation of `Histogram`."""

    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
        explicit_bucket_boundaries_advisory: Optional[Sequence[float]] = None,
    ) -> None:
        super().__init__(
            name,
            unit=unit,
            description=description,
            explicit_bucket_boundaries_advisory=explicit_bucket_boundaries_advisory,
        )

    def record(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Delegates to the abstract base's body, which does nothing.
        return super().record(amount, attributes=attributes, context=context)
class _ProxyHistogram(_ProxyInstrument[Histogram], Histogram):
    """Histogram placeholder that forwards once the real instrument exists."""

    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
        explicit_bucket_boundaries_advisory: Optional[Sequence[float]] = None,
    ) -> None:
        super().__init__(name, unit=unit, description=description)
        # Preserved so the advisory reaches the real histogram on creation.
        self._explicit_bucket_boundaries_advisory = (
            explicit_bucket_boundaries_advisory
        )

    def record(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Measurements made before a real instrument exists are dropped.
        if self._real_instrument:
            self._real_instrument.record(amount, attributes, context)

    def _create_real_instrument(self, meter: "metrics.Meter") -> Histogram:
        return meter.create_histogram(
            self._name,
            self._unit,
            self._description,
            explicit_bucket_boundaries_advisory=self._explicit_bucket_boundaries_advisory,
        )
class ObservableGauge(Asynchronous):
    """Asynchronous Gauge is an asynchronous `Instrument` which reports non-additive value(s) (e.g.
    the room temperature - it makes no sense to report the temperature value from multiple rooms
    and sum them up) when the instrument is being observed.
    """
class NoOpObservableGauge(ObservableGauge):
    """No-op implementation of `ObservableGauge`."""

    def __init__(
        self,
        name: str,
        callbacks: Optional[Sequence[CallbackT]] = None,
        unit: str = "",
        description: str = "",
    ) -> None:
        # Callbacks are accepted but never invoked by the no-op instrument.
        super().__init__(
            name,
            callbacks,
            unit=unit,
            description=description,
        )
class _ProxyObservableGauge(
    _ProxyAsynchronousInstrument[ObservableGauge],
    ObservableGauge,
):
    """ObservableGauge placeholder upgraded when a real meter appears."""

    def _create_real_instrument(
        self, meter: "metrics.Meter"
    ) -> ObservableGauge:
        return meter.create_observable_gauge(
            self._name,
            self._callbacks,
            self._unit,
            self._description,
        )
class Gauge(Synchronous):
    """A Gauge is a synchronous `Instrument` which can be used to record non-additive values as they occur."""

    @abstractmethod
    def set(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        """Record the current value *amount*."""
        pass
class NoOpGauge(Gauge):
    """No-op implementation of ``Gauge``."""

    def __init__(
        self,
        name: str,
        unit: str = "",
        description: str = "",
    ) -> None:
        super().__init__(name, unit=unit, description=description)

    def set(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Delegates to the abstract base's body, which does nothing.
        return super().set(amount, attributes=attributes, context=context)
class _ProxyGauge(
    _ProxyInstrument[Gauge],
    Gauge,
):
    """Gauge placeholder that forwards once the real instrument exists."""

    def set(
        self,
        amount: Union[int, float],
        attributes: Optional[Attributes] = None,
        context: Optional[Context] = None,
    ) -> None:
        # Measurements made before a real instrument exists are dropped.
        if self._real_instrument:
            self._real_instrument.set(amount, attributes, context)

    def _create_real_instrument(self, meter: "metrics.Meter") -> Gauge:
        return meter.create_gauge(
            self._name,
            self._unit,
            self._description,
        )

View File

@@ -0,0 +1,63 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Optional, Union
from opentelemetry.context import Context
from opentelemetry.util.types import Attributes
class Observation:
    """A single measurement observed by an asynchronous instrument.

    Instances of this class are returned/yielded from asynchronous
    instrument callbacks.

    Args:
        value: The float or int measured value
        attributes: The measurement's attributes
        context: The measurement's context
    """

    def __init__(
        self,
        value: Union[int, float],
        attributes: Attributes = None,
        context: Optional[Context] = None,
    ) -> None:
        self._value = value
        self._attributes = attributes
        self._context = context

    @property
    def value(self) -> Union[float, int]:
        """The measured value."""
        return self._value

    @property
    def attributes(self) -> Attributes:
        """The measurement's attributes."""
        return self._attributes

    @property
    def context(self) -> Optional[Context]:
        """The measurement's context."""
        return self._context

    def __eq__(self, other: object) -> bool:
        # Field-by-field comparison; any non-Observation compares unequal.
        if not isinstance(other, Observation):
            return False
        return (
            self.value == other.value
            and self.attributes == other.attributes
            and self.context == other.context
        )

    def __repr__(self) -> str:
        return f"Observation(value={self.value}, attributes={self.attributes}, context={self.context})"

View File

@@ -0,0 +1,174 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
API for propagation of context.
The propagators for the
``opentelemetry.propagators.composite.CompositePropagator`` can be defined
via configuration in the ``OTEL_PROPAGATORS`` environment variable. This
variable should be set to a comma-separated string of names of values for the
``opentelemetry_propagator`` entry point. For example, setting
``OTEL_PROPAGATORS`` to ``tracecontext,baggage`` (which is the default value)
would instantiate
``opentelemetry.propagators.composite.CompositePropagator`` with 2
propagators, one of type
``opentelemetry.trace.propagation.tracecontext.TraceContextTextMapPropagator``
and other of type ``opentelemetry.baggage.propagation.W3CBaggagePropagator``.
Notice that these propagator classes are defined as
``opentelemetry_propagator`` entry points in the ``pyproject.toml`` file of
``opentelemetry``.
Example::
import flask
import requests
from opentelemetry import propagate
PROPAGATOR = propagate.get_global_textmap()
def get_header_from_flask_request(request, key):
return request.headers.get_all(key)
def set_header_into_requests_request(request: requests.Request,
key: str, value: str):
request.headers[key] = value
def example_route():
context = PROPAGATOR.extract(
get_header_from_flask_request,
flask.request
)
request_to_downstream = requests.Request(
"GET", "http://httpbin.org/get"
)
PROPAGATOR.inject(
set_header_into_requests_request,
request_to_downstream,
context=context
)
session = requests.Session()
session.send(request_to_downstream.prepare())
.. _Propagation API Specification:
https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/context/api-propagators.md
"""
from logging import getLogger
from os import environ
from typing import List, Optional
from opentelemetry.context.context import Context
from opentelemetry.environment_variables import OTEL_PROPAGATORS
from opentelemetry.propagators import composite, textmap
from opentelemetry.util._importlib_metadata import entry_points
logger = getLogger(__name__)
def extract(
    carrier: textmap.CarrierT,
    context: Optional[Context] = None,
    getter: textmap.Getter[textmap.CarrierT] = textmap.default_getter,
) -> Context:
    """Extract a Context from *carrier* using the configured global propagator.

    Args:
        carrier: An object holding propagation values; it must be paired
            with a getter that understands how to read values from it.
        getter: An object whose ``get`` function retrieves zero or more
            values and whose ``keys`` function lists all keys in the carrier.
        context: An optional base Context. Defaults to the root context
            when not set.
    """
    propagator = get_global_textmap()
    return propagator.extract(carrier, context, getter=getter)
def inject(
    carrier: textmap.CarrierT,
    context: Optional[Context] = None,
    setter: textmap.Setter[textmap.CarrierT] = textmap.default_setter,
) -> None:
    """Inject a Context into *carrier* using the configured global propagator.

    Args:
        carrier: The medium propagators write values to; it should be
            paired with a setter that knows how to set values on it.
        context: An optional Context to inject. Defaults to the current
            context when not set.
        setter: An optional `Setter` able to set values on the carrier.
    """
    propagator = get_global_textmap()
    propagator.inject(carrier, context=context, setter=setter)
# Build the global propagator list from OTEL_PROPAGATORS (comma-separated
# entry-point names); "none" clears all propagators.
propagators: List[textmap.TextMapPropagator] = []
# Single use variable here to hack black and make lint pass
environ_propagators = environ.get(
    OTEL_PROPAGATORS,
    "tracecontext,baggage",
)
for propagator in environ_propagators.split(","):
    propagator = propagator.strip()
    if propagator.lower() == "none":
        logger.debug(
            "OTEL_PROPAGATORS environment variable contains none, removing all propagators"
        )
        propagators = []
        break
    try:
        # Load the first entry point registered under this name and
        # instantiate it (entry .load() returns the class).
        propagators.append(
            next(  # type: ignore
                iter(  # type: ignore
                    entry_points(  # type: ignore[misc]
                        group="opentelemetry_propagator",
                        name=propagator,
                    )
                )
            ).load()()
        )
    except StopIteration as err:
        # Chain the cause explicitly so the traceback points at the missing
        # entry point instead of showing an unrelated "during handling of
        # the above exception" StopIteration (PEP 3134 / flake8 B904).
        raise ValueError(
            f"Propagator {propagator} not found. It is either misspelled or not installed."
        ) from err
    except Exception:  # pylint: disable=broad-exception-caught
        logger.exception("Failed to load propagator: %s", propagator)
        raise
# The process-wide propagator: a composite over everything loaded above.
_HTTP_TEXT_FORMAT: textmap.TextMapPropagator = composite.CompositePropagator(
    propagators
)
def get_global_textmap() -> textmap.TextMapPropagator:
    """Return the process-wide composite `TextMapPropagator`."""
    return _HTTP_TEXT_FORMAT
def set_global_textmap(
    http_text_format: textmap.TextMapPropagator,
) -> None:
    """Replace the process-wide `TextMapPropagator` with *http_text_format*."""
    global _HTTP_TEXT_FORMAT  # pylint:disable=global-statement
    _HTTP_TEXT_FORMAT = http_text_format

View File

@@ -0,0 +1,93 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
import typing
from typing_extensions import deprecated
from opentelemetry.context.context import Context
from opentelemetry.propagators import textmap
logger = logging.getLogger(__name__)
class CompositePropagator(textmap.TextMapPropagator):
    """Combines multiple propagators into a single TextMapPropagator.

    Args:
        propagators: the list of propagators to use
    """

    def __init__(
        self, propagators: typing.Sequence[textmap.TextMapPropagator]
    ) -> None:
        self._propagators = propagators

    def extract(
        self,
        carrier: textmap.CarrierT,
        context: typing.Optional[Context] = None,
        getter: textmap.Getter[textmap.CarrierT] = textmap.default_getter,
    ) -> Context:
        """Run every configured propagator's extract over the carrier.

        Propagators run in configuration order; when several write the same
        context key, the one later in the list wins.
        See `opentelemetry.propagators.textmap.TextMapPropagator.extract`
        """
        result = context
        for prop in self._propagators:
            result = prop.extract(carrier, result, getter=getter)
        return result  # type: ignore

    def inject(
        self,
        carrier: textmap.CarrierT,
        context: typing.Optional[Context] = None,
        setter: textmap.Setter[textmap.CarrierT] = textmap.default_setter,
    ) -> None:
        """Run every configured propagator's inject over the carrier.

        Propagators run in configuration order; when several write the same
        carrier key, the one later in the list wins.
        See `opentelemetry.propagators.textmap.TextMapPropagator.inject`
        """
        for prop in self._propagators:
            prop.inject(carrier, context, setter=setter)

    @property
    def fields(self) -> typing.Set[str]:
        """The union of the fields each configured propagator sets in `inject`.

        See `opentelemetry.propagators.textmap.TextMapPropagator.fields`
        """
        return {
            field
            for prop in self._propagators
            for field in prop.fields
        }
@deprecated(
    "You should use CompositePropagator. Deprecated since version 1.2.0."
)
class CompositeHTTPPropagator(CompositePropagator):
    """CompositeHTTPPropagator provides a mechanism for combining multiple
    propagators into a single one.
    """
    # Deprecated alias kept only for backwards compatibility; it adds no
    # behavior over CompositePropagator.

View File

@@ -0,0 +1,197 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import abc
import typing
from opentelemetry.context.context import Context
CarrierT = typing.TypeVar("CarrierT")
# pylint: disable=invalid-name
CarrierValT = typing.Union[typing.List[str], str]
class Getter(abc.ABC, typing.Generic[CarrierT]):
    """This class implements a Getter that enables extracting propagated
    fields from a carrier.
    """

    @abc.abstractmethod
    def get(
        self, carrier: CarrierT, key: str
    ) -> typing.Optional[typing.List[str]]:
        """Function that can retrieve zero
        or more values from the carrier. In the case that
        the value does not exist, returns None.

        Args:
            carrier: An object which contains values that are used to
                construct a Context.
            key: key of a field in carrier.
        Returns: first value of the propagation key or None if the key doesn't
            exist.
        """

    @abc.abstractmethod
    def keys(self, carrier: CarrierT) -> typing.List[str]:
        """Function that can retrieve all the keys in a carrier object.

        Args:
            carrier: An object which contains values that are
                used to construct a Context.
        Returns:
            list of keys from the carrier.
        """
class Setter(abc.ABC, typing.Generic[CarrierT]):
    """This class implements a Setter that enables injecting propagated
    fields into a carrier.
    """

    @abc.abstractmethod
    def set(self, carrier: CarrierT, key: str, value: str) -> None:
        """Function that can set a value into a carrier.

        Args:
            carrier: An object which contains values that are used to
                construct a Context.
            key: key of a field in carrier.
            value: value for a field in carrier.
        """
class DefaultGetter(Getter[typing.Mapping[str, CarrierValT]]):
    """Default :class:`Getter` implementation for plain mapping carriers."""

    def get(
        self, carrier: typing.Mapping[str, CarrierValT], key: str
    ) -> typing.Optional[typing.List[str]]:
        """Getter implementation to retrieve a value from a dictionary.

        Args:
            carrier: dictionary in which to get value
            key: the key used to get the value

        Returns:
            A list with a single string with the value if it exists, else None.
        """
        value = carrier.get(key, None)
        if value is None:
            return None
        # A bare string is returned as a one-element list; any other
        # iterable value is expanded into a list of its items.
        if isinstance(value, str) or not isinstance(value, typing.Iterable):
            return [value]
        return list(value)

    def keys(
        self, carrier: typing.Mapping[str, CarrierValT]
    ) -> typing.List[str]:
        """Keys implementation that returns all keys from a dictionary."""
        return [carrier_key for carrier_key in carrier]
# Module-level singleton used as the default ``getter`` argument of
# ``TextMapPropagator.extract``.
default_getter: Getter[CarrierT] = DefaultGetter()  # type: ignore
class DefaultSetter(Setter[typing.MutableMapping[str, CarrierValT]]):
    """Default :class:`Setter` implementation for mutable mapping carriers."""

    def set(
        self,
        carrier: typing.MutableMapping[str, CarrierValT],
        key: str,
        value: CarrierValT,
    ) -> None:
        """Setter implementation to set a value into a dictionary.

        Args:
            carrier: dictionary in which to set value
            key: the key used to set the value
            value: the value to set
        """
        # Plain item assignment is all a mapping carrier needs.
        carrier[key] = value
# Module-level singleton used as the default ``setter`` argument of
# ``TextMapPropagator.inject``.
default_setter: Setter[CarrierT] = DefaultSetter()  # type: ignore
class TextMapPropagator(abc.ABC):
    """This class provides an interface that enables extracting and injecting
    context into headers of HTTP requests. HTTP frameworks and clients
    can integrate with TextMapPropagator by providing the object containing the
    headers, and a getter and setter function for the extraction and
    injection of values, respectively.
    """

    @abc.abstractmethod
    def extract(
        self,
        carrier: CarrierT,
        context: typing.Optional[Context] = None,
        getter: Getter[CarrierT] = default_getter,
    ) -> Context:
        """Create a Context from values in the carrier.

        The extract function should retrieve values from the carrier
        object using getter, and use values to populate a
        Context value and return it.

        Args:
            getter: a function that can retrieve zero
                or more values from the carrier. In the case that
                the value does not exist, return an empty list.
            carrier: an object which contains values that are
                used to construct a Context. This object
                must be paired with an appropriate getter
                which understands how to extract a value from it.
            context: an optional Context to use. Defaults to root
                context if not set.

        Returns:
            A Context with configuration found in the carrier.
        """

    @abc.abstractmethod
    def inject(
        self,
        carrier: CarrierT,
        context: typing.Optional[Context] = None,
        setter: Setter[CarrierT] = default_setter,
    ) -> None:
        """Inject values from a Context into a carrier.

        inject enables the propagation of values into HTTP clients or
        other objects which perform an HTTP request. Implementations
        should use the `Setter` 's set method to set values on the
        carrier.

        Args:
            carrier: An object that serves as a place to define HTTP
                headers. Should be paired with setter, which should
                know how to set header values on the carrier.
            context: an optional Context to use. Defaults to current
                context if not set.
            setter: An optional `Setter` object that can set values
                on the carrier.
        """

    @property
    @abc.abstractmethod
    def fields(self) -> typing.Set[str]:
        """
        Gets the fields set in the carrier by the `inject` method.

        If the carrier is reused, its fields that correspond with the ones
        present in this attribute should be deleted before calling `inject`.

        Returns:
            A set with the fields set in `inject`.
        """

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/logs/v1/logs_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

from opentelemetry.proto.logs.v1 import logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2

# Serialized FileDescriptorProto for logs_service.proto, registered in the
# process-wide default descriptor pool at import time.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess\"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3')

# Materialize message classes (ExportLogsServiceRequest/Response,
# ExportLogsPartialSuccess) into this module's namespace.
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.logs.v1.logs_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1'
  _globals['_EXPORTLOGSSERVICEREQUEST']._serialized_start=139
  _globals['_EXPORTLOGSSERVICEREQUEST']._serialized_end=231
  _globals['_EXPORTLOGSSERVICERESPONSE']._serialized_start=233
  _globals['_EXPORTLOGSSERVICERESPONSE']._serialized_end=350
  _globals['_EXPORTLOGSPARTIALSUCCESS']._serialized_start=352
  _globals['_EXPORTLOGSPARTIALSUCCESS']._serialized_end=431
  _globals['_LOGSSERVICE']._serialized_start=434
  _globals['_LOGSSERVICE']._serialized_end=591
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,117 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2020, OpenTelemetry Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.logs.v1.logs_pb2
import sys
# typing has the required features (Literal, final) from 3.8 on; fall back
# to the typing_extensions backport on older interpreters.
if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

# Descriptor for the .proto file this stub was generated from.
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
# Type stub for the generated ExportLogsServiceRequest message
# (field 1: repeated ResourceLogs).
@typing_extensions.final
class ExportLogsServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_LOGS_FIELD_NUMBER: builtins.int
    @property
    def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]:
        """An array of ResourceLogs.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    def __init__(
        self,
        *,
        resource_logs: collections.abc.Iterable[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["resource_logs", b"resource_logs"]) -> None: ...

global___ExportLogsServiceRequest = ExportLogsServiceRequest
# Type stub for the generated ExportLogsServiceResponse message
# (field 1: optional ExportLogsPartialSuccess).
@typing_extensions.final
class ExportLogsServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportLogsPartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportLogsPartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportLogsServiceResponse = ExportLogsServiceResponse
# Type stub for the generated ExportLogsPartialSuccess message
# (field 1: rejected_log_records, field 2: error_message).
@typing_extensions.final
class ExportLogsPartialSuccess(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    REJECTED_LOG_RECORDS_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int
    rejected_log_records: builtins.int
    """The number of rejected log records.

    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.

    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    def __init__(
        self,
        *,
        rejected_log_records: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_log_records", b"rejected_log_records"]) -> None: ...

global___ExportLogsPartialSuccess = ExportLogsPartialSuccess

View File

@@ -0,0 +1,110 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from opentelemetry.proto.collector.logs.v1 import logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2

# Minimum grpcio version this generated module was built against.
GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    # grpcio too old to even carry the version-comparison helper.
    _version_not_supported = True

if _version_not_supported:
    # Emit a (future: error) warning if the installed grpcio predates the
    # version the generator targeted.
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )
class LogsServiceStub(object):
    """Service that can be used to push logs between one Application instrumented with
    OpenTelemetry and an collector, or between an collector and a central collector (in this
    case logs are sent/received to/from multiple Applications).
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary-unary callable for LogsService.Export; serializes the request
        # and deserializes the response using the generated pb2 messages.
        self.Export = channel.unary_unary(
                '/opentelemetry.proto.collector.logs.v1.LogsService/Export',
                request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString,
                response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString,
                _registered_method=True)
class LogsServiceServicer(object):
    """Service that can be used to push logs between one Application instrumented with
    OpenTelemetry and an collector, or between an collector and a central collector (in this
    case logs are sent/received to/from multiple Applications).
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        # Default server-side stub: subclasses override this to handle the RPC.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_LogsServiceServicer_to_server(servicer, server):
    # Register the servicer's RPC handlers (currently only Export) with the
    # given grpc.Server under the fully-qualified service name.
    rpc_method_handlers = {
            'Export': grpc.unary_unary_rpc_method_handler(
                    servicer.Export,
                    request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString,
                    response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'opentelemetry.proto.collector.logs.v1.LogsService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class LogsService(object):
    """Service that can be used to push logs between one Application instrumented with
    OpenTelemetry and an collector, or between an collector and a central collector (in this
    case logs are sent/received to/from multiple Applications).
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot convenience invocation of the Export RPC via the
        # experimental API (no pre-built channel/stub required).
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.logs.v1.LogsService/Export',
            opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString,
            opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/metrics/v1/metrics_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

from opentelemetry.proto.metrics.v1 import metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2

# Serialized FileDescriptorProto for metrics_service.proto, registered in the
# process-wide default descriptor pool at import time.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess\"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3')

# Materialize message classes (ExportMetricsServiceRequest/Response,
# ExportMetricsPartialSuccess) into this module's namespace.
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1'
  _globals['_EXPORTMETRICSSERVICEREQUEST']._serialized_start=154
  _globals['_EXPORTMETRICSSERVICEREQUEST']._serialized_end=258
  _globals['_EXPORTMETRICSSERVICERESPONSE']._serialized_start=260
  _globals['_EXPORTMETRICSSERVICERESPONSE']._serialized_end=386
  _globals['_EXPORTMETRICSPARTIALSUCCESS']._serialized_start=388
  _globals['_EXPORTMETRICSPARTIALSUCCESS']._serialized_end=470
  _globals['_METRICSSERVICE']._serialized_start=473
  _globals['_METRICSSERVICE']._serialized_end=645
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,117 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2019, OpenTelemetry Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.metrics.v1.metrics_pb2
import sys
# typing has the required features (Literal, final) from 3.8 on; fall back
# to the typing_extensions backport on older interpreters.
if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

# Descriptor for the .proto file this stub was generated from.
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
# Type stub for the generated ExportMetricsServiceRequest message
# (field 1: repeated ResourceMetrics).
@typing_extensions.final
class ExportMetricsServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_METRICS_FIELD_NUMBER: builtins.int
    @property
    def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]:
        """An array of ResourceMetrics.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    def __init__(
        self,
        *,
        resource_metrics: collections.abc.Iterable[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["resource_metrics", b"resource_metrics"]) -> None: ...

global___ExportMetricsServiceRequest = ExportMetricsServiceRequest
# Type stub for the generated ExportMetricsServiceResponse message
# (field 1: optional ExportMetricsPartialSuccess).
@typing_extensions.final
class ExportMetricsServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportMetricsPartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportMetricsPartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportMetricsServiceResponse = ExportMetricsServiceResponse
# Type stub for the generated ExportMetricsPartialSuccess message
# (field 1: rejected_data_points, field 2: error_message).
@typing_extensions.final
class ExportMetricsPartialSuccess(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    REJECTED_DATA_POINTS_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int
    rejected_data_points: builtins.int
    """The number of rejected data points.

    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.

    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    def __init__(
        self,
        *,
        rejected_data_points: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_data_points", b"rejected_data_points"]) -> None: ...

global___ExportMetricsPartialSuccess = ExportMetricsPartialSuccess

View File

@@ -0,0 +1,110 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from opentelemetry.proto.collector.metrics.v1 import metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2

# Minimum grpcio version this generated module was built against.
GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    # grpcio too old to even carry the version-comparison helper.
    _version_not_supported = True

if _version_not_supported:
    # Emit a (future: error) warning if the installed grpcio predates the
    # version the generator targeted.
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )
class MetricsServiceStub(object):
    """Service that can be used to push metrics between one Application
    instrumented with OpenTelemetry and a collector, or between a collector and a
    central collector.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        # Unary-unary callable for MetricsService.Export; serializes the
        # request and deserializes the response using the generated pb2 messages.
        self.Export = channel.unary_unary(
                '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export',
                request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString,
                response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString,
                _registered_method=True)
class MetricsServiceServicer(object):
    """Service that can be used to push metrics between one Application
    instrumented with OpenTelemetry and a collector, or between a collector and a
    central collector.
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        # Default server-side stub: subclasses override this to handle the RPC.
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')
def add_MetricsServiceServicer_to_server(servicer, server):
    # Register the servicer's RPC handlers (currently only Export) with the
    # given grpc.Server under the fully-qualified service name.
    rpc_method_handlers = {
            'Export': grpc.unary_unary_rpc_method_handler(
                    servicer.Export,
                    request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString,
                    response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'opentelemetry.proto.collector.metrics.v1.MetricsService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class MetricsService(object):
    """Service that can be used to push metrics between one Application
    instrumented with OpenTelemetry and a collector, or between a collector and a
    central collector.
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        # One-shot convenience invocation of the Export RPC via the
        # experimental API (no pre-built channel/stub required).
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export',
            opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString,
            opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

View File

@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/profiles/v1development/profiles_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()

from opentelemetry.proto.profiles.v1development import profiles_pb2 as opentelemetry_dot_proto_dot_profiles_dot_v1development_dot_profiles__pb2

# Serialized FileDescriptorProto for profiles_service.proto, registered in the
# process-wide default descriptor pool at import time.
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nKopentelemetry/proto/collector/profiles/v1development/profiles_service.proto\x12\x34opentelemetry.proto.collector.profiles.v1development\x1a\x39opentelemetry/proto/profiles/v1development/profiles.proto\"\xcb\x01\n\x1c\x45xportProfilesServiceRequest\x12W\n\x11resource_profiles\x18\x01 \x03(\x0b\x32<.opentelemetry.proto.profiles.v1development.ResourceProfiles\x12R\n\ndictionary\x18\x02 \x01(\x0b\x32>.opentelemetry.proto.profiles.v1development.ProfilesDictionary\"\x8c\x01\n\x1d\x45xportProfilesServiceResponse\x12k\n\x0fpartial_success\x18\x01 \x01(\x0b\x32R.opentelemetry.proto.collector.profiles.v1development.ExportProfilesPartialSuccess\"P\n\x1c\x45xportProfilesPartialSuccess\x12\x19\n\x11rejected_profiles\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xc7\x01\n\x0fProfilesService\x12\xb3\x01\n\x06\x45xport\x12R.opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceRequest\x1aS.opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceResponse\"\x00\x42\xc9\x01\n7io.opentelemetry.proto.collector.profiles.v1developmentB\x14ProfilesServiceProtoP\x01Z?go.opentelemetry.io/proto/otlp/collector/profiles/v1development\xaa\x02\x34OpenTelemetry.Proto.Collector.Profiles.V1Developmentb\x06proto3')

# Materialize message classes (ExportProfilesServiceRequest/Response,
# ExportProfilesPartialSuccess) into this module's namespace.
_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.profiles.v1development.profiles_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  # Pure-Python descriptors: attach serialized options and byte offsets.
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n7io.opentelemetry.proto.collector.profiles.v1developmentB\024ProfilesServiceProtoP\001Z?go.opentelemetry.io/proto/otlp/collector/profiles/v1development\252\0024OpenTelemetry.Proto.Collector.Profiles.V1Development'
  _globals['_EXPORTPROFILESSERVICEREQUEST']._serialized_start=193
  _globals['_EXPORTPROFILESSERVICEREQUEST']._serialized_end=396
  _globals['_EXPORTPROFILESSERVICERESPONSE']._serialized_start=399
  _globals['_EXPORTPROFILESSERVICERESPONSE']._serialized_end=539
  _globals['_EXPORTPROFILESPARTIALSUCCESS']._serialized_start=541
  _globals['_EXPORTPROFILESPARTIALSUCCESS']._serialized_end=621
  _globals['_PROFILESSERVICE']._serialized_start=624
  _globals['_PROFILESSERVICE']._serialized_end=823
# @@protoc_insertion_point(module_scope)

View File

@@ -0,0 +1,123 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2023, OpenTelemetry Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.profiles.v1development.profiles_pb2
import sys
# typing has the required features (Literal, final) from 3.8 on; fall back
# to the typing_extensions backport on older interpreters.
if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

# Descriptor for the .proto file this stub was generated from.
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
# Type stub for the generated ExportProfilesServiceRequest message
# (field 1: repeated ResourceProfiles, field 2: ProfilesDictionary).
@typing_extensions.final
class ExportProfilesServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_PROFILES_FIELD_NUMBER: builtins.int
    DICTIONARY_FIELD_NUMBER: builtins.int
    @property
    def resource_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.profiles.v1development.profiles_pb2.ResourceProfiles]:
        """An array of ResourceProfiles.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    @property
    def dictionary(self) -> opentelemetry.proto.profiles.v1development.profiles_pb2.ProfilesDictionary:
        """The reference table containing all data shared by profiles across the message being sent."""
    def __init__(
        self,
        *,
        resource_profiles: collections.abc.Iterable[opentelemetry.proto.profiles.v1development.profiles_pb2.ResourceProfiles] | None = ...,
        dictionary: opentelemetry.proto.profiles.v1development.profiles_pb2.ProfilesDictionary | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary", "resource_profiles", b"resource_profiles"]) -> None: ...

global___ExportProfilesServiceRequest = ExportProfilesServiceRequest
# Type stub for the generated ExportProfilesServiceResponse message
# (field 1: optional ExportProfilesPartialSuccess).
@typing_extensions.final
class ExportProfilesServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportProfilesPartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportProfilesPartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportProfilesServiceResponse = ExportProfilesServiceResponse
@typing_extensions.final
class ExportProfilesPartialSuccess(google.protobuf.message.Message):
    # Generated stub describing why part of an Export request was rejected,
    # or carrying a warning/suggestion on a fully accepted request.
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    # Protobuf field-number constants for the two scalar fields below.
    REJECTED_PROFILES_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int

    rejected_profiles: builtins.int
    """The number of rejected profiles.
    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.
    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    # Keyword-only constructor stub for the generated message; both scalar
    # fields may be omitted.
    def __init__(
        self,
        *,
        rejected_profiles: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    # Clears the named field; only this message's fields are accepted, each
    # as str or bytes.
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_profiles", b"rejected_profiles"]) -> None: ...

# Module-level alias referenced by other generated stubs in this file.
global___ExportProfilesPartialSuccess = ExportProfilesPartialSuccess

View File

@@ -0,0 +1,107 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings
from opentelemetry.proto.collector.profiles.v1development import profiles_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2
# Version-compatibility guard emitted by the gRPC codegen: warn when the
# installed grpcio is older than the version these stubs were generated with.
GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'

try:
    from grpc._utilities import first_version_is_lower
except ImportError:
    # grpcio builds that predate the helper are certainly older than the
    # generated version, so treat them as unsupported.
    _version_not_supported = True
else:
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)

if _version_not_supported:
    # Adjacent f-string literals concatenate into the same single message the
    # original `+`-joined form produced.
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        f' but the generated code in opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2_grpc.py depends on'
        f' grpcio>={GRPC_GENERATED_VERSION}.'
        f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )
class ProfilesServiceStub(object):
    """Client-side stub for ProfilesService.

    Service that can be used to push profiles between one Application
    instrumented with OpenTelemetry and a collector, or between a collector
    and a central collector.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        pb2 = opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2
        # Bind the Export RPC as a registered unary-unary callable on the
        # supplied channel.
        self.Export = channel.unary_unary(
            '/opentelemetry.proto.collector.profiles.v1development.ProfilesService/Export',
            request_serializer=pb2.ExportProfilesServiceRequest.SerializeToString,
            response_deserializer=pb2.ExportProfilesServiceResponse.FromString,
            _registered_method=True,
        )
class ProfilesServiceServicer(object):
    """Server-side handler base class for ProfilesService.

    Service that can be used to push profiles between one Application
    instrumented with OpenTelemetry and a collector, or between a collector
    and a central collector.
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        # Default implementation: report UNIMPLEMENTED to the caller and
        # raise locally; subclasses override this with real behavior.
        detail = 'Method not implemented!'
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details(detail)
        raise NotImplementedError(detail)
def add_ProfilesServiceServicer_to_server(servicer, server):
    """Register *servicer*'s Export handler on *server* under the fully
    qualified ProfilesService name."""
    pb2 = opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2
    export_handler = grpc.unary_unary_rpc_method_handler(
        servicer.Export,
        request_deserializer=pb2.ExportProfilesServiceRequest.FromString,
        response_serializer=pb2.ExportProfilesServiceResponse.SerializeToString,
    )
    generic_handler = grpc.method_handlers_generic_handler(
        'opentelemetry.proto.collector.profiles.v1development.ProfilesService',
        {'Export': export_handler},
    )
    server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class ProfilesService(object):
    """One-shot client helpers for ProfilesService.

    Service that can be used to push profiles between one Application
    instrumented with OpenTelemetry and a collector, or between a collector
    and a central collector.
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        """Invoke the Export RPC against *target* via grpc.experimental
        without constructing a stub or managing a channel."""
        pb2 = opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2
        # Positional argument order below follows the grpc.experimental
        # unary_unary signature (note: insecure precedes call_credentials).
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.profiles.v1development.ProfilesService/Export',
            pb2.ExportProfilesServiceRequest.SerializeToString,
            pb2.ExportProfilesServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)

Some files were not shown because too many files have changed in this diff Show More