增加环绕侦察场景适配
This commit is contained in:
@@ -17,6 +17,8 @@ from logging import getLogger
|
||||
from os import environ
|
||||
from typing import Optional, cast
|
||||
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from opentelemetry._logs import LogRecord
|
||||
from opentelemetry._logs.severity import SeverityNumber
|
||||
from opentelemetry.environment_variables import (
|
||||
@@ -30,6 +32,10 @@ from opentelemetry.util.types import AnyValue, _ExtendedAttributes
|
||||
_logger = getLogger(__name__)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `LogRecord` with the `event_name` field set instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class Event(LogRecord):
|
||||
def __init__(
|
||||
self,
|
||||
@@ -59,6 +65,10 @@ class Event(LogRecord):
|
||||
self.name = name
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `Logger` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class EventLogger(ABC):
|
||||
def __init__(
|
||||
self,
|
||||
@@ -77,11 +87,19 @@ class EventLogger(ABC):
|
||||
"""Emits a :class:`Event` representing an event."""
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `NoOpLogger` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class NoOpEventLogger(EventLogger):
|
||||
def emit(self, event: Event) -> None:
|
||||
pass
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `ProxyLogger` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class ProxyEventLogger(EventLogger):
|
||||
def __init__(
|
||||
self,
|
||||
@@ -118,6 +136,10 @@ class ProxyEventLogger(EventLogger):
|
||||
self._event_logger.emit(event)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `LoggerProvider` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class EventLoggerProvider(ABC):
|
||||
@abstractmethod
|
||||
def get_event_logger(
|
||||
@@ -130,6 +152,10 @@ class EventLoggerProvider(ABC):
|
||||
"""Returns an EventLoggerProvider for use."""
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `NoOpLoggerProvider` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class NoOpEventLoggerProvider(EventLoggerProvider):
|
||||
def get_event_logger(
|
||||
self,
|
||||
@@ -143,6 +169,10 @@ class NoOpEventLoggerProvider(EventLoggerProvider):
|
||||
)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `ProxyLoggerProvider` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class ProxyEventLoggerProvider(EventLoggerProvider):
|
||||
def get_event_logger(
|
||||
self,
|
||||
@@ -171,6 +201,10 @@ _EVENT_LOGGER_PROVIDER: Optional[EventLoggerProvider] = None
|
||||
_PROXY_EVENT_LOGGER_PROVIDER = ProxyEventLoggerProvider()
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `get_logger_provider` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
def get_event_logger_provider() -> EventLoggerProvider:
|
||||
global _EVENT_LOGGER_PROVIDER # pylint: disable=global-variable-not-assigned
|
||||
if _EVENT_LOGGER_PROVIDER is None:
|
||||
@@ -201,12 +235,20 @@ def _set_event_logger_provider(
|
||||
)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `set_logger_provider` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
def set_event_logger_provider(
|
||||
event_logger_provider: EventLoggerProvider,
|
||||
) -> None:
|
||||
_set_event_logger_provider(event_logger_provider, log=True)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `get_logger` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
def get_event_logger(
|
||||
name: str,
|
||||
version: Optional[str] = None,
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -118,7 +118,7 @@ def _clean_attribute(
|
||||
return None
|
||||
|
||||
|
||||
def _clean_extended_attribute_value(
|
||||
def _clean_extended_attribute_value( # pylint: disable=too-many-branches
|
||||
value: types.AnyValue, max_len: Optional[int]
|
||||
) -> types.AnyValue:
|
||||
# for primitive types just return the value and eventually shorten the string length
|
||||
@@ -180,11 +180,19 @@ def _clean_extended_attribute_value(
|
||||
# Freeze mutable sequences defensively
|
||||
return tuple(cleaned_seq)
|
||||
|
||||
raise TypeError(
|
||||
f"Invalid type {type(value).__name__} for attribute value. "
|
||||
f"Expected one of {[valid_type.__name__ for valid_type in _VALID_ANY_VALUE_TYPES]} or a "
|
||||
"sequence of those types",
|
||||
)
|
||||
# Some applications such as Django add values to log records whose types fall outside the
|
||||
# primitive types and `_VALID_ANY_VALUE_TYPES`, i.e., they are not of type `AnyValue`.
|
||||
# Rather than attempt to whitelist every possible instrumentation, we stringify those values here
|
||||
# so they can still be represented as attributes, falling back to the original TypeError only if
|
||||
# converting to string raises.
|
||||
try:
|
||||
return str(value)
|
||||
except Exception:
|
||||
raise TypeError(
|
||||
f"Invalid type {type(value).__name__} for attribute value. "
|
||||
f"Expected one of {[valid_type.__name__ for valid_type in _VALID_ANY_VALUE_TYPES]} or a "
|
||||
"sequence of those types",
|
||||
)
|
||||
|
||||
|
||||
def _clean_extended_attribute(
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -30,51 +30,55 @@ from opentelemetry.proto.logs.v1.logs_pb2 import (
|
||||
ResourceLogs,
|
||||
ScopeLogs,
|
||||
)
|
||||
from opentelemetry.sdk._logs import LogData
|
||||
from opentelemetry.sdk._logs import ReadableLogRecord
|
||||
|
||||
|
||||
def encode_logs(batch: Sequence[LogData]) -> ExportLogsServiceRequest:
|
||||
def encode_logs(
|
||||
batch: Sequence[ReadableLogRecord],
|
||||
) -> ExportLogsServiceRequest:
|
||||
return ExportLogsServiceRequest(resource_logs=_encode_resource_logs(batch))
|
||||
|
||||
|
||||
def _encode_log(log_data: LogData) -> PB2LogRecord:
|
||||
def _encode_log(readable_log_record: ReadableLogRecord) -> PB2LogRecord:
|
||||
span_id = (
|
||||
None
|
||||
if log_data.log_record.span_id == 0
|
||||
else _encode_span_id(log_data.log_record.span_id)
|
||||
if readable_log_record.log_record.span_id == 0
|
||||
else _encode_span_id(readable_log_record.log_record.span_id)
|
||||
)
|
||||
trace_id = (
|
||||
None
|
||||
if log_data.log_record.trace_id == 0
|
||||
else _encode_trace_id(log_data.log_record.trace_id)
|
||||
if readable_log_record.log_record.trace_id == 0
|
||||
else _encode_trace_id(readable_log_record.log_record.trace_id)
|
||||
)
|
||||
body = log_data.log_record.body
|
||||
body = readable_log_record.log_record.body
|
||||
return PB2LogRecord(
|
||||
time_unix_nano=log_data.log_record.timestamp,
|
||||
observed_time_unix_nano=log_data.log_record.observed_timestamp,
|
||||
time_unix_nano=readable_log_record.log_record.timestamp,
|
||||
observed_time_unix_nano=readable_log_record.log_record.observed_timestamp,
|
||||
span_id=span_id,
|
||||
trace_id=trace_id,
|
||||
flags=int(log_data.log_record.trace_flags),
|
||||
flags=int(readable_log_record.log_record.trace_flags),
|
||||
body=_encode_value(body, allow_null=True),
|
||||
severity_text=log_data.log_record.severity_text,
|
||||
severity_text=readable_log_record.log_record.severity_text,
|
||||
attributes=_encode_attributes(
|
||||
log_data.log_record.attributes, allow_null=True
|
||||
readable_log_record.log_record.attributes, allow_null=True
|
||||
),
|
||||
dropped_attributes_count=log_data.log_record.dropped_attributes,
|
||||
dropped_attributes_count=readable_log_record.dropped_attributes,
|
||||
severity_number=getattr(
|
||||
log_data.log_record.severity_number, "value", None
|
||||
readable_log_record.log_record.severity_number, "value", None
|
||||
),
|
||||
event_name=log_data.log_record.event_name,
|
||||
event_name=readable_log_record.log_record.event_name,
|
||||
)
|
||||
|
||||
|
||||
def _encode_resource_logs(batch: Sequence[LogData]) -> List[ResourceLogs]:
|
||||
def _encode_resource_logs(
|
||||
batch: Sequence[ReadableLogRecord],
|
||||
) -> List[ResourceLogs]:
|
||||
sdk_resource_logs = defaultdict(lambda: defaultdict(list))
|
||||
|
||||
for sdk_log in batch:
|
||||
sdk_resource = sdk_log.log_record.resource
|
||||
sdk_instrumentation = sdk_log.instrumentation_scope or None
|
||||
pb2_log = _encode_log(sdk_log)
|
||||
for readable_log in batch:
|
||||
sdk_resource = readable_log.resource
|
||||
sdk_instrumentation = readable_log.instrumentation_scope or None
|
||||
pb2_log = _encode_log(readable_log)
|
||||
|
||||
sdk_resource_logs[sdk_resource][sdk_instrumentation].append(pb2_log)
|
||||
|
||||
|
||||
@@ -115,7 +115,7 @@ class OTLPMetricExporterMixin:
|
||||
"CUMULATIVE"
|
||||
):
|
||||
_logger.warning(
|
||||
"Unrecognized OTEL_EXPORTER_METRICS_TEMPORALITY_PREFERENCE"
|
||||
"Unrecognized OTEL_EXPORTER_OTLP_METRICS_TEMPORALITY_PREFERENCE"
|
||||
" value found: "
|
||||
"%s, "
|
||||
"using CUMULATIVE",
|
||||
|
||||
Binary file not shown.
@@ -12,4 +12,4 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
__version__ = "1.38.0"
|
||||
__version__ = "1.39.1"
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -28,8 +28,11 @@ from opentelemetry.proto.collector.logs.v1.logs_service_pb2 import (
|
||||
from opentelemetry.proto.collector.logs.v1.logs_service_pb2_grpc import (
|
||||
LogsServiceStub,
|
||||
)
|
||||
from opentelemetry.sdk._logs import LogData
|
||||
from opentelemetry.sdk._logs.export import LogExporter, LogExportResult
|
||||
from opentelemetry.sdk._logs import ReadableLogRecord
|
||||
from opentelemetry.sdk._logs.export import (
|
||||
LogRecordExporter,
|
||||
LogRecordExportResult,
|
||||
)
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_GRPC_LOGS_CREDENTIAL_PROVIDER,
|
||||
OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
|
||||
@@ -44,11 +47,11 @@ from opentelemetry.sdk.environment_variables import (
|
||||
|
||||
|
||||
class OTLPLogExporter(
|
||||
LogExporter,
|
||||
LogRecordExporter,
|
||||
OTLPExporterMixin[
|
||||
Sequence[LogData],
|
||||
Sequence[ReadableLogRecord],
|
||||
ExportLogsServiceRequest,
|
||||
LogExportResult,
|
||||
LogRecordExportResult,
|
||||
LogsServiceStub,
|
||||
],
|
||||
):
|
||||
@@ -100,19 +103,19 @@ class OTLPLogExporter(
|
||||
timeout=timeout or environ_timeout,
|
||||
compression=compression,
|
||||
stub=LogsServiceStub,
|
||||
result=LogExportResult,
|
||||
result=LogRecordExportResult,
|
||||
channel_options=channel_options,
|
||||
)
|
||||
|
||||
def _translate_data(
|
||||
self, data: Sequence[LogData]
|
||||
self, data: Sequence[ReadableLogRecord]
|
||||
) -> ExportLogsServiceRequest:
|
||||
return encode_logs(data)
|
||||
|
||||
def export( # type: ignore [reportIncompatibleMethodOverride]
|
||||
self,
|
||||
batch: Sequence[LogData],
|
||||
) -> Literal[LogExportResult.SUCCESS, LogExportResult.FAILURE]:
|
||||
batch: Sequence[ReadableLogRecord],
|
||||
) -> Literal[LogRecordExportResult.SUCCESS, LogRecordExportResult.FAILURE]:
|
||||
return OTLPExporterMixin._export(self, batch)
|
||||
|
||||
def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
|
||||
|
||||
@@ -80,8 +80,8 @@ from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
|
||||
KeyValue,
|
||||
)
|
||||
from opentelemetry.proto.resource.v1.resource_pb2 import Resource # noqa: F401
|
||||
from opentelemetry.sdk._logs import LogData
|
||||
from opentelemetry.sdk._logs.export import LogExportResult
|
||||
from opentelemetry.sdk._logs import ReadableLogRecord
|
||||
from opentelemetry.sdk._logs.export import LogRecordExportResult
|
||||
from opentelemetry.sdk._shared_internal import DuplicateFilter
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_GRPC_CREDENTIAL_PROVIDER,
|
||||
@@ -118,7 +118,7 @@ logger = getLogger(__name__)
|
||||
logger.addFilter(DuplicateFilter())
|
||||
SDKDataT = TypeVar(
|
||||
"SDKDataT",
|
||||
TypingSequence[LogData],
|
||||
TypingSequence[ReadableLogRecord],
|
||||
MetricsData,
|
||||
TypingSequence[ReadableSpan],
|
||||
)
|
||||
@@ -132,7 +132,7 @@ ExportServiceRequestT = TypeVar(
|
||||
)
|
||||
ExportResultT = TypeVar(
|
||||
"ExportResultT",
|
||||
LogExportResult,
|
||||
LogRecordExportResult,
|
||||
MetricExportResult,
|
||||
SpanExportResult,
|
||||
)
|
||||
|
||||
Binary file not shown.
@@ -12,4 +12,4 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
__version__ = "1.38.0"
|
||||
__version__ = "1.39.1"
|
||||
|
||||
Binary file not shown.
@@ -1,86 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
"""
|
||||
This library allows to export tracing data to an OTLP collector.
|
||||
|
||||
Usage
|
||||
-----
|
||||
|
||||
The **OTLP Span Exporter** allows to export `OpenTelemetry`_ traces to the
|
||||
`OTLP`_ collector.
|
||||
|
||||
You can configure the exporter with the following environment variables:
|
||||
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_TIMEOUT`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_PROTOCOL`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_HEADERS`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_ENDPOINT`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_COMPRESSION`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_TIMEOUT`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_PROTOCOL`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_HEADERS`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_ENDPOINT`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_COMPRESSION`
|
||||
- :envvar:`OTEL_EXPORTER_OTLP_CERTIFICATE`
|
||||
|
||||
.. _OTLP: https://github.com/open-telemetry/opentelemetry-collector/
|
||||
.. _OpenTelemetry: https://github.com/open-telemetry/opentelemetry-python/
|
||||
|
||||
.. code:: python
|
||||
|
||||
from opentelemetry import trace
|
||||
from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
|
||||
from opentelemetry.sdk.resources import Resource
|
||||
from opentelemetry.sdk.trace import TracerProvider
|
||||
from opentelemetry.sdk.trace.export import BatchSpanProcessor
|
||||
|
||||
# Resource can be required for some backends, e.g. Jaeger
|
||||
# If resource wouldn't be set - traces wouldn't appears in Jaeger
|
||||
resource = Resource(attributes={
|
||||
"service.name": "service"
|
||||
})
|
||||
|
||||
trace.set_tracer_provider(TracerProvider(resource=resource))
|
||||
tracer = trace.get_tracer(__name__)
|
||||
|
||||
otlp_exporter = OTLPSpanExporter()
|
||||
|
||||
span_processor = BatchSpanProcessor(otlp_exporter)
|
||||
|
||||
trace.get_tracer_provider().add_span_processor(span_processor)
|
||||
|
||||
with tracer.start_as_current_span("foo"):
|
||||
print("Hello world!")
|
||||
|
||||
API
|
||||
---
|
||||
"""
|
||||
|
||||
import enum
|
||||
|
||||
from .version import __version__
|
||||
|
||||
_OTLP_HTTP_HEADERS = {
|
||||
"Content-Type": "application/x-protobuf",
|
||||
"User-Agent": "OTel-OTLP-Exporter-Python/" + __version__,
|
||||
}
|
||||
|
||||
|
||||
class Compression(enum.Enum):
|
||||
NoCompression = "none"
|
||||
Deflate = "deflate"
|
||||
Gzip = "gzip"
|
||||
@@ -1,69 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
from os import environ
|
||||
from typing import Literal, Optional
|
||||
|
||||
import requests
|
||||
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_CREDENTIAL_PROVIDER,
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
|
||||
)
|
||||
from opentelemetry.util._importlib_metadata import entry_points
|
||||
|
||||
|
||||
def _is_retryable(resp: requests.Response) -> bool:
|
||||
if resp.status_code == 408:
|
||||
return True
|
||||
if resp.status_code >= 500 and resp.status_code <= 599:
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def _load_session_from_envvar(
|
||||
cred_envvar: Literal[
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
|
||||
],
|
||||
) -> Optional[requests.Session]:
|
||||
_credential_env = environ.get(
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_CREDENTIAL_PROVIDER
|
||||
) or environ.get(cred_envvar)
|
||||
if _credential_env:
|
||||
try:
|
||||
maybe_session = next(
|
||||
iter(
|
||||
entry_points(
|
||||
group="opentelemetry_otlp_credential_provider",
|
||||
name=_credential_env,
|
||||
)
|
||||
)
|
||||
).load()()
|
||||
except StopIteration:
|
||||
raise RuntimeError(
|
||||
f"Requested component '{_credential_env}' not found in "
|
||||
f"entry point 'opentelemetry_otlp_credential_provider'"
|
||||
)
|
||||
if isinstance(maybe_session, requests.Session):
|
||||
return maybe_session
|
||||
else:
|
||||
raise RuntimeError(
|
||||
f"Requested component '{_credential_env}' is of type {type(maybe_session)}"
|
||||
f" must be of type `requests.Session`."
|
||||
)
|
||||
return None
|
||||
@@ -1,243 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import gzip
|
||||
import logging
|
||||
import random
|
||||
import threading
|
||||
import zlib
|
||||
from io import BytesIO
|
||||
from os import environ
|
||||
from time import time
|
||||
from typing import Dict, Optional, Sequence
|
||||
|
||||
import requests
|
||||
from requests.exceptions import ConnectionError
|
||||
|
||||
from opentelemetry.exporter.otlp.proto.common._log_encoder import encode_logs
|
||||
from opentelemetry.exporter.otlp.proto.http import (
|
||||
_OTLP_HTTP_HEADERS,
|
||||
Compression,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.http._common import (
|
||||
_is_retryable,
|
||||
_load_session_from_envvar,
|
||||
)
|
||||
from opentelemetry.sdk._logs import LogData
|
||||
from opentelemetry.sdk._logs.export import (
|
||||
LogExporter,
|
||||
LogExportResult,
|
||||
)
|
||||
from opentelemetry.sdk._shared_internal import DuplicateFilter
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER,
|
||||
OTEL_EXPORTER_OTLP_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_CLIENT_KEY,
|
||||
OTEL_EXPORTER_OTLP_COMPRESSION,
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT,
|
||||
OTEL_EXPORTER_OTLP_HEADERS,
|
||||
OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
|
||||
OTEL_EXPORTER_OTLP_LOGS_COMPRESSION,
|
||||
OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
|
||||
OTEL_EXPORTER_OTLP_LOGS_HEADERS,
|
||||
OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
|
||||
OTEL_EXPORTER_OTLP_TIMEOUT,
|
||||
)
|
||||
from opentelemetry.util.re import parse_env_headers
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
# This prevents logs generated when a log fails to be written to generate another log which fails to be written etc. etc.
|
||||
_logger.addFilter(DuplicateFilter())
|
||||
|
||||
|
||||
DEFAULT_COMPRESSION = Compression.NoCompression
|
||||
DEFAULT_ENDPOINT = "http://localhost:4318/"
|
||||
DEFAULT_LOGS_EXPORT_PATH = "v1/logs"
|
||||
DEFAULT_TIMEOUT = 10 # in seconds
|
||||
_MAX_RETRYS = 6
|
||||
|
||||
|
||||
class OTLPLogExporter(LogExporter):
|
||||
def __init__(
|
||||
self,
|
||||
endpoint: Optional[str] = None,
|
||||
certificate_file: Optional[str] = None,
|
||||
client_key_file: Optional[str] = None,
|
||||
client_certificate_file: Optional[str] = None,
|
||||
headers: Optional[Dict[str, str]] = None,
|
||||
timeout: Optional[float] = None,
|
||||
compression: Optional[Compression] = None,
|
||||
session: Optional[requests.Session] = None,
|
||||
):
|
||||
self._shutdown_is_occuring = threading.Event()
|
||||
self._endpoint = endpoint or environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_ENDPOINT,
|
||||
_append_logs_path(
|
||||
environ.get(OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_ENDPOINT)
|
||||
),
|
||||
)
|
||||
# Keeping these as instance variables because they are used in tests
|
||||
self._certificate_file = certificate_file or environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_CERTIFICATE,
|
||||
environ.get(OTEL_EXPORTER_OTLP_CERTIFICATE, True),
|
||||
)
|
||||
self._client_key_file = client_key_file or environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_CLIENT_KEY,
|
||||
environ.get(OTEL_EXPORTER_OTLP_CLIENT_KEY, None),
|
||||
)
|
||||
self._client_certificate_file = client_certificate_file or environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_CLIENT_CERTIFICATE,
|
||||
environ.get(OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, None),
|
||||
)
|
||||
self._client_cert = (
|
||||
(self._client_certificate_file, self._client_key_file)
|
||||
if self._client_certificate_file and self._client_key_file
|
||||
else self._client_certificate_file
|
||||
)
|
||||
headers_string = environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_HEADERS,
|
||||
environ.get(OTEL_EXPORTER_OTLP_HEADERS, ""),
|
||||
)
|
||||
self._headers = headers or parse_env_headers(
|
||||
headers_string, liberal=True
|
||||
)
|
||||
self._timeout = timeout or float(
|
||||
environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_TIMEOUT,
|
||||
environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT),
|
||||
)
|
||||
)
|
||||
self._compression = compression or _compression_from_env()
|
||||
self._session = (
|
||||
session
|
||||
or _load_session_from_envvar(
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_LOGS_CREDENTIAL_PROVIDER
|
||||
)
|
||||
or requests.Session()
|
||||
)
|
||||
self._session.headers.update(self._headers)
|
||||
self._session.headers.update(_OTLP_HTTP_HEADERS)
|
||||
# let users override our defaults
|
||||
self._session.headers.update(self._headers)
|
||||
if self._compression is not Compression.NoCompression:
|
||||
self._session.headers.update(
|
||||
{"Content-Encoding": self._compression.value}
|
||||
)
|
||||
self._shutdown = False
|
||||
|
||||
def _export(
|
||||
self, serialized_data: bytes, timeout_sec: Optional[float] = None
|
||||
):
|
||||
data = serialized_data
|
||||
if self._compression == Compression.Gzip:
|
||||
gzip_data = BytesIO()
|
||||
with gzip.GzipFile(fileobj=gzip_data, mode="w") as gzip_stream:
|
||||
gzip_stream.write(serialized_data)
|
||||
data = gzip_data.getvalue()
|
||||
elif self._compression == Compression.Deflate:
|
||||
data = zlib.compress(serialized_data)
|
||||
|
||||
if timeout_sec is None:
|
||||
timeout_sec = self._timeout
|
||||
|
||||
# By default, keep-alive is enabled in Session's request
|
||||
# headers. Backends may choose to close the connection
|
||||
# while a post happens which causes an unhandled
|
||||
# exception. This try/except will retry the post on such exceptions
|
||||
try:
|
||||
resp = self._session.post(
|
||||
url=self._endpoint,
|
||||
data=data,
|
||||
verify=self._certificate_file,
|
||||
timeout=timeout_sec,
|
||||
cert=self._client_cert,
|
||||
)
|
||||
except ConnectionError:
|
||||
resp = self._session.post(
|
||||
url=self._endpoint,
|
||||
data=data,
|
||||
verify=self._certificate_file,
|
||||
timeout=timeout_sec,
|
||||
cert=self._client_cert,
|
||||
)
|
||||
return resp
|
||||
|
||||
def export(self, batch: Sequence[LogData]) -> LogExportResult:
|
||||
if self._shutdown:
|
||||
_logger.warning("Exporter already shutdown, ignoring batch")
|
||||
return LogExportResult.FAILURE
|
||||
|
||||
serialized_data = encode_logs(batch).SerializeToString()
|
||||
deadline_sec = time() + self._timeout
|
||||
for retry_num in range(_MAX_RETRYS):
|
||||
resp = self._export(serialized_data, deadline_sec - time())
|
||||
if resp.ok:
|
||||
return LogExportResult.SUCCESS
|
||||
# multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
|
||||
backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
|
||||
if (
|
||||
not _is_retryable(resp)
|
||||
or retry_num + 1 == _MAX_RETRYS
|
||||
or backoff_seconds > (deadline_sec - time())
|
||||
or self._shutdown
|
||||
):
|
||||
_logger.error(
|
||||
"Failed to export logs batch code: %s, reason: %s",
|
||||
resp.status_code,
|
||||
resp.text,
|
||||
)
|
||||
return LogExportResult.FAILURE
|
||||
_logger.warning(
|
||||
"Transient error %s encountered while exporting logs batch, retrying in %.2fs.",
|
||||
resp.reason,
|
||||
backoff_seconds,
|
||||
)
|
||||
shutdown = self._shutdown_is_occuring.wait(backoff_seconds)
|
||||
if shutdown:
|
||||
_logger.warning("Shutdown in progress, aborting retry.")
|
||||
break
|
||||
return LogExportResult.FAILURE
|
||||
|
||||
def force_flush(self, timeout_millis: float = 10_000) -> bool:
|
||||
"""Nothing is buffered in this exporter, so this method does nothing."""
|
||||
return True
|
||||
|
||||
def shutdown(self):
|
||||
if self._shutdown:
|
||||
_logger.warning("Exporter already shutdown, ignoring call")
|
||||
return
|
||||
self._shutdown = True
|
||||
self._shutdown_is_occuring.set()
|
||||
self._session.close()
|
||||
|
||||
|
||||
def _compression_from_env() -> Compression:
|
||||
compression = (
|
||||
environ.get(
|
||||
OTEL_EXPORTER_OTLP_LOGS_COMPRESSION,
|
||||
environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
|
||||
)
|
||||
.lower()
|
||||
.strip()
|
||||
)
|
||||
return Compression(compression)
|
||||
|
||||
|
||||
def _append_logs_path(endpoint: str) -> str:
|
||||
if endpoint.endswith("/"):
|
||||
return endpoint + DEFAULT_LOGS_EXPORT_PATH
|
||||
return endpoint + f"/{DEFAULT_LOGS_EXPORT_PATH}"
|
||||
@@ -1,305 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
from __future__ import annotations
|
||||
|
||||
import gzip
|
||||
import logging
|
||||
import random
|
||||
import threading
|
||||
import zlib
|
||||
from io import BytesIO
|
||||
from os import environ
|
||||
from time import time
|
||||
from typing import ( # noqa: F401
|
||||
Any,
|
||||
Callable,
|
||||
Dict,
|
||||
List,
|
||||
Mapping,
|
||||
Optional,
|
||||
Sequence,
|
||||
)
|
||||
|
||||
import requests
|
||||
from requests.exceptions import ConnectionError
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from opentelemetry.exporter.otlp.proto.common._internal import (
|
||||
_get_resource_data,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.common._internal.metrics_encoder import (
|
||||
OTLPMetricExporterMixin,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.common.metrics_encoder import (
|
||||
encode_metrics,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.http import (
|
||||
_OTLP_HTTP_HEADERS,
|
||||
Compression,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.http._common import (
|
||||
_is_retryable,
|
||||
_load_session_from_envvar,
|
||||
)
|
||||
from opentelemetry.proto.collector.metrics.v1.metrics_service_pb2 import ( # noqa: F401
|
||||
ExportMetricsServiceRequest,
|
||||
)
|
||||
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
|
||||
AnyValue,
|
||||
ArrayValue,
|
||||
InstrumentationScope,
|
||||
KeyValue,
|
||||
KeyValueList,
|
||||
)
|
||||
from opentelemetry.proto.metrics.v1 import metrics_pb2 as pb2 # noqa: F401
|
||||
from opentelemetry.proto.resource.v1.resource_pb2 import Resource # noqa: F401
|
||||
from opentelemetry.proto.resource.v1.resource_pb2 import (
|
||||
Resource as PB2Resource,
|
||||
)
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER,
|
||||
OTEL_EXPORTER_OTLP_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_CLIENT_KEY,
|
||||
OTEL_EXPORTER_OTLP_COMPRESSION,
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT,
|
||||
OTEL_EXPORTER_OTLP_HEADERS,
|
||||
OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
|
||||
OTEL_EXPORTER_OTLP_METRICS_COMPRESSION,
|
||||
OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
|
||||
OTEL_EXPORTER_OTLP_METRICS_HEADERS,
|
||||
OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
|
||||
OTEL_EXPORTER_OTLP_TIMEOUT,
|
||||
)
|
||||
from opentelemetry.sdk.metrics._internal.aggregation import Aggregation
|
||||
from opentelemetry.sdk.metrics.export import ( # noqa: F401
|
||||
AggregationTemporality,
|
||||
Gauge,
|
||||
MetricExporter,
|
||||
MetricExportResult,
|
||||
MetricsData,
|
||||
Sum,
|
||||
)
|
||||
from opentelemetry.sdk.metrics.export import ( # noqa: F401
|
||||
Histogram as HistogramType,
|
||||
)
|
||||
from opentelemetry.sdk.resources import Resource as SDKResource
|
||||
from opentelemetry.util.re import parse_env_headers
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
DEFAULT_COMPRESSION = Compression.NoCompression
|
||||
DEFAULT_ENDPOINT = "http://localhost:4318/"
|
||||
DEFAULT_METRICS_EXPORT_PATH = "v1/metrics"
|
||||
DEFAULT_TIMEOUT = 10 # in seconds
|
||||
_MAX_RETRYS = 6
|
||||
|
||||
|
||||
class OTLPMetricExporter(MetricExporter, OTLPMetricExporterMixin):
    """Exports metrics over OTLP/HTTP with a protobuf payload.

    Every configuration value falls back from the constructor argument to
    the metrics-specific environment variable and finally to the generic
    OTLP environment variable (or a hard-coded default).
    """

    def __init__(
        self,
        endpoint: str | None = None,
        certificate_file: str | None = None,
        client_key_file: str | None = None,
        client_certificate_file: str | None = None,
        headers: dict[str, str] | None = None,
        timeout: float | None = None,
        compression: Compression | None = None,
        session: requests.Session | None = None,
        preferred_temporality: dict[type, AggregationTemporality]
        | None = None,
        preferred_aggregation: dict[type, Aggregation] | None = None,
    ):
        # Set by shutdown(); lets a retry back-off wait be aborted early.
        self._shutdown_in_progress = threading.Event()
        self._endpoint = endpoint or environ.get(
            OTEL_EXPORTER_OTLP_METRICS_ENDPOINT,
            _append_metrics_path(
                environ.get(OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_ENDPOINT)
            ),
        )
        # ``True`` tells requests to verify TLS against the default CA bundle.
        self._certificate_file = certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_METRICS_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CERTIFICATE, True),
        )
        self._client_key_file = client_key_file or environ.get(
            OTEL_EXPORTER_OTLP_METRICS_CLIENT_KEY,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_KEY, None),
        )
        self._client_certificate_file = client_certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_METRICS_CLIENT_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, None),
        )
        # requests accepts either a single combined cert file or a
        # (certificate, key) tuple.
        self._client_cert = (
            (self._client_certificate_file, self._client_key_file)
            if self._client_certificate_file and self._client_key_file
            else self._client_certificate_file
        )
        headers_string = environ.get(
            OTEL_EXPORTER_OTLP_METRICS_HEADERS,
            environ.get(OTEL_EXPORTER_OTLP_HEADERS, ""),
        )
        self._headers = headers or parse_env_headers(
            headers_string, liberal=True
        )
        self._timeout = timeout or float(
            environ.get(
                OTEL_EXPORTER_OTLP_METRICS_TIMEOUT,
                environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT),
            )
        )
        self._compression = compression or _compression_from_env()
        self._session = (
            session
            or _load_session_from_envvar(
                _OTEL_PYTHON_EXPORTER_OTLP_HTTP_METRICS_CREDENTIAL_PROVIDER
            )
            or requests.Session()
        )
        # Apply the protocol defaults first, then the user headers so users
        # can override our defaults.  (The original applied the user headers
        # an extra time before the defaults; one post-default update yields
        # the same final header set.)
        self._session.headers.update(_OTLP_HTTP_HEADERS)
        self._session.headers.update(self._headers)
        if self._compression is not Compression.NoCompression:
            self._session.headers.update(
                {"Content-Encoding": self._compression.value}
            )

        self._common_configuration(
            preferred_temporality, preferred_aggregation
        )
        self._shutdown = False

    def _export(
        self, serialized_data: bytes, timeout_sec: Optional[float] = None
    ):
        """POST one serialized payload; returns the ``requests`` response.

        The payload is compressed according to ``self._compression`` before
        sending.  ``timeout_sec`` defaults to the configured exporter timeout.
        """
        data = serialized_data
        if self._compression == Compression.Gzip:
            gzip_data = BytesIO()
            with gzip.GzipFile(fileobj=gzip_data, mode="w") as gzip_stream:
                gzip_stream.write(serialized_data)
            data = gzip_data.getvalue()
        elif self._compression == Compression.Deflate:
            data = zlib.compress(serialized_data)

        if timeout_sec is None:
            timeout_sec = self._timeout

        # By default, keep-alive is enabled in Session's request
        # headers. Backends may choose to close the connection
        # while a post happens which causes an unhandled
        # exception. This try/except will retry the post on such exceptions
        try:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        except ConnectionError:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        return resp

    def export(
        self,
        metrics_data: MetricsData,
        timeout_millis: Optional[float] = 10000,
        **kwargs,
    ) -> MetricExportResult:
        """Serialize and send ``metrics_data``, retrying transient failures.

        Retries use exponential back-off with jitter, bounded by
        ``_MAX_RETRYS``, the configured timeout deadline, and shutdown.
        ``timeout_millis`` is accepted for interface compatibility but the
        configured ``self._timeout`` governs the deadline.
        """
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring batch")
            return MetricExportResult.FAILURE
        serialized_data = encode_metrics(metrics_data).SerializeToString()
        deadline_sec = time() + self._timeout
        for retry_num in range(_MAX_RETRYS):
            resp = self._export(serialized_data, deadline_sec - time())
            if resp.ok:
                return MetricExportResult.SUCCESS
            # multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
            backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
            if (
                not _is_retryable(resp)
                or retry_num + 1 == _MAX_RETRYS
                or backoff_seconds > (deadline_sec - time())
                or self._shutdown
            ):
                _logger.error(
                    "Failed to export metrics batch code: %s, reason: %s",
                    resp.status_code,
                    resp.text,
                )
                return MetricExportResult.FAILURE
            _logger.warning(
                "Transient error %s encountered while exporting metrics batch, retrying in %.2fs.",
                resp.reason,
                backoff_seconds,
            )
            # Sleep for the back-off, but wake immediately on shutdown().
            shutdown = self._shutdown_in_progress.wait(backoff_seconds)
            if shutdown:
                _logger.warning("Shutdown in progress, aborting retry.")
                break
        return MetricExportResult.FAILURE

    def shutdown(self, timeout_millis: float = 30_000, **kwargs) -> None:
        """Mark the exporter shut down and close the HTTP session."""
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring call")
            return
        self._shutdown = True
        self._shutdown_in_progress.set()
        self._session.close()

    @property
    def _exporting(self) -> str:
        # Used by the mixin for log/metric naming.
        return "metrics"

    def force_flush(self, timeout_millis: float = 10_000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True
|
||||
|
||||
@deprecated(
    "Use one of the encoders from opentelemetry-exporter-otlp-proto-common instead. Deprecated since version 1.18.0.",
)
def get_resource_data(
    sdk_resource_scope_data: Dict[SDKResource, Any],  # ResourceDataT?
    resource_class: Callable[..., PB2Resource],
    name: str,
) -> List[PB2Resource]:
    """Deprecated pass-through to the shared OTLP encoder helper."""
    result = _get_resource_data(sdk_resource_scope_data, resource_class, name)
    return result
|
||||
|
||||
|
||||
def _compression_from_env() -> Compression:
    """Resolve payload compression from env vars (metrics-specific wins)."""
    raw_value = environ.get(
        OTEL_EXPORTER_OTLP_METRICS_COMPRESSION,
        environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
    )
    normalized = raw_value.lower().strip()
    return Compression(normalized)
|
||||
|
||||
|
||||
def _append_metrics_path(endpoint: str) -> str:
    """Join the metrics export path onto *endpoint* without doubling '/'."""
    separator = "" if endpoint.endswith("/") else "/"
    return f"{endpoint}{separator}{DEFAULT_METRICS_EXPORT_PATH}"
|
||||
@@ -1,238 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import gzip
|
||||
import logging
|
||||
import random
|
||||
import threading
|
||||
import zlib
|
||||
from io import BytesIO
|
||||
from os import environ
|
||||
from time import time
|
||||
from typing import Dict, Optional, Sequence
|
||||
|
||||
import requests
|
||||
from requests.exceptions import ConnectionError
|
||||
|
||||
from opentelemetry.exporter.otlp.proto.common.trace_encoder import (
|
||||
encode_spans,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.http import (
|
||||
_OTLP_HTTP_HEADERS,
|
||||
Compression,
|
||||
)
|
||||
from opentelemetry.exporter.otlp.proto.http._common import (
|
||||
_is_retryable,
|
||||
_load_session_from_envvar,
|
||||
)
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER,
|
||||
OTEL_EXPORTER_OTLP_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_CLIENT_KEY,
|
||||
OTEL_EXPORTER_OTLP_COMPRESSION,
|
||||
OTEL_EXPORTER_OTLP_ENDPOINT,
|
||||
OTEL_EXPORTER_OTLP_HEADERS,
|
||||
OTEL_EXPORTER_OTLP_TIMEOUT,
|
||||
OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
|
||||
OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
|
||||
OTEL_EXPORTER_OTLP_TRACES_COMPRESSION,
|
||||
OTEL_EXPORTER_OTLP_TRACES_ENDPOINT,
|
||||
OTEL_EXPORTER_OTLP_TRACES_HEADERS,
|
||||
OTEL_EXPORTER_OTLP_TRACES_TIMEOUT,
|
||||
)
|
||||
from opentelemetry.sdk.trace import ReadableSpan
|
||||
from opentelemetry.sdk.trace.export import SpanExporter, SpanExportResult
|
||||
from opentelemetry.util.re import parse_env_headers
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
DEFAULT_COMPRESSION = Compression.NoCompression
|
||||
DEFAULT_ENDPOINT = "http://localhost:4318/"
|
||||
DEFAULT_TRACES_EXPORT_PATH = "v1/traces"
|
||||
DEFAULT_TIMEOUT = 10 # in seconds
|
||||
_MAX_RETRYS = 6
|
||||
|
||||
|
||||
class OTLPSpanExporter(SpanExporter):
    """Exports spans over OTLP/HTTP with a protobuf payload.

    Every configuration value falls back from the constructor argument to
    the traces-specific environment variable and finally to the generic
    OTLP environment variable (or a hard-coded default).
    """

    def __init__(
        self,
        endpoint: Optional[str] = None,
        certificate_file: Optional[str] = None,
        client_key_file: Optional[str] = None,
        client_certificate_file: Optional[str] = None,
        headers: Optional[Dict[str, str]] = None,
        timeout: Optional[float] = None,
        compression: Optional[Compression] = None,
        session: Optional[requests.Session] = None,
    ):
        # Set by shutdown(); lets a retry back-off wait be aborted early.
        self._shutdown_in_progress = threading.Event()
        self._endpoint = endpoint or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_ENDPOINT,
            _append_trace_path(
                environ.get(OTEL_EXPORTER_OTLP_ENDPOINT, DEFAULT_ENDPOINT)
            ),
        )
        # ``True`` tells requests to verify TLS against the default CA bundle.
        self._certificate_file = certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CERTIFICATE, True),
        )
        self._client_key_file = client_key_file or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_CLIENT_KEY,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_KEY, None),
        )
        self._client_certificate_file = client_certificate_file or environ.get(
            OTEL_EXPORTER_OTLP_TRACES_CLIENT_CERTIFICATE,
            environ.get(OTEL_EXPORTER_OTLP_CLIENT_CERTIFICATE, None),
        )
        # requests accepts either a single combined cert file or a
        # (certificate, key) tuple.
        self._client_cert = (
            (self._client_certificate_file, self._client_key_file)
            if self._client_certificate_file and self._client_key_file
            else self._client_certificate_file
        )
        headers_string = environ.get(
            OTEL_EXPORTER_OTLP_TRACES_HEADERS,
            environ.get(OTEL_EXPORTER_OTLP_HEADERS, ""),
        )
        self._headers = headers or parse_env_headers(
            headers_string, liberal=True
        )
        self._timeout = timeout or float(
            environ.get(
                OTEL_EXPORTER_OTLP_TRACES_TIMEOUT,
                environ.get(OTEL_EXPORTER_OTLP_TIMEOUT, DEFAULT_TIMEOUT),
            )
        )
        self._compression = compression or _compression_from_env()
        self._session = (
            session
            or _load_session_from_envvar(
                _OTEL_PYTHON_EXPORTER_OTLP_HTTP_TRACES_CREDENTIAL_PROVIDER
            )
            or requests.Session()
        )
        # Apply the protocol defaults first, then the user headers so users
        # can override our defaults.  (The original applied the user headers
        # an extra time before the defaults; one post-default update yields
        # the same final header set.)
        self._session.headers.update(_OTLP_HTTP_HEADERS)
        self._session.headers.update(self._headers)
        if self._compression is not Compression.NoCompression:
            self._session.headers.update(
                {"Content-Encoding": self._compression.value}
            )
        self._shutdown = False

    def _export(
        self, serialized_data: bytes, timeout_sec: Optional[float] = None
    ):
        """POST one serialized payload; returns the ``requests`` response.

        The payload is compressed according to ``self._compression`` before
        sending.  ``timeout_sec`` defaults to the configured exporter timeout.
        """
        data = serialized_data
        if self._compression == Compression.Gzip:
            gzip_data = BytesIO()
            with gzip.GzipFile(fileobj=gzip_data, mode="w") as gzip_stream:
                gzip_stream.write(serialized_data)
            data = gzip_data.getvalue()
        elif self._compression == Compression.Deflate:
            data = zlib.compress(serialized_data)

        if timeout_sec is None:
            timeout_sec = self._timeout

        # By default, keep-alive is enabled in Session's request
        # headers. Backends may choose to close the connection
        # while a post happens which causes an unhandled
        # exception. This try/except will retry the post on such exceptions
        try:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        except ConnectionError:
            resp = self._session.post(
                url=self._endpoint,
                data=data,
                verify=self._certificate_file,
                timeout=timeout_sec,
                cert=self._client_cert,
            )
        return resp

    def export(self, spans: Sequence[ReadableSpan]) -> SpanExportResult:
        """Serialize and send ``spans``, retrying transient failures.

        Retries use exponential back-off with jitter, bounded by
        ``_MAX_RETRYS``, the configured timeout deadline, and shutdown.
        """
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring batch")
            return SpanExportResult.FAILURE

        # NOTE(review): uses SerializePartialToString here, while the
        # metrics exporter uses SerializeToString — confirm this difference
        # is intentional before unifying.
        serialized_data = encode_spans(spans).SerializePartialToString()
        deadline_sec = time() + self._timeout
        for retry_num in range(_MAX_RETRYS):
            resp = self._export(serialized_data, deadline_sec - time())
            if resp.ok:
                return SpanExportResult.SUCCESS
            # multiplying by a random number between .8 and 1.2 introduces a +/20% jitter to each backoff.
            backoff_seconds = 2**retry_num * random.uniform(0.8, 1.2)
            if (
                not _is_retryable(resp)
                or retry_num + 1 == _MAX_RETRYS
                or backoff_seconds > (deadline_sec - time())
                or self._shutdown
            ):
                _logger.error(
                    "Failed to export span batch code: %s, reason: %s",
                    resp.status_code,
                    resp.text,
                )
                return SpanExportResult.FAILURE
            _logger.warning(
                "Transient error %s encountered while exporting span batch, retrying in %.2fs.",
                resp.reason,
                backoff_seconds,
            )
            # Sleep for the back-off, but wake immediately on shutdown().
            shutdown = self._shutdown_in_progress.wait(backoff_seconds)
            if shutdown:
                _logger.warning("Shutdown in progress, aborting retry.")
                break
        return SpanExportResult.FAILURE

    def shutdown(self):
        """Mark the exporter shut down and close the HTTP session."""
        if self._shutdown:
            _logger.warning("Exporter already shutdown, ignoring call")
            return
        self._shutdown = True
        self._shutdown_in_progress.set()
        self._session.close()

    def force_flush(self, timeout_millis: int = 30000) -> bool:
        """Nothing is buffered in this exporter, so this method does nothing."""
        return True
|
||||
|
||||
|
||||
def _compression_from_env() -> Compression:
    """Resolve payload compression from env vars (traces-specific wins)."""
    raw_value = environ.get(
        OTEL_EXPORTER_OTLP_TRACES_COMPRESSION,
        environ.get(OTEL_EXPORTER_OTLP_COMPRESSION, "none"),
    )
    normalized = raw_value.lower().strip()
    return Compression(normalized)
|
||||
|
||||
|
||||
def _append_trace_path(endpoint: str) -> str:
    """Join the traces export path onto *endpoint* without doubling '/'."""
    separator = "" if endpoint.endswith("/") else "/"
    return f"{endpoint}{separator}{DEFAULT_TRACES_EXPORT_PATH}"
|
||||
@@ -1,66 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
import logging # noqa: F401
|
||||
from collections import abc # noqa: F401
|
||||
from typing import Any, List, Optional, Sequence # noqa: F401
|
||||
|
||||
from opentelemetry.proto.collector.trace.v1.trace_service_pb2 import ( # noqa: F401
|
||||
ExportTraceServiceRequest as PB2ExportTraceServiceRequest,
|
||||
)
|
||||
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
|
||||
AnyValue as PB2AnyValue,
|
||||
)
|
||||
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
|
||||
ArrayValue as PB2ArrayValue,
|
||||
)
|
||||
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
|
||||
InstrumentationScope as PB2InstrumentationScope,
|
||||
)
|
||||
from opentelemetry.proto.common.v1.common_pb2 import ( # noqa: F401
|
||||
KeyValue as PB2KeyValue,
|
||||
)
|
||||
from opentelemetry.proto.resource.v1.resource_pb2 import ( # noqa: F401
|
||||
Resource as PB2Resource,
|
||||
)
|
||||
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
|
||||
ResourceSpans as PB2ResourceSpans,
|
||||
)
|
||||
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
|
||||
ScopeSpans as PB2ScopeSpans,
|
||||
)
|
||||
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
|
||||
Span as PB2SPan,
|
||||
)
|
||||
from opentelemetry.proto.trace.v1.trace_pb2 import ( # noqa: F401
|
||||
Status as PB2Status,
|
||||
)
|
||||
from opentelemetry.sdk.trace import (
|
||||
Event, # noqa: F401
|
||||
Resource, # noqa: F401
|
||||
)
|
||||
from opentelemetry.sdk.trace import Span as SDKSpan # noqa: F401
|
||||
from opentelemetry.sdk.util.instrumentation import ( # noqa: F401
|
||||
InstrumentationScope,
|
||||
)
|
||||
from opentelemetry.trace import (
|
||||
Link, # noqa: F401
|
||||
SpanKind, # noqa: F401
|
||||
)
|
||||
from opentelemetry.trace.span import ( # noqa: F401
|
||||
SpanContext,
|
||||
Status,
|
||||
TraceState,
|
||||
)
|
||||
from opentelemetry.util.types import Attributes # noqa: F401
|
||||
@@ -1,15 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
__version__ = "1.38.0"
|
||||
@@ -1,15 +0,0 @@
|
||||
# Copyright The OpenTelemetry Authors
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
__version__ = "1.38.0"
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -12,4 +12,4 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
__version__ = "1.38.0"
|
||||
__version__ = "1.39.1"
|
||||
|
||||
@@ -22,13 +22,13 @@ from __future__ import annotations
|
||||
import logging
|
||||
import logging.config
|
||||
import os
|
||||
import warnings
|
||||
from abc import ABC, abstractmethod
|
||||
from os import environ
|
||||
from typing import Any, Callable, Mapping, Sequence, Type, Union
|
||||
|
||||
from typing_extensions import Literal
|
||||
|
||||
from opentelemetry._events import set_event_logger_provider
|
||||
from opentelemetry._logs import set_logger_provider
|
||||
from opentelemetry.environment_variables import (
|
||||
OTEL_LOGS_EXPORTER,
|
||||
@@ -37,9 +37,11 @@ from opentelemetry.environment_variables import (
|
||||
OTEL_TRACES_EXPORTER,
|
||||
)
|
||||
from opentelemetry.metrics import set_meter_provider
|
||||
from opentelemetry.sdk._events import EventLoggerProvider
|
||||
from opentelemetry.sdk._logs import LoggerProvider, LoggingHandler
|
||||
from opentelemetry.sdk._logs.export import BatchLogRecordProcessor, LogExporter
|
||||
from opentelemetry.sdk._logs.export import (
|
||||
BatchLogRecordProcessor,
|
||||
LogRecordExporter,
|
||||
)
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
_OTEL_PYTHON_LOGGING_AUTO_INSTRUMENTATION_ENABLED,
|
||||
OTEL_EXPORTER_OTLP_LOGS_PROTOCOL,
|
||||
@@ -97,7 +99,7 @@ ExporterArgsMap = Mapping[
|
||||
Type[SpanExporter],
|
||||
Type[MetricExporter],
|
||||
Type[MetricReader],
|
||||
Type[LogExporter],
|
||||
Type[LogRecordExporter],
|
||||
],
|
||||
Mapping[str, Any],
|
||||
]
|
||||
@@ -250,7 +252,7 @@ def _init_metrics(
|
||||
|
||||
|
||||
def _init_logging(
|
||||
exporters: dict[str, Type[LogExporter]],
|
||||
exporters: dict[str, Type[LogRecordExporter]],
|
||||
resource: Resource | None = None,
|
||||
setup_logging_handler: bool = True,
|
||||
exporter_args_map: ExporterArgsMap | None = None,
|
||||
@@ -265,8 +267,19 @@ def _init_logging(
|
||||
BatchLogRecordProcessor(exporter_class(**exporter_args))
|
||||
)
|
||||
|
||||
event_logger_provider = EventLoggerProvider(logger_provider=provider)
|
||||
set_event_logger_provider(event_logger_provider)
|
||||
# silence warnings from internal users until we drop the deprecated Events API
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", category=DeprecationWarning)
|
||||
# pylint: disable=import-outside-toplevel
|
||||
from opentelemetry._events import ( # noqa: PLC0415
|
||||
set_event_logger_provider,
|
||||
)
|
||||
from opentelemetry.sdk._events import ( # noqa: PLC0415
|
||||
EventLoggerProvider,
|
||||
)
|
||||
|
||||
event_logger_provider = EventLoggerProvider(logger_provider=provider)
|
||||
set_event_logger_provider(event_logger_provider)
|
||||
|
||||
if setup_logging_handler:
|
||||
# Add OTel handler
|
||||
@@ -309,7 +322,7 @@ def _import_exporters(
|
||||
) -> tuple[
|
||||
dict[str, Type[SpanExporter]],
|
||||
dict[str, Union[Type[MetricExporter], Type[MetricReader]]],
|
||||
dict[str, Type[LogExporter]],
|
||||
dict[str, Type[LogRecordExporter]],
|
||||
]:
|
||||
trace_exporters = {}
|
||||
metric_exporters = {}
|
||||
@@ -345,7 +358,7 @@ def _import_exporters(
|
||||
) in _import_config_components(
|
||||
log_exporter_names, "opentelemetry_logs_exporter"
|
||||
):
|
||||
if issubclass(exporter_impl, LogExporter):
|
||||
if issubclass(exporter_impl, LogRecordExporter):
|
||||
log_exporters[exporter_name] = exporter_impl
|
||||
else:
|
||||
raise RuntimeError(f"{exporter_name} is not a log exporter")
|
||||
|
||||
@@ -13,26 +13,31 @@
|
||||
# limitations under the License.
|
||||
|
||||
import logging
|
||||
import warnings
|
||||
from time import time_ns
|
||||
from typing import Optional
|
||||
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from opentelemetry import trace
|
||||
from opentelemetry._events import Event
|
||||
from opentelemetry._events import EventLogger as APIEventLogger
|
||||
from opentelemetry._events import EventLoggerProvider as APIEventLoggerProvider
|
||||
from opentelemetry._logs import NoOpLogger, SeverityNumber, get_logger_provider
|
||||
from opentelemetry.sdk._logs import (
|
||||
LogDeprecatedInitWarning,
|
||||
Logger,
|
||||
LoggerProvider,
|
||||
from opentelemetry._logs import (
|
||||
LogRecord,
|
||||
NoOpLogger,
|
||||
SeverityNumber,
|
||||
get_logger_provider,
|
||||
)
|
||||
from opentelemetry.sdk._logs import Logger, LoggerProvider
|
||||
from opentelemetry.util.types import _ExtendedAttributes
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `Logger` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class EventLogger(APIEventLogger):
|
||||
def __init__(
|
||||
self,
|
||||
@@ -58,25 +63,24 @@ class EventLogger(APIEventLogger):
|
||||
return
|
||||
span_context = trace.get_current_span().get_span_context()
|
||||
|
||||
# silence deprecation warnings from internal users
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", category=LogDeprecatedInitWarning)
|
||||
|
||||
log_record = LogRecord(
|
||||
timestamp=event.timestamp or time_ns(),
|
||||
observed_timestamp=None,
|
||||
trace_id=event.trace_id or span_context.trace_id,
|
||||
span_id=event.span_id or span_context.span_id,
|
||||
trace_flags=event.trace_flags or span_context.trace_flags,
|
||||
severity_text=None,
|
||||
severity_number=event.severity_number or SeverityNumber.INFO,
|
||||
body=event.body,
|
||||
resource=getattr(self._logger, "resource", None),
|
||||
attributes=event.attributes,
|
||||
)
|
||||
log_record = LogRecord(
|
||||
timestamp=event.timestamp or time_ns(),
|
||||
observed_timestamp=None,
|
||||
trace_id=event.trace_id or span_context.trace_id,
|
||||
span_id=event.span_id or span_context.span_id,
|
||||
trace_flags=event.trace_flags or span_context.trace_flags,
|
||||
severity_text=None,
|
||||
severity_number=event.severity_number or SeverityNumber.INFO,
|
||||
body=event.body,
|
||||
attributes=event.attributes,
|
||||
)
|
||||
self._logger.emit(log_record)
|
||||
|
||||
|
||||
@deprecated(
|
||||
"You should use `LoggerProvider` instead. "
|
||||
"Deprecated since version 1.39.0 and will be removed in a future release."
|
||||
)
|
||||
class EventLoggerProvider(APIEventLoggerProvider):
|
||||
def __init__(self, logger_provider: Optional[LoggerProvider] = None):
|
||||
self._logger_provider = logger_provider or get_logger_provider()
|
||||
|
||||
@@ -12,27 +12,28 @@
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
|
||||
|
||||
from opentelemetry.sdk._logs._internal import (
|
||||
LogData,
|
||||
LogDeprecatedInitWarning,
|
||||
LogDroppedAttributesWarning,
|
||||
Logger,
|
||||
LoggerProvider,
|
||||
LoggingHandler,
|
||||
LogLimits,
|
||||
LogRecord,
|
||||
LogRecordDroppedAttributesWarning,
|
||||
LogRecordLimits,
|
||||
LogRecordProcessor,
|
||||
ReadableLogRecord,
|
||||
ReadWriteLogRecord,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"LogData",
|
||||
"Logger",
|
||||
"LoggerProvider",
|
||||
"LoggingHandler",
|
||||
"LogLimits",
|
||||
"LogRecord",
|
||||
"LogRecordLimits",
|
||||
"LogRecordProcessor",
|
||||
"LogDeprecatedInitWarning",
|
||||
"LogDroppedAttributesWarning",
|
||||
"LogRecordDroppedAttributesWarning",
|
||||
"ReadableLogRecord",
|
||||
"ReadWriteLogRecord",
|
||||
]
|
||||
|
||||
Binary file not shown.
@@ -22,6 +22,7 @@ import logging
|
||||
import threading
|
||||
import traceback
|
||||
import warnings
|
||||
from dataclasses import dataclass, field
|
||||
from os import environ
|
||||
from threading import Lock
|
||||
from time import time_ns
|
||||
@@ -31,8 +32,8 @@ from typing_extensions import deprecated
|
||||
|
||||
from opentelemetry._logs import Logger as APILogger
|
||||
from opentelemetry._logs import LoggerProvider as APILoggerProvider
|
||||
from opentelemetry._logs import LogRecord as APILogRecord
|
||||
from opentelemetry._logs import (
|
||||
LogRecord,
|
||||
NoOpLogger,
|
||||
SeverityNumber,
|
||||
get_logger,
|
||||
@@ -54,13 +55,9 @@ from opentelemetry.semconv.attributes import exception_attributes
|
||||
from opentelemetry.trace import (
|
||||
format_span_id,
|
||||
format_trace_id,
|
||||
get_current_span,
|
||||
)
|
||||
from opentelemetry.trace.span import TraceFlags
|
||||
from opentelemetry.util.types import AnyValue, _ExtendedAttributes
|
||||
|
||||
_logger = logging.getLogger(__name__)
|
||||
|
||||
_DEFAULT_OTEL_ATTRIBUTE_COUNT_LIMIT = 128
|
||||
_ENV_VALUE_UNSET = ""
|
||||
|
||||
@@ -72,7 +69,7 @@ class BytesEncoder(json.JSONEncoder):
|
||||
return super().default(o)
|
||||
|
||||
|
||||
class LogDroppedAttributesWarning(UserWarning):
|
||||
class LogRecordDroppedAttributesWarning(UserWarning):
|
||||
"""Custom warning to indicate dropped log attributes due to limits.
|
||||
|
||||
This class is used to filter and handle these specific warnings separately
|
||||
@@ -81,22 +78,17 @@ class LogDroppedAttributesWarning(UserWarning):
|
||||
"""
|
||||
|
||||
|
||||
warnings.simplefilter("once", LogDroppedAttributesWarning)
|
||||
warnings.simplefilter("once", LogRecordDroppedAttributesWarning)
|
||||
|
||||
|
||||
class LogDeprecatedInitWarning(UserWarning):
|
||||
"""Custom warning to indicate that deprecated and soon to be deprecated Log classes was used.
|
||||
|
||||
This class is used to filter and handle these specific warnings separately
|
||||
from other warnings, ensuring that they are only shown once without
|
||||
interfering with default user warnings.
|
||||
"""
|
||||
@deprecated(
|
||||
"Use LogRecordDroppedAttributesWarning. Since logs are not stable yet this WILL be removed in future releases."
|
||||
)
|
||||
class LogDroppedAttributesWarning(LogRecordDroppedAttributesWarning):
|
||||
pass
|
||||
|
||||
|
||||
warnings.simplefilter("once", LogDeprecatedInitWarning)
|
||||
|
||||
|
||||
class LogLimits:
|
||||
class LogRecordLimits:
|
||||
"""This class is based on a SpanLimits class in the Tracing module.
|
||||
|
||||
This class represents the limits that should be enforced on recorded data such as events, links, attributes etc.
|
||||
@@ -176,202 +168,125 @@ class LogLimits:
|
||||
return value
|
||||
|
||||
|
||||
class LogRecord(APILogRecord):
|
||||
"""A LogRecord instance represents an event being logged.
|
||||
@deprecated(
|
||||
"Use LogRecordLimits. Since logs are not stable yet this WILL be removed in future releases."
|
||||
)
|
||||
class LogLimits(LogRecordLimits):
|
||||
pass
|
||||
|
||||
LogRecord instances are created and emitted via `Logger`
|
||||
every time something is logged. They contain all the information
|
||||
pertinent to the event being logged.
|
||||
"""
|
||||
|
||||
@overload
|
||||
def __init__(
|
||||
self,
|
||||
timestamp: int | None = None,
|
||||
observed_timestamp: int | None = None,
|
||||
context: Context | None = None,
|
||||
severity_text: str | None = None,
|
||||
severity_number: SeverityNumber | None = None,
|
||||
body: AnyValue | None = None,
|
||||
resource: Resource | None = None,
|
||||
attributes: _ExtendedAttributes | None = None,
|
||||
limits: LogLimits | None = None,
|
||||
event_name: str | None = None,
|
||||
): ...
|
||||
@dataclass(frozen=True)
|
||||
class ReadableLogRecord:
|
||||
"""Readable LogRecord should be kept exactly in-sync with ReadWriteLogRecord, only difference is the frozen=True param."""
|
||||
|
||||
@overload
|
||||
@deprecated(
|
||||
"LogRecord init with `trace_id`, `span_id`, and/or `trace_flags` is deprecated since 1.35.0. Use `context` instead." # noqa: E501
|
||||
)
|
||||
def __init__(
|
||||
self,
|
||||
timestamp: int | None = None,
|
||||
observed_timestamp: int | None = None,
|
||||
trace_id: int | None = None,
|
||||
span_id: int | None = None,
|
||||
trace_flags: TraceFlags | None = None,
|
||||
severity_text: str | None = None,
|
||||
severity_number: SeverityNumber | None = None,
|
||||
body: AnyValue | None = None,
|
||||
resource: Resource | None = None,
|
||||
attributes: _ExtendedAttributes | None = None,
|
||||
limits: LogLimits | None = None,
|
||||
): ...
|
||||
log_record: LogRecord
|
||||
resource: Resource
|
||||
instrumentation_scope: InstrumentationScope | None = None
|
||||
limits: LogRecordLimits | None = None
|
||||
|
||||
def __init__( # pylint:disable=too-many-locals
|
||||
self,
|
||||
timestamp: int | None = None,
|
||||
observed_timestamp: int | None = None,
|
||||
context: Context | None = None,
|
||||
trace_id: int | None = None,
|
||||
span_id: int | None = None,
|
||||
trace_flags: TraceFlags | None = None,
|
||||
severity_text: str | None = None,
|
||||
severity_number: SeverityNumber | None = None,
|
||||
body: AnyValue | None = None,
|
||||
resource: Resource | None = None,
|
||||
attributes: _ExtendedAttributes | None = None,
|
||||
limits: LogLimits | None = None,
|
||||
event_name: str | None = None,
|
||||
):
|
||||
warnings.warn(
|
||||
"LogRecord will be removed in 1.39.0 and replaced by ReadWriteLogRecord and ReadableLogRecord",
|
||||
LogDeprecatedInitWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
if not context:
|
||||
context = get_current()
|
||||
|
||||
if trace_id or span_id or trace_flags:
|
||||
warnings.warn(
|
||||
"LogRecord init with `trace_id`, `span_id`, and/or `trace_flags` is deprecated since 1.35.0. Use `context` instead.",
|
||||
LogDeprecatedInitWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
span = get_current_span(context)
|
||||
span_context = span.get_span_context()
|
||||
|
||||
# Use default LogLimits if none provided
|
||||
if limits is None:
|
||||
limits = LogLimits()
|
||||
|
||||
super().__init__(
|
||||
**{
|
||||
"timestamp": timestamp,
|
||||
"observed_timestamp": observed_timestamp,
|
||||
"context": context,
|
||||
"trace_id": trace_id or span_context.trace_id,
|
||||
"span_id": span_id or span_context.span_id,
|
||||
"trace_flags": trace_flags or span_context.trace_flags,
|
||||
"severity_text": severity_text,
|
||||
"severity_number": severity_number,
|
||||
"body": body,
|
||||
"attributes": BoundedAttributes(
|
||||
maxlen=limits.max_attributes,
|
||||
attributes=attributes if bool(attributes) else None,
|
||||
immutable=False,
|
||||
max_value_len=limits.max_attribute_length,
|
||||
extended_attributes=True,
|
||||
),
|
||||
"event_name": event_name,
|
||||
}
|
||||
)
|
||||
self.resource = (
|
||||
resource if isinstance(resource, Resource) else Resource.create({})
|
||||
)
|
||||
if self.dropped_attributes > 0:
|
||||
warnings.warn(
|
||||
"Log record attributes were dropped due to limits",
|
||||
LogDroppedAttributesWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, LogRecord):
|
||||
return NotImplemented
|
||||
return self.__dict__ == other.__dict__
|
||||
@property
|
||||
def dropped_attributes(self) -> int:
|
||||
if isinstance(self.log_record.attributes, BoundedAttributes):
|
||||
return self.log_record.attributes.dropped
|
||||
return 0
|
||||
|
||||
def to_json(self, indent: int | None = 4) -> str:
|
||||
return json.dumps(
|
||||
{
|
||||
"body": self.body,
|
||||
"severity_number": self.severity_number.value
|
||||
if self.severity_number is not None
|
||||
"body": self.log_record.body,
|
||||
"severity_number": self.log_record.severity_number.value
|
||||
if self.log_record.severity_number is not None
|
||||
else None,
|
||||
"severity_text": self.severity_text,
|
||||
"severity_text": self.log_record.severity_text,
|
||||
"attributes": (
|
||||
dict(self.attributes) if bool(self.attributes) else None
|
||||
dict(self.log_record.attributes)
|
||||
if bool(self.log_record.attributes)
|
||||
else None
|
||||
),
|
||||
"dropped_attributes": self.dropped_attributes,
|
||||
"timestamp": ns_to_iso_str(self.timestamp)
|
||||
if self.timestamp is not None
|
||||
"timestamp": ns_to_iso_str(self.log_record.timestamp)
|
||||
if self.log_record.timestamp is not None
|
||||
else None,
|
||||
"observed_timestamp": ns_to_iso_str(self.observed_timestamp),
|
||||
"observed_timestamp": ns_to_iso_str(
|
||||
self.log_record.observed_timestamp
|
||||
),
|
||||
"trace_id": (
|
||||
f"0x{format_trace_id(self.trace_id)}"
|
||||
if self.trace_id is not None
|
||||
f"0x{format_trace_id(self.log_record.trace_id)}"
|
||||
if self.log_record.trace_id is not None
|
||||
else ""
|
||||
),
|
||||
"span_id": (
|
||||
f"0x{format_span_id(self.span_id)}"
|
||||
if self.span_id is not None
|
||||
f"0x{format_span_id(self.log_record.span_id)}"
|
||||
if self.log_record.span_id is not None
|
||||
else ""
|
||||
),
|
||||
"trace_flags": self.trace_flags,
|
||||
"trace_flags": self.log_record.trace_flags,
|
||||
"resource": json.loads(self.resource.to_json()),
|
||||
"event_name": self.event_name if self.event_name else "",
|
||||
"event_name": self.log_record.event_name
|
||||
if self.log_record.event_name
|
||||
else "",
|
||||
},
|
||||
indent=indent,
|
||||
cls=BytesEncoder,
|
||||
)
|
||||
|
||||
|
||||
@dataclass
|
||||
class ReadWriteLogRecord:
|
||||
"""A ReadWriteLogRecord instance represents an event being logged.
|
||||
ReadWriteLogRecord instances are created and emitted via `Logger`
|
||||
every time something is logged. They contain all the information
|
||||
pertinent to the event being logged.
|
||||
"""
|
||||
|
||||
log_record: LogRecord
|
||||
resource: Resource | None = Resource.create({})
|
||||
instrumentation_scope: InstrumentationScope | None = None
|
||||
limits: LogRecordLimits = field(default_factory=LogRecordLimits)
|
||||
|
||||
def __post_init__(self):
|
||||
self.log_record.attributes = BoundedAttributes(
|
||||
maxlen=self.limits.max_attributes,
|
||||
attributes=self.log_record.attributes
|
||||
if self.log_record.attributes
|
||||
else None,
|
||||
immutable=False,
|
||||
max_value_len=self.limits.max_attribute_length,
|
||||
extended_attributes=True,
|
||||
)
|
||||
if self.dropped_attributes > 0:
|
||||
warnings.warn(
|
||||
"Log record attributes were dropped due to limits",
|
||||
LogRecordDroppedAttributesWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
if not isinstance(other, ReadWriteLogRecord):
|
||||
return NotImplemented
|
||||
return self.__dict__ == other.__dict__
|
||||
|
||||
@property
|
||||
def dropped_attributes(self) -> int:
|
||||
attributes: BoundedAttributes = cast(
|
||||
BoundedAttributes, self.attributes
|
||||
)
|
||||
if attributes:
|
||||
return attributes.dropped
|
||||
if isinstance(self.log_record.attributes, BoundedAttributes):
|
||||
return self.log_record.attributes.dropped
|
||||
return 0
|
||||
|
||||
@classmethod
|
||||
def _from_api_log_record(
|
||||
cls, *, record: APILogRecord, resource: Resource
|
||||
) -> LogRecord:
|
||||
cls,
|
||||
*,
|
||||
record: LogRecord,
|
||||
resource: Resource,
|
||||
instrumentation_scope: InstrumentationScope | None = None,
|
||||
) -> ReadWriteLogRecord:
|
||||
return cls(
|
||||
timestamp=record.timestamp,
|
||||
observed_timestamp=record.observed_timestamp,
|
||||
context=record.context,
|
||||
trace_id=record.trace_id,
|
||||
span_id=record.span_id,
|
||||
trace_flags=record.trace_flags,
|
||||
severity_text=record.severity_text,
|
||||
severity_number=record.severity_number,
|
||||
body=record.body,
|
||||
attributes=record.attributes,
|
||||
event_name=record.event_name,
|
||||
log_record=record,
|
||||
resource=resource,
|
||||
instrumentation_scope=instrumentation_scope,
|
||||
)
|
||||
|
||||
|
||||
class LogData:
|
||||
"""Readable LogRecord data plus associated InstrumentationLibrary."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
log_record: LogRecord,
|
||||
instrumentation_scope: InstrumentationScope,
|
||||
):
|
||||
warnings.warn(
|
||||
"LogData will be removed in 1.39.0 and replaced by ReadWriteLogRecord and ReadableLogRecord",
|
||||
LogDeprecatedInitWarning,
|
||||
stacklevel=2,
|
||||
)
|
||||
self.log_record = log_record
|
||||
self.instrumentation_scope = instrumentation_scope
|
||||
|
||||
|
||||
class LogRecordProcessor(abc.ABC):
|
||||
"""Interface to hook the log record emitting action.
|
||||
|
||||
@@ -381,15 +296,15 @@ class LogRecordProcessor(abc.ABC):
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def on_emit(self, log_data: LogData):
|
||||
"""Emits the `LogData`"""
|
||||
def on_emit(self, log_record: ReadWriteLogRecord):
|
||||
"""Emits the `ReadWriteLogRecord`"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def shutdown(self):
|
||||
"""Called when a :class:`opentelemetry.sdk._logs.Logger` is shutdown"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def force_flush(self, timeout_millis: int = 30000):
|
||||
def force_flush(self, timeout_millis: int = 30000) -> bool:
|
||||
"""Export all the received logs to the configured Exporter that have not yet
|
||||
been exported.
|
||||
|
||||
@@ -425,9 +340,9 @@ class SynchronousMultiLogRecordProcessor(LogRecordProcessor):
|
||||
with self._lock:
|
||||
self._log_record_processors += (log_record_processor,)
|
||||
|
||||
def on_emit(self, log_data: LogData) -> None:
|
||||
def on_emit(self, log_record: ReadWriteLogRecord) -> None:
|
||||
for lp in self._log_record_processors:
|
||||
lp.on_emit(log_data)
|
||||
lp.on_emit(log_record)
|
||||
|
||||
def shutdown(self) -> None:
|
||||
"""Shutdown the log processors one by one"""
|
||||
@@ -499,8 +414,8 @@ class ConcurrentMultiLogRecordProcessor(LogRecordProcessor):
|
||||
for future in futures:
|
||||
future.result()
|
||||
|
||||
def on_emit(self, log_data: LogData):
|
||||
self._submit_and_wait(lambda lp: lp.on_emit, log_data)
|
||||
def on_emit(self, log_record: ReadWriteLogRecord):
|
||||
self._submit_and_wait(lambda lp: lp.on_emit, log_record)
|
||||
|
||||
def shutdown(self):
|
||||
self._submit_and_wait(lambda lp: lp.shutdown)
|
||||
@@ -575,8 +490,8 @@ class LoggingHandler(logging.Handler):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
level=logging.NOTSET,
|
||||
logger_provider=None,
|
||||
level: int = logging.NOTSET,
|
||||
logger_provider: APILoggerProvider | None = None,
|
||||
) -> None:
|
||||
super().__init__(level=level)
|
||||
self._logger_provider = logger_provider or get_logger_provider()
|
||||
@@ -609,7 +524,7 @@ class LoggingHandler(logging.Handler):
|
||||
)
|
||||
return attributes
|
||||
|
||||
def _translate(self, record: logging.LogRecord) -> dict:
|
||||
def _translate(self, record: logging.LogRecord) -> LogRecord:
|
||||
timestamp = int(record.created * 1e9)
|
||||
observered_timestamp = time_ns()
|
||||
attributes = self._get_attributes(record)
|
||||
@@ -643,15 +558,15 @@ class LoggingHandler(logging.Handler):
|
||||
"WARN" if record.levelname == "WARNING" else record.levelname
|
||||
)
|
||||
|
||||
return {
|
||||
"timestamp": timestamp,
|
||||
"observed_timestamp": observered_timestamp,
|
||||
"context": get_current() or None,
|
||||
"severity_text": level_name,
|
||||
"severity_number": severity_number,
|
||||
"body": body,
|
||||
"attributes": attributes,
|
||||
}
|
||||
return LogRecord(
|
||||
timestamp=timestamp,
|
||||
observed_timestamp=observered_timestamp,
|
||||
context=get_current() or None,
|
||||
severity_text=level_name,
|
||||
severity_number=severity_number,
|
||||
body=body,
|
||||
attributes=attributes,
|
||||
)
|
||||
|
||||
def emit(self, record: logging.LogRecord) -> None:
|
||||
"""
|
||||
@@ -661,18 +576,18 @@ class LoggingHandler(logging.Handler):
|
||||
"""
|
||||
logger = get_logger(record.name, logger_provider=self._logger_provider)
|
||||
if not isinstance(logger, NoOpLogger):
|
||||
logger.emit(**self._translate(record))
|
||||
logger.emit(self._translate(record))
|
||||
|
||||
def flush(self) -> None:
|
||||
"""
|
||||
Flushes the logging output. Skip flushing if logging_provider has no force_flush method.
|
||||
"""
|
||||
if hasattr(self._logger_provider, "force_flush") and callable(
|
||||
self._logger_provider.force_flush
|
||||
self._logger_provider.force_flush # type: ignore[reportAttributeAccessIssue]
|
||||
):
|
||||
# This is done in a separate thread to avoid a potential deadlock, for
|
||||
# details see https://github.com/open-telemetry/opentelemetry-python/pull/4636.
|
||||
thread = threading.Thread(target=self._logger_provider.force_flush)
|
||||
thread = threading.Thread(target=self._logger_provider.force_flush) # type: ignore[reportAttributeAccessIssue]
|
||||
thread.start()
|
||||
|
||||
|
||||
@@ -700,9 +615,10 @@ class Logger(APILogger):
|
||||
def resource(self):
|
||||
return self._resource
|
||||
|
||||
@overload
|
||||
# pylint: disable=arguments-differ
|
||||
def emit(
|
||||
self,
|
||||
record: LogRecord | None = None,
|
||||
*,
|
||||
timestamp: int | None = None,
|
||||
observed_timestamp: int | None = None,
|
||||
@@ -712,55 +628,41 @@ class Logger(APILogger):
|
||||
body: AnyValue | None = None,
|
||||
attributes: _ExtendedAttributes | None = None,
|
||||
event_name: str | None = None,
|
||||
) -> None: ...
|
||||
|
||||
@overload
|
||||
def emit( # pylint:disable=arguments-differ
|
||||
self,
|
||||
record: APILogRecord,
|
||||
) -> None: ...
|
||||
|
||||
def emit(
|
||||
self,
|
||||
record: APILogRecord | None = None,
|
||||
*,
|
||||
timestamp: int | None = None,
|
||||
observed_timestamp: int | None = None,
|
||||
context: Context | None = None,
|
||||
severity_text: str | None = None,
|
||||
severity_number: SeverityNumber | None = None,
|
||||
body: AnyValue | None = None,
|
||||
attributes: _ExtendedAttributes | None = None,
|
||||
event_name: str | None = None,
|
||||
):
|
||||
"""Emits the :class:`LogData` by associating :class:`LogRecord`
|
||||
and instrumentation info.
|
||||
) -> None:
|
||||
"""Emits the :class:`ReadWriteLogRecord` by setting instrumentation scope
|
||||
and forwarding to the processor.
|
||||
"""
|
||||
|
||||
# silence deprecation warnings from internal users
|
||||
with warnings.catch_warnings():
|
||||
warnings.simplefilter("ignore", category=LogDeprecatedInitWarning)
|
||||
if not record:
|
||||
record = LogRecord(
|
||||
timestamp=timestamp,
|
||||
observed_timestamp=observed_timestamp,
|
||||
context=context,
|
||||
severity_text=severity_text,
|
||||
severity_number=severity_number,
|
||||
body=body,
|
||||
attributes=attributes,
|
||||
event_name=event_name,
|
||||
resource=self._resource,
|
||||
)
|
||||
elif not isinstance(record, LogRecord):
|
||||
# If a record is provided, use it directly
|
||||
if record is not None:
|
||||
if not isinstance(record, ReadWriteLogRecord):
|
||||
# pylint:disable=protected-access
|
||||
record = LogRecord._from_api_log_record(
|
||||
record=record, resource=self._resource
|
||||
writable_record = ReadWriteLogRecord._from_api_log_record(
|
||||
record=record,
|
||||
resource=self._resource,
|
||||
instrumentation_scope=self._instrumentation_scope,
|
||||
)
|
||||
else:
|
||||
writable_record = record
|
||||
else:
|
||||
# Create a record from individual parameters
|
||||
log_record = LogRecord(
|
||||
timestamp=timestamp,
|
||||
observed_timestamp=observed_timestamp,
|
||||
context=context,
|
||||
severity_number=severity_number,
|
||||
severity_text=severity_text,
|
||||
body=body,
|
||||
attributes=attributes,
|
||||
event_name=event_name,
|
||||
)
|
||||
# pylint:disable=protected-access
|
||||
writable_record = ReadWriteLogRecord._from_api_log_record(
|
||||
record=log_record,
|
||||
resource=self._resource,
|
||||
instrumentation_scope=self._instrumentation_scope,
|
||||
)
|
||||
|
||||
log_data = LogData(record, self._instrumentation_scope)
|
||||
|
||||
self._multi_log_record_processor.on_emit(log_data)
|
||||
self._multi_log_record_processor.on_emit(writable_record)
|
||||
|
||||
|
||||
class LoggerProvider(APILoggerProvider):
|
||||
@@ -831,7 +733,7 @@ class LoggerProvider(APILoggerProvider):
|
||||
version: str | None = None,
|
||||
schema_url: str | None = None,
|
||||
attributes: _ExtendedAttributes | None = None,
|
||||
) -> Logger:
|
||||
) -> APILogger:
|
||||
if self._disabled:
|
||||
return NoOpLogger(
|
||||
name,
|
||||
|
||||
Binary file not shown.
@@ -20,6 +20,8 @@ import sys
|
||||
from os import environ, linesep
|
||||
from typing import IO, Callable, Optional, Sequence
|
||||
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from opentelemetry.context import (
|
||||
_SUPPRESS_INSTRUMENTATION_KEY,
|
||||
attach,
|
||||
@@ -27,9 +29,9 @@ from opentelemetry.context import (
|
||||
set_value,
|
||||
)
|
||||
from opentelemetry.sdk._logs import (
|
||||
LogData,
|
||||
LogRecord,
|
||||
LogRecordProcessor,
|
||||
ReadableLogRecord,
|
||||
ReadWriteLogRecord,
|
||||
)
|
||||
from opentelemetry.sdk._shared_internal import BatchProcessor, DuplicateFilter
|
||||
from opentelemetry.sdk.environment_variables import (
|
||||
@@ -38,6 +40,7 @@ from opentelemetry.sdk.environment_variables import (
|
||||
OTEL_BLRP_MAX_QUEUE_SIZE,
|
||||
OTEL_BLRP_SCHEDULE_DELAY,
|
||||
)
|
||||
from opentelemetry.sdk.resources import Resource
|
||||
|
||||
_DEFAULT_SCHEDULE_DELAY_MILLIS = 5000
|
||||
_DEFAULT_MAX_EXPORT_BATCH_SIZE = 512
|
||||
@@ -50,12 +53,20 @@ _logger = logging.getLogger(__name__)
|
||||
_logger.addFilter(DuplicateFilter())
|
||||
|
||||
|
||||
class LogRecordExportResult(enum.Enum):
|
||||
SUCCESS = 0
|
||||
FAILURE = 1
|
||||
|
||||
|
||||
@deprecated(
|
||||
"Use LogRecordExportResult. Since logs are not stable yet this WILL be removed in future releases."
|
||||
)
|
||||
class LogExportResult(enum.Enum):
|
||||
SUCCESS = 0
|
||||
FAILURE = 1
|
||||
|
||||
|
||||
class LogExporter(abc.ABC):
|
||||
class LogRecordExporter(abc.ABC):
|
||||
"""Interface for exporting logs.
|
||||
Interface to be implemented by services that want to export logs received
|
||||
in their own format.
|
||||
@@ -64,10 +75,12 @@ class LogExporter(abc.ABC):
|
||||
"""
|
||||
|
||||
@abc.abstractmethod
|
||||
def export(self, batch: Sequence[LogData]):
|
||||
def export(
|
||||
self, batch: Sequence[ReadableLogRecord]
|
||||
) -> LogRecordExportResult:
|
||||
"""Exports a batch of logs.
|
||||
Args:
|
||||
batch: The list of `LogData` objects to be exported
|
||||
batch: The list of `ReadableLogRecord` objects to be exported
|
||||
Returns:
|
||||
The result of the export
|
||||
"""
|
||||
@@ -80,8 +93,15 @@ class LogExporter(abc.ABC):
|
||||
"""
|
||||
|
||||
|
||||
class ConsoleLogExporter(LogExporter):
|
||||
"""Implementation of :class:`LogExporter` that prints log records to the
|
||||
@deprecated(
|
||||
"Use LogRecordExporter. Since logs are not stable yet this WILL be removed in future releases."
|
||||
)
|
||||
class LogExporter(LogRecordExporter):
|
||||
pass
|
||||
|
||||
|
||||
class ConsoleLogRecordExporter(LogRecordExporter):
|
||||
"""Implementation of :class:`LogRecordExporter` that prints log records to the
|
||||
console.
|
||||
|
||||
This class can be used for diagnostic purposes. It prints the exported
|
||||
@@ -91,39 +111,59 @@ class ConsoleLogExporter(LogExporter):
|
||||
def __init__(
|
||||
self,
|
||||
out: IO = sys.stdout,
|
||||
formatter: Callable[[LogRecord], str] = lambda record: record.to_json()
|
||||
+ linesep,
|
||||
formatter: Callable[
|
||||
[ReadableLogRecord], str
|
||||
] = lambda record: record.to_json() + linesep,
|
||||
):
|
||||
self.out = out
|
||||
self.formatter = formatter
|
||||
|
||||
def export(self, batch: Sequence[LogData]):
|
||||
for data in batch:
|
||||
self.out.write(self.formatter(data.log_record))
|
||||
def export(self, batch: Sequence[ReadableLogRecord]):
|
||||
for log_record in batch:
|
||||
self.out.write(self.formatter(log_record))
|
||||
self.out.flush()
|
||||
return LogExportResult.SUCCESS
|
||||
return LogRecordExportResult.SUCCESS
|
||||
|
||||
def shutdown(self):
|
||||
pass
|
||||
|
||||
|
||||
@deprecated(
|
||||
"Use ConsoleLogRecordExporter. Since logs are not stable yet this WILL be removed in future releases."
|
||||
)
|
||||
class ConsoleLogExporter(ConsoleLogRecordExporter):
|
||||
pass
|
||||
|
||||
|
||||
class SimpleLogRecordProcessor(LogRecordProcessor):
|
||||
"""This is an implementation of LogRecordProcessor which passes
|
||||
received logs in the export-friendly LogData representation to the
|
||||
configured LogExporter, as soon as they are emitted.
|
||||
received logs directly to the configured LogRecordExporter, as soon as they are emitted.
|
||||
"""
|
||||
|
||||
def __init__(self, exporter: LogExporter):
|
||||
def __init__(self, exporter: LogRecordExporter):
|
||||
self._exporter = exporter
|
||||
self._shutdown = False
|
||||
|
||||
def on_emit(self, log_data: LogData):
|
||||
def on_emit(self, log_record: ReadWriteLogRecord):
|
||||
if self._shutdown:
|
||||
_logger.warning("Processor is already shutdown, ignoring call")
|
||||
return
|
||||
token = attach(set_value(_SUPPRESS_INSTRUMENTATION_KEY, True))
|
||||
try:
|
||||
self._exporter.export((log_data,))
|
||||
# Convert ReadWriteLogRecord to ReadableLogRecord before exporting
|
||||
# Note: resource should not be None at this point as it's set during Logger.emit()
|
||||
resource = (
|
||||
log_record.resource
|
||||
if log_record.resource is not None
|
||||
else Resource.create({})
|
||||
)
|
||||
readable_log_record = ReadableLogRecord(
|
||||
log_record=log_record.log_record,
|
||||
resource=resource,
|
||||
instrumentation_scope=log_record.instrumentation_scope,
|
||||
limits=log_record.limits,
|
||||
)
|
||||
self._exporter.export((readable_log_record,))
|
||||
except Exception: # pylint: disable=broad-exception-caught
|
||||
_logger.exception("Exception while exporting logs.")
|
||||
detach(token)
|
||||
@@ -138,8 +178,7 @@ class SimpleLogRecordProcessor(LogRecordProcessor):
|
||||
|
||||
class BatchLogRecordProcessor(LogRecordProcessor):
|
||||
"""This is an implementation of LogRecordProcessor which creates batches of
|
||||
received logs in the export-friendly LogData representation and
|
||||
send to the configured LogExporter, as soon as they are emitted.
|
||||
received logs and sends them to the configured LogRecordExporter.
|
||||
|
||||
`BatchLogRecordProcessor` is configurable with the following environment
|
||||
variables which correspond to constructor parameters:
|
||||
@@ -154,7 +193,7 @@ class BatchLogRecordProcessor(LogRecordProcessor):
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
exporter: LogExporter,
|
||||
exporter: LogRecordExporter,
|
||||
schedule_delay_millis: float | None = None,
|
||||
max_export_batch_size: int | None = None,
|
||||
export_timeout_millis: float | None = None,
|
||||
@@ -191,8 +230,21 @@ class BatchLogRecordProcessor(LogRecordProcessor):
|
||||
"Log",
|
||||
)
|
||||
|
||||
def on_emit(self, log_data: LogData) -> None:
|
||||
return self._batch_processor.emit(log_data)
|
||||
def on_emit(self, log_record: ReadWriteLogRecord) -> None:
|
||||
# Convert ReadWriteLogRecord to ReadableLogRecord before passing to BatchProcessor
|
||||
# Note: resource should not be None at this point as it's set during Logger.emit()
|
||||
resource = (
|
||||
log_record.resource
|
||||
if log_record.resource is not None
|
||||
else Resource.create({})
|
||||
)
|
||||
readable_log_record = ReadableLogRecord(
|
||||
log_record=log_record.log_record,
|
||||
resource=resource,
|
||||
instrumentation_scope=log_record.instrumentation_scope,
|
||||
limits=log_record.limits,
|
||||
)
|
||||
return self._batch_processor.emit(readable_log_record)
|
||||
|
||||
def shutdown(self):
|
||||
return self._batch_processor.shutdown()
|
||||
|
||||
Binary file not shown.
Binary file not shown.
@@ -15,12 +15,17 @@
|
||||
import threading
|
||||
import typing
|
||||
|
||||
from opentelemetry.sdk._logs import LogData
|
||||
from opentelemetry.sdk._logs.export import LogExporter, LogExportResult
|
||||
from typing_extensions import deprecated
|
||||
|
||||
from opentelemetry.sdk._logs import ReadableLogRecord
|
||||
from opentelemetry.sdk._logs.export import (
|
||||
LogRecordExporter,
|
||||
LogRecordExportResult,
|
||||
)
|
||||
|
||||
|
||||
class InMemoryLogExporter(LogExporter):
|
||||
"""Implementation of :class:`.LogExporter` that stores logs in memory.
|
||||
class InMemoryLogRecordExporter(LogRecordExporter):
|
||||
"""Implementation of :class:`.LogRecordExporter` that stores logs in memory.
|
||||
|
||||
This class can be used for testing purposes. It stores the exported logs
|
||||
in a list in memory that can be retrieved using the
|
||||
@@ -36,16 +41,25 @@ class InMemoryLogExporter(LogExporter):
|
||||
with self._lock:
|
||||
self._logs.clear()
|
||||
|
||||
def get_finished_logs(self) -> typing.Tuple[LogData, ...]:
|
||||
def get_finished_logs(self) -> typing.Tuple[ReadableLogRecord, ...]:
|
||||
with self._lock:
|
||||
return tuple(self._logs)
|
||||
|
||||
def export(self, batch: typing.Sequence[LogData]) -> LogExportResult:
|
||||
def export(
|
||||
self, batch: typing.Sequence[ReadableLogRecord]
|
||||
) -> LogRecordExportResult:
|
||||
if self._stopped:
|
||||
return LogExportResult.FAILURE
|
||||
return LogRecordExportResult.FAILURE
|
||||
with self._lock:
|
||||
self._logs.extend(batch)
|
||||
return LogExportResult.SUCCESS
|
||||
return LogRecordExportResult.SUCCESS
|
||||
|
||||
def shutdown(self) -> None:
|
||||
self._stopped = True
|
||||
|
||||
|
||||
@deprecated(
|
||||
"Use InMemoryLogRecordExporter. Since logs are not stable yet this WILL be removed in future releases."
|
||||
)
|
||||
class InMemoryLogExporter(InMemoryLogRecordExporter):
|
||||
pass
|
||||
|
||||
@@ -15,21 +15,29 @@
|
||||
from opentelemetry.sdk._logs._internal.export import (
|
||||
BatchLogRecordProcessor,
|
||||
ConsoleLogExporter,
|
||||
ConsoleLogRecordExporter,
|
||||
LogExporter,
|
||||
LogExportResult,
|
||||
LogRecordExporter,
|
||||
LogRecordExportResult,
|
||||
SimpleLogRecordProcessor,
|
||||
)
|
||||
|
||||
# The point module is not in the export directory to avoid a circular import.
|
||||
from opentelemetry.sdk._logs._internal.export.in_memory_log_exporter import (
|
||||
InMemoryLogExporter,
|
||||
InMemoryLogRecordExporter,
|
||||
)
|
||||
|
||||
__all__ = [
|
||||
"BatchLogRecordProcessor",
|
||||
"ConsoleLogExporter",
|
||||
"ConsoleLogRecordExporter",
|
||||
"LogExporter",
|
||||
"LogRecordExporter",
|
||||
"LogExportResult",
|
||||
"LogRecordExportResult",
|
||||
"SimpleLogRecordProcessor",
|
||||
"InMemoryLogExporter",
|
||||
"InMemoryLogRecordExporter",
|
||||
]
|
||||
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user