chore: add the virtual environment to the repository
- Add the backend_service/venv virtual environment - Includes all Python dependency packages - Note: the virtual environment is about 393 MB and contains 12,655 files
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/logs/v1/logs_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from opentelemetry.proto.logs.v1 import logs_pb2 as opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"u\n\x19\x45xportLogsServiceResponse\x12X\n\x0fpartial_success\x18\x01 \x01(\x0b\x32?.opentelemetry.proto.collector.logs.v1.ExportLogsPartialSuccess\"O\n\x18\x45xportLogsPartialSuccess\x12\x1c\n\x14rejected_log_records\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42\x98\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\xaa\x02%OpenTelemetry.Proto.Collector.Logs.V1b\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.logs.v1.logs_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001Z0go.opentelemetry.io/proto/otlp/collector/logs/v1\252\002%OpenTelemetry.Proto.Collector.Logs.V1'
  _globals['_EXPORTLOGSSERVICEREQUEST']._serialized_start=139
  _globals['_EXPORTLOGSSERVICEREQUEST']._serialized_end=231
  _globals['_EXPORTLOGSSERVICERESPONSE']._serialized_start=233
  _globals['_EXPORTLOGSSERVICERESPONSE']._serialized_end=350
  _globals['_EXPORTLOGSPARTIALSUCCESS']._serialized_start=352
  _globals['_EXPORTLOGSPARTIALSUCCESS']._serialized_end=431
  _globals['_LOGSSERVICE']._serialized_start=434
  _globals['_LOGSSERVICE']._serialized_end=591
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,117 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2020, OpenTelemetry Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.logs.v1.logs_pb2
import sys

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class ExportLogsServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_LOGS_FIELD_NUMBER: builtins.int
    @property
    def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]:
        """An array of ResourceLogs.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    def __init__(
        self,
        *,
        resource_logs: collections.abc.Iterable[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["resource_logs", b"resource_logs"]) -> None: ...

global___ExportLogsServiceRequest = ExportLogsServiceRequest

@typing_extensions.final
class ExportLogsServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportLogsPartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportLogsPartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportLogsServiceResponse = ExportLogsServiceResponse

@typing_extensions.final
class ExportLogsPartialSuccess(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    REJECTED_LOG_RECORDS_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int
    rejected_log_records: builtins.int
    """The number of rejected log records.

    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.

    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    def __init__(
        self,
        *,
        rejected_log_records: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_log_records", b"rejected_log_records"]) -> None: ...

global___ExportLogsPartialSuccess = ExportLogsPartialSuccess
@@ -0,0 +1,110 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from opentelemetry.proto.collector.logs.v1 import logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2

GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )


class LogsServiceStub(object):
    """Service that can be used to push logs between one Application instrumented with
    OpenTelemetry and an collector, or between an collector and a central collector (in this
    case logs are sent/received to/from multiple Applications).
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Export = channel.unary_unary(
                '/opentelemetry.proto.collector.logs.v1.LogsService/Export',
                request_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString,
                response_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString,
                _registered_method=True)


class LogsServiceServicer(object):
    """Service that can be used to push logs between one Application instrumented with
    OpenTelemetry and an collector, or between an collector and a central collector (in this
    case logs are sent/received to/from multiple Applications).
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_LogsServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'Export': grpc.unary_unary_rpc_method_handler(
                    servicer.Export,
                    request_deserializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.FromString,
                    response_serializer=opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'opentelemetry.proto.collector.logs.v1.LogsService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class LogsService(object):
    """Service that can be used to push logs between one Application instrumented with
    OpenTelemetry and an collector, or between an collector and a central collector (in this
    case logs are sent/received to/from multiple Applications).
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.logs.v1.LogsService/Export',
            opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString,
            opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
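Not part of the commit: a minimal client-side sketch of how the generated LogsServiceStub above is typically driven. The collector endpoint localhost:4317 is an assumption; any OTLP/gRPC receiver address works.

import grpc

from opentelemetry.proto.collector.logs.v1 import logs_service_pb2, logs_service_pb2_grpc

# Hypothetical local collector endpoint (assumption, not defined by this commit).
channel = grpc.insecure_channel("localhost:4317")
stub = logs_service_pb2_grpc.LogsServiceStub(channel)

# An empty request is valid; real exporters populate resource_logs with log data.
response = stub.Export(logs_service_pb2.ExportLogsServiceRequest(resource_logs=[]))
print(response.partial_success.rejected_log_records)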
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/metrics/v1/metrics_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from opentelemetry.proto.metrics.v1 import metrics_pb2 as opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"~\n\x1c\x45xportMetricsServiceResponse\x12^\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsPartialSuccess\"R\n\x1b\x45xportMetricsPartialSuccess\x12\x1c\n\x14rejected_data_points\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42\xa4\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\xaa\x02(OpenTelemetry.Proto.Collector.Metrics.V1b\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.metrics.v1.metrics_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001Z3go.opentelemetry.io/proto/otlp/collector/metrics/v1\252\002(OpenTelemetry.Proto.Collector.Metrics.V1'
  _globals['_EXPORTMETRICSSERVICEREQUEST']._serialized_start=154
  _globals['_EXPORTMETRICSSERVICEREQUEST']._serialized_end=258
  _globals['_EXPORTMETRICSSERVICERESPONSE']._serialized_start=260
  _globals['_EXPORTMETRICSSERVICERESPONSE']._serialized_end=386
  _globals['_EXPORTMETRICSPARTIALSUCCESS']._serialized_start=388
  _globals['_EXPORTMETRICSPARTIALSUCCESS']._serialized_end=470
  _globals['_METRICSSERVICE']._serialized_start=473
  _globals['_METRICSSERVICE']._serialized_end=645
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,117 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2019, OpenTelemetry Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.metrics.v1.metrics_pb2
import sys

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class ExportMetricsServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_METRICS_FIELD_NUMBER: builtins.int
    @property
    def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]:
        """An array of ResourceMetrics.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    def __init__(
        self,
        *,
        resource_metrics: collections.abc.Iterable[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["resource_metrics", b"resource_metrics"]) -> None: ...

global___ExportMetricsServiceRequest = ExportMetricsServiceRequest

@typing_extensions.final
class ExportMetricsServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportMetricsPartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportMetricsPartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportMetricsServiceResponse = ExportMetricsServiceResponse

@typing_extensions.final
class ExportMetricsPartialSuccess(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    REJECTED_DATA_POINTS_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int
    rejected_data_points: builtins.int
    """The number of rejected data points.

    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.

    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    def __init__(
        self,
        *,
        rejected_data_points: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_data_points", b"rejected_data_points"]) -> None: ...

global___ExportMetricsPartialSuccess = ExportMetricsPartialSuccess
@@ -0,0 +1,110 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from opentelemetry.proto.collector.metrics.v1 import metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2

GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )


class MetricsServiceStub(object):
    """Service that can be used to push metrics between one Application
    instrumented with OpenTelemetry and a collector, or between a collector and a
    central collector.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Export = channel.unary_unary(
                '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export',
                request_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString,
                response_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString,
                _registered_method=True)


class MetricsServiceServicer(object):
    """Service that can be used to push metrics between one Application
    instrumented with OpenTelemetry and a collector, or between a collector and a
    central collector.
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_MetricsServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'Export': grpc.unary_unary_rpc_method_handler(
                    servicer.Export,
                    request_deserializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.FromString,
                    response_serializer=opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'opentelemetry.proto.collector.metrics.v1.MetricsService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class MetricsService(object):
    """Service that can be used to push metrics between one Application
    instrumented with OpenTelemetry and a collector, or between a collector and a
    central collector.
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.metrics.v1.MetricsService/Export',
            opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString,
            opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
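Not part of the commit: a hedged server-side sketch showing how the generated add_MetricsServiceServicer_to_server helper above is typically wired up. The port and thread-pool size are assumptions.

from concurrent import futures

import grpc

from opentelemetry.proto.collector.metrics.v1 import metrics_service_pb2, metrics_service_pb2_grpc


class AcceptAllMetricsServicer(metrics_service_pb2_grpc.MetricsServiceServicer):
    def Export(self, request, context):
        # Returning an empty response signals that all data points were accepted.
        return metrics_service_pb2.ExportMetricsServiceResponse()


server = grpc.server(futures.ThreadPoolExecutor(max_workers=4))
metrics_service_pb2_grpc.add_MetricsServiceServicer_to_server(AcceptAllMetricsServicer(), server)
server.add_insecure_port("[::]:4317")  # assumed OTLP/gRPC port
server.start()
server.wait_for_termination()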
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/profiles/v1development/profiles_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from opentelemetry.proto.profiles.v1development import profiles_pb2 as opentelemetry_dot_proto_dot_profiles_dot_v1development_dot_profiles__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\nKopentelemetry/proto/collector/profiles/v1development/profiles_service.proto\x12\x34opentelemetry.proto.collector.profiles.v1development\x1a\x39opentelemetry/proto/profiles/v1development/profiles.proto\"\xcb\x01\n\x1c\x45xportProfilesServiceRequest\x12W\n\x11resource_profiles\x18\x01 \x03(\x0b\x32<.opentelemetry.proto.profiles.v1development.ResourceProfiles\x12R\n\ndictionary\x18\x02 \x01(\x0b\x32>.opentelemetry.proto.profiles.v1development.ProfilesDictionary\"\x8c\x01\n\x1d\x45xportProfilesServiceResponse\x12k\n\x0fpartial_success\x18\x01 \x01(\x0b\x32R.opentelemetry.proto.collector.profiles.v1development.ExportProfilesPartialSuccess\"P\n\x1c\x45xportProfilesPartialSuccess\x12\x19\n\x11rejected_profiles\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xc7\x01\n\x0fProfilesService\x12\xb3\x01\n\x06\x45xport\x12R.opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceRequest\x1aS.opentelemetry.proto.collector.profiles.v1development.ExportProfilesServiceResponse\"\x00\x42\xc9\x01\n7io.opentelemetry.proto.collector.profiles.v1developmentB\x14ProfilesServiceProtoP\x01Z?go.opentelemetry.io/proto/otlp/collector/profiles/v1development\xaa\x02\x34OpenTelemetry.Proto.Collector.Profiles.V1Developmentb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.profiles.v1development.profiles_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n7io.opentelemetry.proto.collector.profiles.v1developmentB\024ProfilesServiceProtoP\001Z?go.opentelemetry.io/proto/otlp/collector/profiles/v1development\252\0024OpenTelemetry.Proto.Collector.Profiles.V1Development'
  _globals['_EXPORTPROFILESSERVICEREQUEST']._serialized_start=193
  _globals['_EXPORTPROFILESSERVICEREQUEST']._serialized_end=396
  _globals['_EXPORTPROFILESSERVICERESPONSE']._serialized_start=399
  _globals['_EXPORTPROFILESSERVICERESPONSE']._serialized_end=539
  _globals['_EXPORTPROFILESPARTIALSUCCESS']._serialized_start=541
  _globals['_EXPORTPROFILESPARTIALSUCCESS']._serialized_end=621
  _globals['_PROFILESSERVICE']._serialized_start=624
  _globals['_PROFILESSERVICE']._serialized_end=823
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,123 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2023, OpenTelemetry Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.profiles.v1development.profiles_pb2
import sys

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class ExportProfilesServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_PROFILES_FIELD_NUMBER: builtins.int
    DICTIONARY_FIELD_NUMBER: builtins.int
    @property
    def resource_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.profiles.v1development.profiles_pb2.ResourceProfiles]:
        """An array of ResourceProfiles.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    @property
    def dictionary(self) -> opentelemetry.proto.profiles.v1development.profiles_pb2.ProfilesDictionary:
        """The reference table containing all data shared by profiles across the message being sent."""
    def __init__(
        self,
        *,
        resource_profiles: collections.abc.Iterable[opentelemetry.proto.profiles.v1development.profiles_pb2.ResourceProfiles] | None = ...,
        dictionary: opentelemetry.proto.profiles.v1development.profiles_pb2.ProfilesDictionary | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary", "resource_profiles", b"resource_profiles"]) -> None: ...

global___ExportProfilesServiceRequest = ExportProfilesServiceRequest

@typing_extensions.final
class ExportProfilesServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportProfilesPartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportProfilesPartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportProfilesServiceResponse = ExportProfilesServiceResponse

@typing_extensions.final
class ExportProfilesPartialSuccess(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    REJECTED_PROFILES_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int
    rejected_profiles: builtins.int
    """The number of rejected profiles.

    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.

    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    def __init__(
        self,
        *,
        rejected_profiles: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_profiles", b"rejected_profiles"]) -> None: ...

global___ExportProfilesPartialSuccess = ExportProfilesPartialSuccess
@@ -0,0 +1,107 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from opentelemetry.proto.collector.profiles.v1development import profiles_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2

GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in opentelemetry/proto/collector/profiles/v1development/profiles_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )


class ProfilesServiceStub(object):
    """Service that can be used to push profiles between one Application instrumented with
    OpenTelemetry and a collector, or between a collector and a central collector.
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Export = channel.unary_unary(
                '/opentelemetry.proto.collector.profiles.v1development.ProfilesService/Export',
                request_serializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceRequest.SerializeToString,
                response_deserializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceResponse.FromString,
                _registered_method=True)


class ProfilesServiceServicer(object):
    """Service that can be used to push profiles between one Application instrumented with
    OpenTelemetry and a collector, or between a collector and a central collector.
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_ProfilesServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'Export': grpc.unary_unary_rpc_method_handler(
                    servicer.Export,
                    request_deserializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceRequest.FromString,
                    response_serializer=opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'opentelemetry.proto.collector.profiles.v1development.ProfilesService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class ProfilesService(object):
    """Service that can be used to push profiles between one Application instrumented with
    OpenTelemetry and a collector, or between a collector and a central collector.
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.profiles.v1development.ProfilesService/Export',
            opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceRequest.SerializeToString,
            opentelemetry_dot_proto_dot_collector_dot_profiles_dot_v1development_dot_profiles__service__pb2.ExportProfilesServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
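Not part of the commit: a small sketch of the request shape defined above, including the development-only dictionary field; the field values are purely illustrative.

from opentelemetry.proto.collector.profiles.v1development import profiles_service_pb2
from opentelemetry.proto.profiles.v1development import profiles_pb2

request = profiles_service_pb2.ExportProfilesServiceRequest(
    resource_profiles=[],                          # would carry ResourceProfiles messages
    dictionary=profiles_pb2.ProfilesDictionary(),  # shared reference table for all profiles
)
print(request.HasField("dictionary"))  # True once the message field has been set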
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,34 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/trace/v1/trace_service.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from opentelemetry.proto.trace.v1 import trace_pb2 as opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2


DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto\"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"x\n\x1a\x45xportTraceServiceResponse\x12Z\n\x0fpartial_success\x18\x01 \x01(\x0b\x32\x41.opentelemetry.proto.collector.trace.v1.ExportTracePartialSuccess\"J\n\x19\x45xportTracePartialSuccess\x12\x16\n\x0erejected_spans\x18\x01 \x01(\x03\x12\x15\n\rerror_message\x18\x02 \x01(\t2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse\"\x00\x42\x9c\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\xaa\x02&OpenTelemetry.Proto.Collector.Trace.V1b\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.collector.trace.v1.trace_service_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001Z1go.opentelemetry.io/proto/otlp/collector/trace/v1\252\002&OpenTelemetry.Proto.Collector.Trace.V1'
  _globals['_EXPORTTRACESERVICEREQUEST']._serialized_start=144
  _globals['_EXPORTTRACESERVICEREQUEST']._serialized_end=240
  _globals['_EXPORTTRACESERVICERESPONSE']._serialized_start=242
  _globals['_EXPORTTRACESERVICERESPONSE']._serialized_end=362
  _globals['_EXPORTTRACEPARTIALSUCCESS']._serialized_start=364
  _globals['_EXPORTTRACEPARTIALSUCCESS']._serialized_end=438
  _globals['_TRACESERVICE']._serialized_start=441
  _globals['_TRACESERVICE']._serialized_end=603
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,117 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2019, OpenTelemetry Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import opentelemetry.proto.trace.v1.trace_pb2
import sys

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class ExportTraceServiceRequest(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_SPANS_FIELD_NUMBER: builtins.int
    @property
    def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]:
        """An array of ResourceSpans.
        For data coming from a single resource this array will typically contain one
        element. Intermediary nodes (such as OpenTelemetry Collector) that receive
        data from multiple origins typically batch the data before forwarding further and
        in that case this array will contain multiple elements.
        """
    def __init__(
        self,
        *,
        resource_spans: collections.abc.Iterable[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["resource_spans", b"resource_spans"]) -> None: ...

global___ExportTraceServiceRequest = ExportTraceServiceRequest

@typing_extensions.final
class ExportTraceServiceResponse(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    PARTIAL_SUCCESS_FIELD_NUMBER: builtins.int
    @property
    def partial_success(self) -> global___ExportTracePartialSuccess:
        """The details of a partially successful export request.

        If the request is only partially accepted
        (i.e. when the server accepts only parts of the data and rejects the rest)
        the server MUST initialize the `partial_success` field and MUST
        set the `rejected_<signal>` with the number of items it rejected.

        Servers MAY also make use of the `partial_success` field to convey
        warnings/suggestions to senders even when the request was fully accepted.
        In such cases, the `rejected_<signal>` MUST have a value of `0` and
        the `error_message` MUST be non-empty.

        A `partial_success` message with an empty value (rejected_<signal> = 0 and
        `error_message` = "") is equivalent to it not being set/present. Senders
        SHOULD interpret it the same way as in the full success case.
        """
    def __init__(
        self,
        *,
        partial_success: global___ExportTracePartialSuccess | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["partial_success", b"partial_success"]) -> None: ...

global___ExportTraceServiceResponse = ExportTraceServiceResponse

@typing_extensions.final
class ExportTracePartialSuccess(google.protobuf.message.Message):
    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    REJECTED_SPANS_FIELD_NUMBER: builtins.int
    ERROR_MESSAGE_FIELD_NUMBER: builtins.int
    rejected_spans: builtins.int
    """The number of rejected spans.

    A `rejected_<signal>` field holding a `0` value indicates that the
    request was fully accepted.
    """
    error_message: builtins.str
    """A developer-facing human-readable message in English. It should be used
    either to explain why the server rejected parts of the data during a partial
    success or to convey warnings/suggestions during a full success. The message
    should offer guidance on how users can address such issues.

    error_message is an optional field. An error_message with an empty value
    is equivalent to it not being set.
    """
    def __init__(
        self,
        *,
        rejected_spans: builtins.int = ...,
        error_message: builtins.str = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["error_message", b"error_message", "rejected_spans", b"rejected_spans"]) -> None: ...

global___ExportTracePartialSuccess = ExportTracePartialSuccess
@@ -0,0 +1,110 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
import warnings

from opentelemetry.proto.collector.trace.v1 import trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2

GRPC_GENERATED_VERSION = '1.63.2'
GRPC_VERSION = grpc.__version__
EXPECTED_ERROR_RELEASE = '1.65.0'
SCHEDULED_RELEASE_DATE = 'June 25, 2024'
_version_not_supported = False

try:
    from grpc._utilities import first_version_is_lower
    _version_not_supported = first_version_is_lower(GRPC_VERSION, GRPC_GENERATED_VERSION)
except ImportError:
    _version_not_supported = True

if _version_not_supported:
    warnings.warn(
        f'The grpc package installed is at version {GRPC_VERSION},'
        + f' but the generated code in opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py depends on'
        + f' grpcio>={GRPC_GENERATED_VERSION}.'
        + f' Please upgrade your grpc module to grpcio>={GRPC_GENERATED_VERSION}'
        + f' or downgrade your generated code using grpcio-tools<={GRPC_VERSION}.'
        + f' This warning will become an error in {EXPECTED_ERROR_RELEASE},'
        + f' scheduled for release on {SCHEDULED_RELEASE_DATE}.',
        RuntimeWarning
    )


class TraceServiceStub(object):
    """Service that can be used to push spans between one Application instrumented with
    OpenTelemetry and a collector, or between a collector and a central collector (in this
    case spans are sent/received to/from multiple Applications).
    """

    def __init__(self, channel):
        """Constructor.

        Args:
            channel: A grpc.Channel.
        """
        self.Export = channel.unary_unary(
                '/opentelemetry.proto.collector.trace.v1.TraceService/Export',
                request_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString,
                response_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString,
                _registered_method=True)


class TraceServiceServicer(object):
    """Service that can be used to push spans between one Application instrumented with
    OpenTelemetry and a collector, or between a collector and a central collector (in this
    case spans are sent/received to/from multiple Applications).
    """

    def Export(self, request, context):
        """Missing associated documentation comment in .proto file."""
        context.set_code(grpc.StatusCode.UNIMPLEMENTED)
        context.set_details('Method not implemented!')
        raise NotImplementedError('Method not implemented!')


def add_TraceServiceServicer_to_server(servicer, server):
    rpc_method_handlers = {
            'Export': grpc.unary_unary_rpc_method_handler(
                    servicer.Export,
                    request_deserializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.FromString,
                    response_serializer=opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.SerializeToString,
            ),
    }
    generic_handler = grpc.method_handlers_generic_handler(
            'opentelemetry.proto.collector.trace.v1.TraceService', rpc_method_handlers)
    server.add_generic_rpc_handlers((generic_handler,))


# This class is part of an EXPERIMENTAL API.
class TraceService(object):
    """Service that can be used to push spans between one Application instrumented with
    OpenTelemetry and a collector, or between a collector and a central collector (in this
    case spans are sent/received to/from multiple Applications).
    """

    @staticmethod
    def Export(request,
            target,
            options=(),
            channel_credentials=None,
            call_credentials=None,
            insecure=False,
            compression=None,
            wait_for_ready=None,
            timeout=None,
            metadata=None):
        return grpc.experimental.unary_unary(
            request,
            target,
            '/opentelemetry.proto.collector.trace.v1.TraceService/Export',
            opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString,
            opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString,
            options,
            channel_credentials,
            insecure,
            call_credentials,
            compression,
            wait_for_ready,
            timeout,
            metadata,
            _registered_method=True)
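Not part of the commit: the module-level TraceService.Export helper above wraps grpc.experimental.unary_unary, so a one-shot export can be sketched without managing a channel; the endpoint and timeout are assumptions.

from opentelemetry.proto.collector.trace.v1 import trace_service_pb2, trace_service_pb2_grpc

response = trace_service_pb2_grpc.TraceService.Export(
    trace_service_pb2.ExportTraceServiceRequest(resource_spans=[]),
    "localhost:4317",  # hypothetical collector endpoint
    insecure=True,     # plaintext channel, mirroring the insecure flag in the generated signature
    timeout=5.0,
)
print(response.partial_success.error_message)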
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,37 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/common/v1/common.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()




DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1\"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value\"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"\x94\x01\n\x14InstrumentationScope\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\"X\n\tEntityRef\x12\x12\n\nschema_url\x18\x01 \x01(\t\x12\x0c\n\x04type\x18\x02 \x01(\t\x12\x0f\n\x07id_keys\x18\x03 \x03(\t\x12\x18\n\x10\x64\x65scription_keys\x18\x04 \x03(\tB{\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z(go.opentelemetry.io/proto/otlp/common/v1\xaa\x02\x1dOpenTelemetry.Proto.Common.V1b\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.common.v1.common_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z(go.opentelemetry.io/proto/otlp/common/v1\252\002\035OpenTelemetry.Proto.Common.V1'
  _globals['_ANYVALUE']._serialized_start=78
  _globals['_ANYVALUE']._serialized_end=346
  _globals['_ARRAYVALUE']._serialized_start=348
  _globals['_ARRAYVALUE']._serialized_end=417
  _globals['_KEYVALUELIST']._serialized_start=419
  _globals['_KEYVALUELIST']._serialized_end=490
  _globals['_KEYVALUE']._serialized_start=492
  _globals['_KEYVALUE']._serialized_end=571
  _globals['_INSTRUMENTATIONSCOPE']._serialized_start=574
  _globals['_INSTRUMENTATIONSCOPE']._serialized_end=722
  _globals['_ENTITYREF']._serialized_start=724
  _globals['_ENTITYREF']._serialized_end=812
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,235 @@
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
Copyright 2019, OpenTelemetry Authors

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import builtins
import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
import sys

if sys.version_info >= (3, 8):
    import typing as typing_extensions
else:
    import typing_extensions

DESCRIPTOR: google.protobuf.descriptor.FileDescriptor

@typing_extensions.final
class AnyValue(google.protobuf.message.Message):
    """AnyValue is used to represent any type of attribute value. AnyValue may contain a
    primitive value such as a string or integer or it may contain an arbitrary nested
    object containing arrays, key-value lists and primitives.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    STRING_VALUE_FIELD_NUMBER: builtins.int
    BOOL_VALUE_FIELD_NUMBER: builtins.int
    INT_VALUE_FIELD_NUMBER: builtins.int
    DOUBLE_VALUE_FIELD_NUMBER: builtins.int
    ARRAY_VALUE_FIELD_NUMBER: builtins.int
    KVLIST_VALUE_FIELD_NUMBER: builtins.int
    BYTES_VALUE_FIELD_NUMBER: builtins.int
    string_value: builtins.str
    bool_value: builtins.bool
    int_value: builtins.int
    double_value: builtins.float
    @property
    def array_value(self) -> global___ArrayValue: ...
    @property
    def kvlist_value(self) -> global___KeyValueList: ...
    bytes_value: builtins.bytes
    def __init__(
        self,
        *,
        string_value: builtins.str = ...,
        bool_value: builtins.bool = ...,
        int_value: builtins.int = ...,
        double_value: builtins.float = ...,
        array_value: global___ArrayValue | None = ...,
        kvlist_value: global___KeyValueList | None = ...,
        bytes_value: builtins.bytes = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["array_value", b"array_value", "bool_value", b"bool_value", "bytes_value", b"bytes_value", "double_value", b"double_value", "int_value", b"int_value", "kvlist_value", b"kvlist_value", "string_value", b"string_value", "value", b"value"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["array_value", b"array_value", "bool_value", b"bool_value", "bytes_value", b"bytes_value", "double_value", b"double_value", "int_value", b"int_value", "kvlist_value", b"kvlist_value", "string_value", b"string_value", "value", b"value"]) -> None: ...
    def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["string_value", "bool_value", "int_value", "double_value", "array_value", "kvlist_value", "bytes_value"] | None: ...

global___AnyValue = AnyValue

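# --- Illustrative usage sketch (not part of the committed file) ---------------------
# Building the oneof-based AnyValue / KeyValue messages described above with the
# generated common_pb2 module; the keys and values used here are made-up examples.
from opentelemetry.proto.common.v1 import common_pb2

scalar = common_pb2.AnyValue(string_value="hello")
nested = common_pb2.AnyValue(
    kvlist_value=common_pb2.KeyValueList(
        values=[common_pb2.KeyValue(key="retries", value=common_pb2.AnyValue(int_value=3))]
    )
)
print(scalar.WhichOneof("value"))          # -> "string_value"
print(nested.kvlist_value.values[0].key)   # -> "retries"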
@typing_extensions.final
|
||||
class ArrayValue(google.protobuf.message.Message):
|
||||
"""ArrayValue is a list of AnyValue messages. We need ArrayValue as a message
|
||||
since oneof in AnyValue does not allow repeated fields.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
VALUES_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyValue]:
|
||||
"""Array of values. The array may be empty (contain 0 elements)."""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
values: collections.abc.Iterable[global___AnyValue] | None = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
|
||||
|
||||
global___ArrayValue = ArrayValue
|
||||
|
||||
@typing_extensions.final
|
||||
class KeyValueList(google.protobuf.message.Message):
|
||||
"""KeyValueList is a list of KeyValue messages. We need KeyValueList as a message
|
||||
since `oneof` in AnyValue does not allow repeated fields. Everywhere else where we need
|
||||
a list of KeyValue messages (e.g. in Span) we use `repeated KeyValue` directly to
|
||||
avoid unnecessary extra wrapping (which slows down the protocol). The 2 approaches
|
||||
are semantically equivalent.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
VALUES_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]:
|
||||
"""A collection of key/value pairs of key-value pairs. The list may be empty (may
|
||||
contain 0 elements).
|
||||
The keys MUST be unique (it is not allowed to have more than one
|
||||
value with the same key).
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
values: collections.abc.Iterable[global___KeyValue] | None = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
|
||||
|
||||
global___KeyValueList = KeyValueList
|
||||
|
||||
@typing_extensions.final
|
||||
class KeyValue(google.protobuf.message.Message):
|
||||
"""KeyValue is a key-value pair that is used to store Span attributes, Link
|
||||
attributes, etc.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
KEY_FIELD_NUMBER: builtins.int
|
||||
VALUE_FIELD_NUMBER: builtins.int
|
||||
key: builtins.str
|
||||
@property
|
||||
def value(self) -> global___AnyValue: ...
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
key: builtins.str = ...,
|
||||
value: global___AnyValue | None = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
|
||||
|
||||
global___KeyValue = KeyValue
|
||||
|
||||
@typing_extensions.final
|
||||
class InstrumentationScope(google.protobuf.message.Message):
|
||||
"""InstrumentationScope is a message representing the instrumentation scope information
|
||||
such as the fully qualified name and version.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
NAME_FIELD_NUMBER: builtins.int
|
||||
VERSION_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTES_FIELD_NUMBER: builtins.int
|
||||
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
|
||||
name: builtins.str
|
||||
"""An empty instrumentation scope name means the name is unknown."""
|
||||
version: builtins.str
|
||||
@property
|
||||
def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]:
|
||||
"""Additional attributes that describe the scope. [Optional].
|
||||
Attribute keys MUST be unique (it is not allowed to have more than one
|
||||
attribute with the same key).
|
||||
"""
|
||||
dropped_attributes_count: builtins.int
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
name: builtins.str = ...,
|
||||
version: builtins.str = ...,
|
||||
attributes: collections.abc.Iterable[global___KeyValue] | None = ...,
|
||||
dropped_attributes_count: builtins.int = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "name", b"name", "version", b"version"]) -> None: ...
|
||||
|
||||
global___InstrumentationScope = InstrumentationScope
|
||||
|
||||
@typing_extensions.final
|
||||
class EntityRef(google.protobuf.message.Message):
|
||||
"""A reference to an Entity.
|
||||
Entity represents an object of interest associated with produced telemetry: e.g spans, metrics, profiles, or logs.
|
||||
|
||||
Status: [Development]
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
TYPE_FIELD_NUMBER: builtins.int
|
||||
ID_KEYS_FIELD_NUMBER: builtins.int
|
||||
DESCRIPTION_KEYS_FIELD_NUMBER: builtins.int
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the entity data
|
||||
is recorded in. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
|
||||
This schema_url applies to the data in this message and to the Resource attributes
|
||||
referenced by id_keys and description_keys.
|
||||
TODO: discuss if we are happy with this somewhat complicated definition of what
|
||||
the schema_url applies to.
|
||||
|
||||
This field obsoletes the schema_url field in ResourceMetrics/ResourceSpans/ResourceLogs.
|
||||
"""
|
||||
type: builtins.str
|
||||
"""Defines the type of the entity. MUST not change during the lifetime of the entity.
|
||||
For example: "service" or "host". This field is required and MUST not be empty
|
||||
for valid entities.
|
||||
"""
|
||||
@property
|
||||
def id_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
||||
"""Attribute Keys that identify the entity.
|
||||
MUST not change during the lifetime of the entity. The Id must contain at least one attribute.
|
||||
These keys MUST exist in the containing {message}.attributes.
|
||||
"""
|
||||
@property
|
||||
def description_keys(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
||||
"""Descriptive (non-identifying) attribute keys of the entity.
|
||||
MAY change over the lifetime of the entity. MAY be empty.
|
||||
These attribute keys are not part of entity's identity.
|
||||
These keys MUST exist in the containing {message}.attributes.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
schema_url: builtins.str = ...,
|
||||
type: builtins.str = ...,
|
||||
id_keys: collections.abc.Iterable[builtins.str] | None = ...,
|
||||
description_keys: collections.abc.Iterable[builtins.str] | None = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["description_keys", b"description_keys", "id_keys", b"id_keys", "schema_url", b"schema_url", "type", b"type"]) -> None: ...
|
||||
|
||||
global___EntityRef = EntityRef
|
||||
Binary file not shown.
@@ -0,0 +1,39 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/logs/v1/logs.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2
from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n&opentelemetry/proto/logs/v1/logs.proto\x12\x1bopentelemetry.proto.logs.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"L\n\x08LogsData\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\xa3\x01\n\x0cResourceLogs\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12:\n\nscope_logs\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.ScopeLogs\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\xa0\x01\n\tScopeLogs\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12;\n\x0blog_records\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.logs.v1.LogRecord\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x83\x03\n\tLogRecord\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x1f\n\x17observed_time_unix_nano\x18\x0b \x01(\x06\x12\x44\n\x0fseverity_number\x18\x02 \x01(\x0e\x32+.opentelemetry.proto.logs.v1.SeverityNumber\x12\x15\n\rseverity_text\x18\x03 \x01(\t\x12\x35\n\x04\x62ody\x18\x05 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\x12;\n\nattributes\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x07 \x01(\r\x12\r\n\x05\x66lags\x18\x08 \x01(\x07\x12\x10\n\x08trace_id\x18\t \x01(\x0c\x12\x0f\n\x07span_id\x18\n \x01(\x0c\x12\x12\n\nevent_name\x18\x0c \x01(\tJ\x04\x08\x04\x10\x05*\xc3\x05\n\x0eSeverityNumber\x12\x1f\n\x1bSEVERITY_NUMBER_UNSPECIFIED\x10\x00\x12\x19\n\x15SEVERITY_NUMBER_TRACE\x10\x01\x12\x1a\n\x16SEVERITY_NUMBER_TRACE2\x10\x02\x12\x1a\n\x16SEVERITY_NUMBER_TRACE3\x10\x03\x12\x1a\n\x16SEVERITY_NUMBER_TRACE4\x10\x04\x12\x19\n\x15SEVERITY_NUMBER_DEBUG\x10\x05\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG2\x10\x06\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG3\x10\x07\x12\x1a\n\x16SEVERITY_NUMBER_DEBUG4\x10\x08\x12\x18\n\x14SEVERITY_NUMBER_INFO\x10\t\x12\x19\n\x15SEVERITY_NUMBER_INFO2\x10\n\x12\x19\n\x15SEVERITY_NUMBER_INFO3\x10\x0b\x12\x19\n\x15SEVERITY_NUMBER_INFO4\x10\x0c\x12\x18\n\x14SEVERITY_NUMBER_WARN\x10\r\x12\x19\n\x15SEVERITY_NUMBER_WARN2\x10\x0e\x12\x19\n\x15SEVERITY_NUMBER_WARN3\x10\x0f\x12\x19\n\x15SEVERITY_NUMBER_WARN4\x10\x10\x12\x19\n\x15SEVERITY_NUMBER_ERROR\x10\x11\x12\x1a\n\x16SEVERITY_NUMBER_ERROR2\x10\x12\x12\x1a\n\x16SEVERITY_NUMBER_ERROR3\x10\x13\x12\x1a\n\x16SEVERITY_NUMBER_ERROR4\x10\x14\x12\x19\n\x15SEVERITY_NUMBER_FATAL\x10\x15\x12\x1a\n\x16SEVERITY_NUMBER_FATAL2\x10\x16\x12\x1a\n\x16SEVERITY_NUMBER_FATAL3\x10\x17\x12\x1a\n\x16SEVERITY_NUMBER_FATAL4\x10\x18*Y\n\x0eLogRecordFlags\x12\x1f\n\x1bLOG_RECORD_FLAGS_DO_NOT_USE\x10\x00\x12&\n!LOG_RECORD_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x42s\n\x1eio.opentelemetry.proto.logs.v1B\tLogsProtoP\x01Z&go.opentelemetry.io/proto/otlp/logs/v1\xaa\x02\x1bOpenTelemetry.Proto.Logs.V1b\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.logs.v1.logs_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z&go.opentelemetry.io/proto/otlp/logs/v1\252\002\033OpenTelemetry.Proto.Logs.V1'
  _globals['_SEVERITYNUMBER']._serialized_start=961
  _globals['_SEVERITYNUMBER']._serialized_end=1668
  _globals['_LOGRECORDFLAGS']._serialized_start=1670
  _globals['_LOGRECORDFLAGS']._serialized_end=1759
  _globals['_LOGSDATA']._serialized_start=163
  _globals['_LOGSDATA']._serialized_end=239
  _globals['_RESOURCELOGS']._serialized_start=242
  _globals['_RESOURCELOGS']._serialized_end=405
  _globals['_SCOPELOGS']._serialized_start=408
  _globals['_SCOPELOGS']._serialized_end=568
  _globals['_LOGRECORD']._serialized_start=571
  _globals['_LOGRECORD']._serialized_end=958
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,365 @@
|
||||
"""
|
||||
@generated by mypy-protobuf. Do not edit manually!
|
||||
isort:skip_file
|
||||
Copyright 2020, OpenTelemetry Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import builtins
|
||||
import collections.abc
|
||||
import google.protobuf.descriptor
|
||||
import google.protobuf.internal.containers
|
||||
import google.protobuf.internal.enum_type_wrapper
|
||||
import google.protobuf.message
|
||||
import opentelemetry.proto.common.v1.common_pb2
|
||||
import opentelemetry.proto.resource.v1.resource_pb2
|
||||
import sys
|
||||
import typing
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
import typing as typing_extensions
|
||||
else:
|
||||
import typing_extensions
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
||||
|
||||
class _SeverityNumber:
|
||||
ValueType = typing.NewType("ValueType", builtins.int)
|
||||
V: typing_extensions.TypeAlias = ValueType
|
||||
|
||||
class _SeverityNumberEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SeverityNumber.ValueType], builtins.type):
|
||||
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
||||
SEVERITY_NUMBER_UNSPECIFIED: _SeverityNumber.ValueType # 0
|
||||
"""UNSPECIFIED is the default SeverityNumber, it MUST NOT be used."""
|
||||
SEVERITY_NUMBER_TRACE: _SeverityNumber.ValueType # 1
|
||||
SEVERITY_NUMBER_TRACE2: _SeverityNumber.ValueType # 2
|
||||
SEVERITY_NUMBER_TRACE3: _SeverityNumber.ValueType # 3
|
||||
SEVERITY_NUMBER_TRACE4: _SeverityNumber.ValueType # 4
|
||||
SEVERITY_NUMBER_DEBUG: _SeverityNumber.ValueType # 5
|
||||
SEVERITY_NUMBER_DEBUG2: _SeverityNumber.ValueType # 6
|
||||
SEVERITY_NUMBER_DEBUG3: _SeverityNumber.ValueType # 7
|
||||
SEVERITY_NUMBER_DEBUG4: _SeverityNumber.ValueType # 8
|
||||
SEVERITY_NUMBER_INFO: _SeverityNumber.ValueType # 9
|
||||
SEVERITY_NUMBER_INFO2: _SeverityNumber.ValueType # 10
|
||||
SEVERITY_NUMBER_INFO3: _SeverityNumber.ValueType # 11
|
||||
SEVERITY_NUMBER_INFO4: _SeverityNumber.ValueType # 12
|
||||
SEVERITY_NUMBER_WARN: _SeverityNumber.ValueType # 13
|
||||
SEVERITY_NUMBER_WARN2: _SeverityNumber.ValueType # 14
|
||||
SEVERITY_NUMBER_WARN3: _SeverityNumber.ValueType # 15
|
||||
SEVERITY_NUMBER_WARN4: _SeverityNumber.ValueType # 16
|
||||
SEVERITY_NUMBER_ERROR: _SeverityNumber.ValueType # 17
|
||||
SEVERITY_NUMBER_ERROR2: _SeverityNumber.ValueType # 18
|
||||
SEVERITY_NUMBER_ERROR3: _SeverityNumber.ValueType # 19
|
||||
SEVERITY_NUMBER_ERROR4: _SeverityNumber.ValueType # 20
|
||||
SEVERITY_NUMBER_FATAL: _SeverityNumber.ValueType # 21
|
||||
SEVERITY_NUMBER_FATAL2: _SeverityNumber.ValueType # 22
|
||||
SEVERITY_NUMBER_FATAL3: _SeverityNumber.ValueType # 23
|
||||
SEVERITY_NUMBER_FATAL4: _SeverityNumber.ValueType # 24
|
||||
|
||||
class SeverityNumber(_SeverityNumber, metaclass=_SeverityNumberEnumTypeWrapper):
|
||||
"""Possible values for LogRecord.SeverityNumber."""
|
||||
|
||||
SEVERITY_NUMBER_UNSPECIFIED: SeverityNumber.ValueType # 0
|
||||
"""UNSPECIFIED is the default SeverityNumber, it MUST NOT be used."""
|
||||
SEVERITY_NUMBER_TRACE: SeverityNumber.ValueType # 1
|
||||
SEVERITY_NUMBER_TRACE2: SeverityNumber.ValueType # 2
|
||||
SEVERITY_NUMBER_TRACE3: SeverityNumber.ValueType # 3
|
||||
SEVERITY_NUMBER_TRACE4: SeverityNumber.ValueType # 4
|
||||
SEVERITY_NUMBER_DEBUG: SeverityNumber.ValueType # 5
|
||||
SEVERITY_NUMBER_DEBUG2: SeverityNumber.ValueType # 6
|
||||
SEVERITY_NUMBER_DEBUG3: SeverityNumber.ValueType # 7
|
||||
SEVERITY_NUMBER_DEBUG4: SeverityNumber.ValueType # 8
|
||||
SEVERITY_NUMBER_INFO: SeverityNumber.ValueType # 9
|
||||
SEVERITY_NUMBER_INFO2: SeverityNumber.ValueType # 10
|
||||
SEVERITY_NUMBER_INFO3: SeverityNumber.ValueType # 11
|
||||
SEVERITY_NUMBER_INFO4: SeverityNumber.ValueType # 12
|
||||
SEVERITY_NUMBER_WARN: SeverityNumber.ValueType # 13
|
||||
SEVERITY_NUMBER_WARN2: SeverityNumber.ValueType # 14
|
||||
SEVERITY_NUMBER_WARN3: SeverityNumber.ValueType # 15
|
||||
SEVERITY_NUMBER_WARN4: SeverityNumber.ValueType # 16
|
||||
SEVERITY_NUMBER_ERROR: SeverityNumber.ValueType # 17
|
||||
SEVERITY_NUMBER_ERROR2: SeverityNumber.ValueType # 18
|
||||
SEVERITY_NUMBER_ERROR3: SeverityNumber.ValueType # 19
|
||||
SEVERITY_NUMBER_ERROR4: SeverityNumber.ValueType # 20
|
||||
SEVERITY_NUMBER_FATAL: SeverityNumber.ValueType # 21
|
||||
SEVERITY_NUMBER_FATAL2: SeverityNumber.ValueType # 22
|
||||
SEVERITY_NUMBER_FATAL3: SeverityNumber.ValueType # 23
|
||||
SEVERITY_NUMBER_FATAL4: SeverityNumber.ValueType # 24
|
||||
global___SeverityNumber = SeverityNumber
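# --- Illustrative usage sketch (not part of the committed file) ---------------------
# One possible mapping from Python logging levels to the SeverityNumber values listed
# above; the exact mapping chosen here is an assumption, not part of the generated stub.
import logging
from opentelemetry.proto.logs.v1 import logs_pb2

LEVEL_TO_SEVERITY = {
    logging.DEBUG: logs_pb2.SEVERITY_NUMBER_DEBUG,
    logging.INFO: logs_pb2.SEVERITY_NUMBER_INFO,
    logging.WARNING: logs_pb2.SEVERITY_NUMBER_WARN,
    logging.ERROR: logs_pb2.SEVERITY_NUMBER_ERROR,
    logging.CRITICAL: logs_pb2.SEVERITY_NUMBER_FATAL,
}
record_severity = LEVEL_TO_SEVERITY[logging.INFO]  # -> SEVERITY_NUMBER_INFO (9)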

class _LogRecordFlags:
    ValueType = typing.NewType("ValueType", builtins.int)
    V: typing_extensions.TypeAlias = ValueType

class _LogRecordFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_LogRecordFlags.ValueType], builtins.type):
    DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
    LOG_RECORD_FLAGS_DO_NOT_USE: _LogRecordFlags.ValueType  # 0
    """The zero value for the enum. Should not be used for comparisons.
    Instead use bitwise "and" with the appropriate mask as shown above.
    """
    LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: _LogRecordFlags.ValueType  # 255
    """Bits 0-7 are used for trace flags."""

class LogRecordFlags(_LogRecordFlags, metaclass=_LogRecordFlagsEnumTypeWrapper):
    """LogRecordFlags represents constants used to interpret the
    LogRecord.flags field, which is protobuf 'fixed32' type and is to
    be used as bit-fields. Each non-zero value defined in this enum is
    a bit-mask. To extract the bit-field, for example, use an
    expression like:

      (logRecord.flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK)
    """

LOG_RECORD_FLAGS_DO_NOT_USE: LogRecordFlags.ValueType  # 0
"""The zero value for the enum. Should not be used for comparisons.
Instead use bitwise "and" with the appropriate mask as shown above.
"""
LOG_RECORD_FLAGS_TRACE_FLAGS_MASK: LogRecordFlags.ValueType  # 255
"""Bits 0-7 are used for trace flags."""
global___LogRecordFlags = LogRecordFlags

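# --- Illustrative usage sketch (not part of the committed file) ---------------------
# Extracting the W3C trace flags from LogRecord.flags with the mask documented above;
# the sample flags value 0x00000101 is made up for the example.
from opentelemetry.proto.logs.v1 import logs_pb2

record = logs_pb2.LogRecord(flags=0x00000101)
trace_flags = record.flags & logs_pb2.LOG_RECORD_FLAGS_TRACE_FLAGS_MASK
print(hex(trace_flags))  # -> 0x1; only the low 8 bits carry meaning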
@typing_extensions.final
class LogsData(google.protobuf.message.Message):
    """LogsData represents the logs data that can be stored in a persistent storage,
    OR can be embedded by other protocols that transfer OTLP logs data but do not
    implement the OTLP protocol.

    The main difference between this message and collector protocol is that
    in this message there will not be any "control" or "metadata" specific to
    OTLP protocol.

    When new fields are added into this message, the OTLP request MUST be updated
    as well.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    RESOURCE_LOGS_FIELD_NUMBER: builtins.int
    @property
    def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceLogs]:
        """An array of ResourceLogs.
        For data coming from a single resource this array will typically contain
        one element. Intermediary nodes that receive data from multiple origins
        typically batch the data before forwarding further and in that case this
        array will contain multiple elements.
        """
    def __init__(
        self,
        *,
        resource_logs: collections.abc.Iterable[global___ResourceLogs] | None = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["resource_logs", b"resource_logs"]) -> None: ...

global___LogsData = LogsData

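# --- Illustrative usage sketch (not part of the committed file) ---------------------
# Assembling the LogsData -> ResourceLogs -> ScopeLogs -> LogRecord nesting described
# above. ResourceLogs, ScopeLogs and LogRecord are declared further down in this stub;
# the timestamp and body text are made-up example values.
from opentelemetry.proto.common.v1 import common_pb2
from opentelemetry.proto.logs.v1 import logs_pb2

record = logs_pb2.LogRecord(
    time_unix_nano=1_700_000_000_000_000_000,
    severity_number=logs_pb2.SEVERITY_NUMBER_INFO,
    severity_text="INFO",
    body=common_pb2.AnyValue(string_value="user logged in"),
)
logs_data = logs_pb2.LogsData(
    resource_logs=[
        logs_pb2.ResourceLogs(scope_logs=[logs_pb2.ScopeLogs(log_records=[record])])
    ]
)
payload = logs_data.SerializeToString()  # bytes suitable for storage or transport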
@typing_extensions.final
|
||||
class ResourceLogs(google.protobuf.message.Message):
|
||||
"""A collection of ScopeLogs from a Resource."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
RESOURCE_FIELD_NUMBER: builtins.int
|
||||
SCOPE_LOGS_FIELD_NUMBER: builtins.int
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
|
||||
"""The resource for the logs in this message.
|
||||
If this field is not set then resource info is unknown.
|
||||
"""
|
||||
@property
|
||||
def scope_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeLogs]:
|
||||
"""A list of ScopeLogs that originate from a resource."""
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the resource data
|
||||
is recorded in. Notably, the last part of the URL path is the version number of the
|
||||
schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
This schema_url applies to the data in the "resource" field. It does not apply
|
||||
to the data in the "scope_logs" field which have their own schema_url field.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ...,
|
||||
scope_logs: collections.abc.Iterable[global___ScopeLogs] | None = ...,
|
||||
schema_url: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_logs", b"scope_logs"]) -> None: ...
|
||||
|
||||
global___ResourceLogs = ResourceLogs
|
||||
|
||||
@typing_extensions.final
|
||||
class ScopeLogs(google.protobuf.message.Message):
|
||||
"""A collection of Logs produced by a Scope."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
SCOPE_FIELD_NUMBER: builtins.int
|
||||
LOG_RECORDS_FIELD_NUMBER: builtins.int
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope:
|
||||
"""The instrumentation scope information for the logs in this message.
|
||||
Semantically when InstrumentationScope isn't set, it is equivalent with
|
||||
an empty instrumentation scope name (unknown).
|
||||
"""
|
||||
@property
|
||||
def log_records(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogRecord]:
|
||||
"""A list of log records."""
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the log data
|
||||
is recorded in. Notably, the last part of the URL path is the version number of the
|
||||
schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
This schema_url applies to all logs in the "logs" field.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ...,
|
||||
log_records: collections.abc.Iterable[global___LogRecord] | None = ...,
|
||||
schema_url: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["log_records", b"log_records", "schema_url", b"schema_url", "scope", b"scope"]) -> None: ...
|
||||
|
||||
global___ScopeLogs = ScopeLogs
|
||||
|
||||
@typing_extensions.final
|
||||
class LogRecord(google.protobuf.message.Message):
|
||||
"""A log record according to OpenTelemetry Log Data Model:
|
||||
https://github.com/open-telemetry/oteps/blob/main/text/logs/0097-log-data-model.md
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
|
||||
OBSERVED_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
|
||||
SEVERITY_NUMBER_FIELD_NUMBER: builtins.int
|
||||
SEVERITY_TEXT_FIELD_NUMBER: builtins.int
|
||||
BODY_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTES_FIELD_NUMBER: builtins.int
|
||||
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
|
||||
FLAGS_FIELD_NUMBER: builtins.int
|
||||
TRACE_ID_FIELD_NUMBER: builtins.int
|
||||
SPAN_ID_FIELD_NUMBER: builtins.int
|
||||
EVENT_NAME_FIELD_NUMBER: builtins.int
|
||||
time_unix_nano: builtins.int
|
||||
"""time_unix_nano is the time when the event occurred.
|
||||
Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
|
||||
Value of 0 indicates unknown or missing timestamp.
|
||||
"""
|
||||
observed_time_unix_nano: builtins.int
|
||||
"""Time when the event was observed by the collection system.
|
||||
For events that originate in OpenTelemetry (e.g. using OpenTelemetry Logging SDK)
|
||||
this timestamp is typically set at the generation time and is equal to Timestamp.
|
||||
For events originating externally and collected by OpenTelemetry (e.g. using
|
||||
Collector) this is the time when OpenTelemetry's code observed the event measured
|
||||
by the clock of the OpenTelemetry code. This field MUST be set once the event is
|
||||
observed by OpenTelemetry.
|
||||
|
||||
For converting OpenTelemetry log data to formats that support only one timestamp or
|
||||
when receiving OpenTelemetry log data by recipients that support only one timestamp
|
||||
internally the following logic is recommended:
|
||||
- Use time_unix_nano if it is present, otherwise use observed_time_unix_nano.
|
||||
|
||||
Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
|
||||
Value of 0 indicates unknown or missing timestamp.
|
||||
"""
|
||||
severity_number: global___SeverityNumber.ValueType
|
||||
"""Numerical value of the severity, normalized to values described in Log Data Model.
|
||||
[Optional].
|
||||
"""
|
||||
severity_text: builtins.str
|
||||
"""The severity text (also known as log level). The original string representation as
|
||||
it is known at the source. [Optional].
|
||||
"""
|
||||
@property
|
||||
def body(self) -> opentelemetry.proto.common.v1.common_pb2.AnyValue:
|
||||
"""A value containing the body of the log record. Can be for example a human-readable
|
||||
string message (including multi-line) describing the event in a free form or it can
|
||||
be a structured data composed of arrays and maps of other values. [Optional].
|
||||
"""
|
||||
@property
|
||||
def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
|
||||
"""Additional attributes that describe the specific event occurrence. [Optional].
|
||||
Attribute keys MUST be unique (it is not allowed to have more than one
|
||||
attribute with the same key).
|
||||
"""
|
||||
dropped_attributes_count: builtins.int
|
||||
flags: builtins.int
|
||||
"""Flags, a bit field. 8 least significant bits are the trace flags as
|
||||
defined in W3C Trace Context specification. 24 most significant bits are reserved
|
||||
and must be set to 0. Readers must not assume that 24 most significant bits
|
||||
will be zero and must correctly mask the bits when reading 8-bit trace flag (use
|
||||
flags & LOG_RECORD_FLAGS_TRACE_FLAGS_MASK). [Optional].
|
||||
"""
|
||||
trace_id: builtins.bytes
|
||||
"""A unique identifier for a trace. All logs from the same trace share
|
||||
the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes OR
|
||||
of length other than 16 bytes is considered invalid (empty string in OTLP/JSON
|
||||
is zero-length and thus is also invalid).
|
||||
|
||||
This field is optional.
|
||||
|
||||
The receivers SHOULD assume that the log record is not associated with a
|
||||
trace if any of the following is true:
|
||||
- the field is not present,
|
||||
- the field contains an invalid value.
|
||||
"""
|
||||
span_id: builtins.bytes
|
||||
"""A unique identifier for a span within a trace, assigned when the span
|
||||
is created. The ID is an 8-byte array. An ID with all zeroes OR of length
|
||||
other than 8 bytes is considered invalid (empty string in OTLP/JSON
|
||||
is zero-length and thus is also invalid).
|
||||
|
||||
This field is optional. If the sender specifies a valid span_id then it SHOULD also
|
||||
specify a valid trace_id.
|
||||
|
||||
The receivers SHOULD assume that the log record is not associated with a
|
||||
span if any of the following is true:
|
||||
- the field is not present,
|
||||
- the field contains an invalid value.
|
||||
"""
|
||||
event_name: builtins.str
|
||||
"""A unique identifier of event category/type.
|
||||
All events with the same event_name are expected to conform to the same
|
||||
schema for both their attributes and their body.
|
||||
|
||||
Recommended to be fully qualified and short (no longer than 256 characters).
|
||||
|
||||
Presence of event_name on the log record identifies this record
|
||||
as an event.
|
||||
|
||||
[Optional].
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
time_unix_nano: builtins.int = ...,
|
||||
observed_time_unix_nano: builtins.int = ...,
|
||||
severity_number: global___SeverityNumber.ValueType = ...,
|
||||
severity_text: builtins.str = ...,
|
||||
body: opentelemetry.proto.common.v1.common_pb2.AnyValue | None = ...,
|
||||
attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ...,
|
||||
dropped_attributes_count: builtins.int = ...,
|
||||
flags: builtins.int = ...,
|
||||
trace_id: builtins.bytes = ...,
|
||||
span_id: builtins.bytes = ...,
|
||||
event_name: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["body", b"body"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "body", b"body", "dropped_attributes_count", b"dropped_attributes_count", "event_name", b"event_name", "flags", b"flags", "observed_time_unix_nano", b"observed_time_unix_nano", "severity_number", b"severity_number", "severity_text", b"severity_text", "span_id", b"span_id", "time_unix_nano", b"time_unix_nano", "trace_id", b"trace_id"]) -> None: ...

global___LogRecord = LogRecord
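# --- Illustrative helper sketch (not part of the committed file) --------------------
# Reflects the recommendation in the observed_time_unix_nano comment above: prefer
# time_unix_nano and fall back to observed_time_unix_nano. The helper name is ours.
from opentelemetry.proto.logs.v1 import logs_pb2

def effective_time_unix_nano(record: logs_pb2.LogRecord) -> int:
    return record.time_unix_nano or record.observed_time_unix_nano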
Binary file not shown.
Binary file not shown.
Binary file not shown.
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -0,0 +1,55 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/profiles/v1development/profiles.proto
# Protobuf Python Version: 5.26.1
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import symbol_database as _symbol_database
from google.protobuf.internal import builder as _builder
# @@protoc_insertion_point(imports)

_sym_db = _symbol_database.Default()


from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2
from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2

DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n9opentelemetry/proto/profiles/v1development/profiles.proto\x12*opentelemetry.proto.profiles.v1development\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"\xee\x03\n\x12ProfilesDictionary\x12J\n\rmapping_table\x18\x01 \x03(\x0b\x32\x33.opentelemetry.proto.profiles.v1development.Mapping\x12L\n\x0elocation_table\x18\x02 \x03(\x0b\x32\x34.opentelemetry.proto.profiles.v1development.Location\x12L\n\x0e\x66unction_table\x18\x03 \x03(\x0b\x32\x34.opentelemetry.proto.profiles.v1development.Function\x12\x44\n\nlink_table\x18\x04 \x03(\x0b\x32\x30.opentelemetry.proto.profiles.v1development.Link\x12\x14\n\x0cstring_table\x18\x05 \x03(\t\x12@\n\x0f\x61ttribute_table\x18\x06 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12R\n\x0f\x61ttribute_units\x18\x07 \x03(\x0b\x32\x39.opentelemetry.proto.profiles.v1development.AttributeUnit\"\xbb\x01\n\x0cProfilesData\x12W\n\x11resource_profiles\x18\x01 \x03(\x0b\x32<.opentelemetry.proto.profiles.v1development.ResourceProfiles\x12R\n\ndictionary\x18\x02 \x01(\x0b\x32>.opentelemetry.proto.profiles.v1development.ProfilesDictionary\"\xbe\x01\n\x10ResourceProfiles\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12Q\n\x0escope_profiles\x18\x02 \x03(\x0b\x32\x39.opentelemetry.proto.profiles.v1development.ScopeProfiles\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\xae\x01\n\rScopeProfiles\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x45\n\x08profiles\x18\x02 \x03(\x0b\x32\x33.opentelemetry.proto.profiles.v1development.Profile\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x86\x04\n\x07Profile\x12J\n\x0bsample_type\x18\x01 \x03(\x0b\x32\x35.opentelemetry.proto.profiles.v1development.ValueType\x12\x42\n\x06sample\x18\x02 \x03(\x0b\x32\x32.opentelemetry.proto.profiles.v1development.Sample\x12\x18\n\x10location_indices\x18\x03 \x03(\x05\x12\x12\n\ntime_nanos\x18\x04 \x01(\x03\x12\x16\n\x0e\x64uration_nanos\x18\x05 \x01(\x03\x12J\n\x0bperiod_type\x18\x06 \x01(\x0b\x32\x35.opentelemetry.proto.profiles.v1development.ValueType\x12\x0e\n\x06period\x18\x07 \x01(\x03\x12\x1a\n\x12\x63omment_strindices\x18\x08 \x03(\x05\x12!\n\x19\x64\x65\x66\x61ult_sample_type_index\x18\t \x01(\x05\x12\x12\n\nprofile_id\x18\n \x01(\x0c\x12 \n\x18\x64ropped_attributes_count\x18\x0b \x01(\r\x12\x1f\n\x17original_payload_format\x18\x0c \x01(\t\x12\x18\n\x10original_payload\x18\r \x01(\x0c\x12\x19\n\x11\x61ttribute_indices\x18\x0e \x03(\x05\"F\n\rAttributeUnit\x12\x1e\n\x16\x61ttribute_key_strindex\x18\x01 \x01(\x05\x12\x15\n\runit_strindex\x18\x02 \x01(\x05\")\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\"\x9e\x01\n\tValueType\x12\x15\n\rtype_strindex\x18\x01 \x01(\x05\x12\x15\n\runit_strindex\x18\x02 \x01(\x05\x12\x63\n\x17\x61ggregation_temporality\x18\x03 \x01(\x0e\x32\x42.opentelemetry.proto.profiles.v1development.AggregationTemporality\"\xb1\x01\n\x06Sample\x12\x1d\n\x15locations_start_index\x18\x01 \x01(\x05\x12\x18\n\x10locations_length\x18\x02 \x01(\x05\x12\r\n\x05value\x18\x03 \x03(\x03\x12\x19\n\x11\x61ttribute_indices\x18\x04 \x03(\x05\x12\x17\n\nlink_index\x18\x05 \x01(\x05H\x00\x88\x01\x01\x12\x1c\n\x14timestamps_unix_nano\x18\x06 \x03(\x04\x42\r\n\x0b_link_index\"\xe3\x01\n\x07Mapping\x12\x14\n\x0cmemory_start\x18\x01 \x01(\x04\x12\x14\n\x0cmemory_limit\x18\x02 \x01(\x04\x12\x13\n\x0b\x66ile_offset\x18\x03 
\x01(\x04\x12\x19\n\x11\x66ilename_strindex\x18\x04 \x01(\x05\x12\x19\n\x11\x61ttribute_indices\x18\x05 \x03(\x05\x12\x15\n\rhas_functions\x18\x06 \x01(\x08\x12\x15\n\rhas_filenames\x18\x07 \x01(\x08\x12\x18\n\x10has_line_numbers\x18\x08 \x01(\x08\x12\x19\n\x11has_inline_frames\x18\t \x01(\x08\"\xb7\x01\n\x08Location\x12\x1a\n\rmapping_index\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x0f\n\x07\x61\x64\x64ress\x18\x02 \x01(\x04\x12>\n\x04line\x18\x03 \x03(\x0b\x32\x30.opentelemetry.proto.profiles.v1development.Line\x12\x11\n\tis_folded\x18\x04 \x01(\x08\x12\x19\n\x11\x61ttribute_indices\x18\x05 \x03(\x05\x42\x10\n\x0e_mapping_index\"<\n\x04Line\x12\x16\n\x0e\x66unction_index\x18\x01 \x01(\x05\x12\x0c\n\x04line\x18\x02 \x01(\x03\x12\x0e\n\x06\x63olumn\x18\x03 \x01(\x03\"n\n\x08\x46unction\x12\x15\n\rname_strindex\x18\x01 \x01(\x05\x12\x1c\n\x14system_name_strindex\x18\x02 \x01(\x05\x12\x19\n\x11\x66ilename_strindex\x18\x03 \x01(\x05\x12\x12\n\nstart_line\x18\x04 \x01(\x03*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02\x42\xa4\x01\n-io.opentelemetry.proto.profiles.v1developmentB\rProfilesProtoP\x01Z5go.opentelemetry.io/proto/otlp/profiles/v1development\xaa\x02*OpenTelemetry.Proto.Profiles.V1Developmentb\x06proto3')

_globals = globals()
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.profiles.v1development.profiles_pb2', _globals)
if not _descriptor._USE_C_DESCRIPTORS:
  _globals['DESCRIPTOR']._loaded_options = None
  _globals['DESCRIPTOR']._serialized_options = b'\n-io.opentelemetry.proto.profiles.v1developmentB\rProfilesProtoP\001Z5go.opentelemetry.io/proto/otlp/profiles/v1development\252\002*OpenTelemetry.Proto.Profiles.V1Development'
  _globals['_AGGREGATIONTEMPORALITY']._serialized_start=2822
  _globals['_AGGREGATIONTEMPORALITY']._serialized_end=2962
  _globals['_PROFILESDICTIONARY']._serialized_start=198
  _globals['_PROFILESDICTIONARY']._serialized_end=692
  _globals['_PROFILESDATA']._serialized_start=695
  _globals['_PROFILESDATA']._serialized_end=882
  _globals['_RESOURCEPROFILES']._serialized_start=885
  _globals['_RESOURCEPROFILES']._serialized_end=1075
  _globals['_SCOPEPROFILES']._serialized_start=1078
  _globals['_SCOPEPROFILES']._serialized_end=1252
  _globals['_PROFILE']._serialized_start=1255
  _globals['_PROFILE']._serialized_end=1773
  _globals['_ATTRIBUTEUNIT']._serialized_start=1775
  _globals['_ATTRIBUTEUNIT']._serialized_end=1845
  _globals['_LINK']._serialized_start=1847
  _globals['_LINK']._serialized_end=1888
  _globals['_VALUETYPE']._serialized_start=1891
  _globals['_VALUETYPE']._serialized_end=2049
  _globals['_SAMPLE']._serialized_start=2052
  _globals['_SAMPLE']._serialized_end=2229
  _globals['_MAPPING']._serialized_start=2232
  _globals['_MAPPING']._serialized_end=2459
  _globals['_LOCATION']._serialized_start=2462
  _globals['_LOCATION']._serialized_end=2645
  _globals['_LINE']._serialized_start=2647
  _globals['_LINE']._serialized_end=2707
  _globals['_FUNCTION']._serialized_start=2709
  _globals['_FUNCTION']._serialized_end=2819
# @@protoc_insertion_point(module_scope)
@@ -0,0 +1,865 @@
|
||||
"""
|
||||
@generated by mypy-protobuf. Do not edit manually!
|
||||
isort:skip_file
|
||||
Copyright 2023, OpenTelemetry Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
This file includes work covered by the following copyright and permission notices:
|
||||
|
||||
Copyright 2016 Google Inc. All Rights Reserved.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import builtins
|
||||
import collections.abc
|
||||
import google.protobuf.descriptor
|
||||
import google.protobuf.internal.containers
|
||||
import google.protobuf.internal.enum_type_wrapper
|
||||
import google.protobuf.message
|
||||
import opentelemetry.proto.common.v1.common_pb2
|
||||
import opentelemetry.proto.resource.v1.resource_pb2
|
||||
import sys
|
||||
import typing
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
import typing as typing_extensions
|
||||
else:
|
||||
import typing_extensions
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
||||
|
||||
class _AggregationTemporality:
|
||||
ValueType = typing.NewType("ValueType", builtins.int)
|
||||
V: typing_extensions.TypeAlias = ValueType
|
||||
|
||||
class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AggregationTemporality.ValueType], builtins.type):
|
||||
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
||||
AGGREGATION_TEMPORALITY_UNSPECIFIED: _AggregationTemporality.ValueType # 0
|
||||
"""UNSPECIFIED is the default AggregationTemporality, it MUST not be used."""
|
||||
AGGREGATION_TEMPORALITY_DELTA: _AggregationTemporality.ValueType # 1
|
||||
"""* DELTA is an AggregationTemporality for a profiler which reports
|
||||
changes since last report time. Successive metrics contain aggregation of
|
||||
values from continuous and non-overlapping intervals.
|
||||
|
||||
The values for a DELTA metric are based only on the time interval
|
||||
associated with one measurement cycle. There is no dependency on
|
||||
previous measurements like is the case for CUMULATIVE metrics.
|
||||
|
||||
For example, consider a system measuring the number of requests that
|
||||
it receives and reports the sum of these requests every second as a
|
||||
DELTA metric:
|
||||
|
||||
1. The system starts receiving at time=t_0.
|
||||
2. A request is received, the system measures 1 request.
|
||||
3. A request is received, the system measures 1 request.
|
||||
4. A request is received, the system measures 1 request.
|
||||
5. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0 to
|
||||
t_0+1 with a value of 3.
|
||||
6. A request is received, the system measures 1 request.
|
||||
7. A request is received, the system measures 1 request.
|
||||
8. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0+1 to
|
||||
t_0+2 with a value of 2.
|
||||
"""
|
||||
AGGREGATION_TEMPORALITY_CUMULATIVE: _AggregationTemporality.ValueType # 2
|
||||
"""* CUMULATIVE is an AggregationTemporality for a profiler which
|
||||
reports changes since a fixed start time. This means that current values
|
||||
of a CUMULATIVE metric depend on all previous measurements since the
|
||||
start time. Because of this, the sender is required to retain this state
|
||||
in some form. If this state is lost or invalidated, the CUMULATIVE metric
|
||||
values MUST be reset and a new fixed start time following the last
|
||||
reported measurement time sent MUST be used.
|
||||
|
||||
For example, consider a system measuring the number of requests that
|
||||
it receives and reports the sum of these requests every second as a
|
||||
CUMULATIVE metric:
|
||||
|
||||
1. The system starts receiving at time=t_0.
|
||||
2. A request is received, the system measures 1 request.
|
||||
3. A request is received, the system measures 1 request.
|
||||
4. A request is received, the system measures 1 request.
|
||||
5. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0 to
|
||||
t_0+1 with a value of 3.
|
||||
6. A request is received, the system measures 1 request.
|
||||
7. A request is received, the system measures 1 request.
|
||||
8. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0 to
|
||||
t_0+2 with a value of 5.
|
||||
9. The system experiences a fault and loses state.
|
||||
10. The system recovers and resumes receiving at time=t_1.
|
||||
11. A request is received, the system measures 1 request.
|
||||
12. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_1 to
|
||||
t_1+1 with a value of 1.
|
||||
|
||||
Note: Even though, when reporting changes since last report time, using
|
||||
CUMULATIVE is valid, it is not recommended.
|
||||
"""
|
||||
|
||||
class AggregationTemporality(_AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper):
|
||||
"""Specifies the method of aggregating metric values, either DELTA (change since last report)
|
||||
or CUMULATIVE (total since a fixed start time).
|
||||
"""
|
||||
|
||||
AGGREGATION_TEMPORALITY_UNSPECIFIED: AggregationTemporality.ValueType # 0
|
||||
"""UNSPECIFIED is the default AggregationTemporality, it MUST not be used."""
|
||||
AGGREGATION_TEMPORALITY_DELTA: AggregationTemporality.ValueType # 1
|
||||
"""* DELTA is an AggregationTemporality for a profiler which reports
|
||||
changes since last report time. Successive metrics contain aggregation of
|
||||
values from continuous and non-overlapping intervals.
|
||||
|
||||
The values for a DELTA metric are based only on the time interval
|
||||
associated with one measurement cycle. There is no dependency on
|
||||
previous measurements like is the case for CUMULATIVE metrics.
|
||||
|
||||
For example, consider a system measuring the number of requests that
|
||||
it receives and reports the sum of these requests every second as a
|
||||
DELTA metric:
|
||||
|
||||
1. The system starts receiving at time=t_0.
|
||||
2. A request is received, the system measures 1 request.
|
||||
3. A request is received, the system measures 1 request.
|
||||
4. A request is received, the system measures 1 request.
|
||||
5. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0 to
|
||||
t_0+1 with a value of 3.
|
||||
6. A request is received, the system measures 1 request.
|
||||
7. A request is received, the system measures 1 request.
|
||||
8. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0+1 to
|
||||
t_0+2 with a value of 2.
|
||||
"""
|
||||
AGGREGATION_TEMPORALITY_CUMULATIVE: AggregationTemporality.ValueType # 2
|
||||
"""* CUMULATIVE is an AggregationTemporality for a profiler which
|
||||
reports changes since a fixed start time. This means that current values
|
||||
of a CUMULATIVE metric depend on all previous measurements since the
|
||||
start time. Because of this, the sender is required to retain this state
|
||||
in some form. If this state is lost or invalidated, the CUMULATIVE metric
|
||||
values MUST be reset and a new fixed start time following the last
|
||||
reported measurement time sent MUST be used.
|
||||
|
||||
For example, consider a system measuring the number of requests that
|
||||
it receives and reports the sum of these requests every second as a
|
||||
CUMULATIVE metric:
|
||||
|
||||
1. The system starts receiving at time=t_0.
|
||||
2. A request is received, the system measures 1 request.
|
||||
3. A request is received, the system measures 1 request.
|
||||
4. A request is received, the system measures 1 request.
|
||||
5. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0 to
|
||||
t_0+1 with a value of 3.
|
||||
6. A request is received, the system measures 1 request.
|
||||
7. A request is received, the system measures 1 request.
|
||||
8. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_0 to
|
||||
t_0+2 with a value of 5.
|
||||
9. The system experiences a fault and loses state.
|
||||
10. The system recovers and resumes receiving at time=t_1.
|
||||
11. A request is received, the system measures 1 request.
|
||||
12. The 1 second collection cycle ends. A metric is exported for the
|
||||
number of requests received over the interval of time t_1 to
|
||||
t_1+1 with a value of 1.
|
||||
|
||||
Note: Even though, when reporting changes since last report time, using
|
||||
CUMULATIVE is valid, it is not recommended.
|
||||
"""
|
||||
global___AggregationTemporality = AggregationTemporality
|
||||
|
||||
@typing_extensions.final
|
||||
class ProfilesDictionary(google.protobuf.message.Message):
|
||||
""" Relationships Diagram
|
||||
|
||||
┌──────────────────┐ LEGEND
|
||||
│ ProfilesData │ ─────┐
|
||||
└──────────────────┘ │ ─────▶ embedded
|
||||
│ │
|
||||
│ 1-n │ ─────▷ referenced by index
|
||||
▼ ▼
|
||||
┌──────────────────┐ ┌────────────────────┐
|
||||
│ ResourceProfiles │ │ ProfilesDictionary │
|
||||
└──────────────────┘ └────────────────────┘
|
||||
│
|
||||
│ 1-n
|
||||
▼
|
||||
┌──────────────────┐
|
||||
│ ScopeProfiles │
|
||||
└──────────────────┘
|
||||
│
|
||||
│ 1-1
|
||||
▼
|
||||
┌──────────────────┐
|
||||
│ Profile │
|
||||
└──────────────────┘
|
||||
│ n-1
|
||||
│ 1-n ┌───────────────────────────────────────┐
|
||||
▼ │ ▽
|
||||
┌──────────────────┐ 1-n ┌──────────────┐ ┌──────────┐
|
||||
│ Sample │ ──────▷ │ KeyValue │ │ Link │
|
||||
└──────────────────┘ └──────────────┘ └──────────┘
|
||||
│ 1-n △ △
|
||||
│ 1-n ┌─────────────────┘ │ 1-n
|
||||
▽ │ │
|
||||
┌──────────────────┐ n-1 ┌──────────────┐
|
||||
│ Location │ ──────▷ │ Mapping │
|
||||
└──────────────────┘ └──────────────┘
|
||||
│
|
||||
│ 1-n
|
||||
▼
|
||||
┌──────────────────┐
|
||||
│ Line │
|
||||
└──────────────────┘
|
||||
│
|
||||
│ 1-1
|
||||
▽
|
||||
┌──────────────────┐
|
||||
│ Function │
|
||||
└──────────────────┘
|
||||
|
||||
ProfilesDictionary represents the profiles data shared across the
|
||||
entire message being sent.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
MAPPING_TABLE_FIELD_NUMBER: builtins.int
|
||||
LOCATION_TABLE_FIELD_NUMBER: builtins.int
|
||||
FUNCTION_TABLE_FIELD_NUMBER: builtins.int
|
||||
LINK_TABLE_FIELD_NUMBER: builtins.int
|
||||
STRING_TABLE_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTE_TABLE_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTE_UNITS_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def mapping_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Mapping]:
|
||||
"""Mappings from address ranges to the image/binary/library mapped
|
||||
into that address range referenced by locations via Location.mapping_index.
|
||||
"""
|
||||
@property
|
||||
def location_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Location]:
|
||||
"""Locations referenced by samples via Profile.location_indices."""
|
||||
@property
|
||||
def function_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Function]:
|
||||
"""Functions referenced by locations via Line.function_index."""
|
||||
@property
|
||||
def link_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Link]:
|
||||
"""Links referenced by samples via Sample.link_index."""
|
||||
@property
|
||||
def string_table(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
|
||||
"""A common table for strings referenced by various messages.
|
||||
string_table[0] must always be "".
|
||||
"""
|
||||
@property
|
||||
def attribute_table(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
|
||||
"""A common table for attributes referenced by various messages."""
|
||||
@property
|
||||
def attribute_units(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AttributeUnit]:
|
||||
"""Represents a mapping between Attribute Keys and Units."""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
mapping_table: collections.abc.Iterable[global___Mapping] | None = ...,
|
||||
location_table: collections.abc.Iterable[global___Location] | None = ...,
|
||||
function_table: collections.abc.Iterable[global___Function] | None = ...,
|
||||
link_table: collections.abc.Iterable[global___Link] | None = ...,
|
||||
string_table: collections.abc.Iterable[builtins.str] | None = ...,
|
||||
attribute_table: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ...,
|
||||
attribute_units: collections.abc.Iterable[global___AttributeUnit] | None = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attribute_table", b"attribute_table", "attribute_units", b"attribute_units", "function_table", b"function_table", "link_table", b"link_table", "location_table", b"location_table", "mapping_table", b"mapping_table", "string_table", b"string_table"]) -> None: ...
|
||||
|
||||
global___ProfilesDictionary = ProfilesDictionary
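
# Rough usage sketch (not part of the generated stub): the dictionary is built once per
# exported message and the other profile messages refer into its tables by index, with
# string_table[0] reserved for the empty string. The module path
# opentelemetry.proto.profiles.v1development.profiles_pb2 and all literal values below
# are illustrative assumptions.

from opentelemetry.proto.profiles.v1development import profiles_pb2

dictionary = profiles_pb2.ProfilesDictionary(
    string_table=["", "cpu", "nanoseconds", "main", "app.py"],  # index 0 must be ""
    function_table=[
        profiles_pb2.Function(name_strindex=3, filename_strindex=4, start_line=1),
    ],
    location_table=[
        profiles_pb2.Location(address=0x4005F0,
                              line=[profiles_pb2.Line(function_index=0, line=42)]),
    ],
)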
|
||||
|
||||
@typing_extensions.final
|
||||
class ProfilesData(google.protobuf.message.Message):
|
||||
"""ProfilesData represents the profiles data that can be stored in persistent storage,
|
||||
OR can be embedded by other protocols that transfer OTLP profiles data but do not
|
||||
implement the OTLP protocol.
|
||||
|
||||
The main difference between this message and collector protocol is that
|
||||
in this message there will not be any "control" or "metadata" specific to
|
||||
OTLP protocol.
|
||||
|
||||
When new fields are added into this message, the OTLP request MUST be updated
|
||||
as well.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
RESOURCE_PROFILES_FIELD_NUMBER: builtins.int
|
||||
DICTIONARY_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def resource_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceProfiles]:
|
||||
"""An array of ResourceProfiles.
|
||||
For data coming from an SDK profiler, this array will typically contain one
|
||||
element. Host-level profilers will usually create one ResourceProfile per
|
||||
container, as well as one additional ResourceProfile grouping all samples
|
||||
from non-containerized processes.
|
||||
Other resource groupings are possible as well and clarified via
|
||||
Resource.attributes and semantic conventions.
|
||||
"""
|
||||
@property
|
||||
def dictionary(self) -> global___ProfilesDictionary:
|
||||
"""One instance of ProfilesDictionary"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
resource_profiles: collections.abc.Iterable[global___ResourceProfiles] | None = ...,
|
||||
dictionary: global___ProfilesDictionary | None = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["dictionary", b"dictionary", "resource_profiles", b"resource_profiles"]) -> None: ...
|
||||
|
||||
global___ProfilesData = ProfilesData
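
# Illustrative sketch of the wiring described above (module path and values are
# assumptions): one ProfilesData carries the shared dictionary plus the
# resource -> scope -> profile hierarchy, and can be serialized for persistent storage.

from opentelemetry.proto.profiles.v1development import profiles_pb2

profiles_data = profiles_pb2.ProfilesData(
    dictionary=profiles_pb2.ProfilesDictionary(string_table=[""]),
    resource_profiles=[
        profiles_pb2.ResourceProfiles(
            scope_profiles=[
                profiles_pb2.ScopeProfiles(
                    profiles=[profiles_pb2.Profile(profile_id=b"\x01" * 16)],
                ),
            ],
        ),
    ],
)
serialized = profiles_data.SerializeToString()  # bytes suitable for storage or transport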
|
||||
|
||||
@typing_extensions.final
|
||||
class ResourceProfiles(google.protobuf.message.Message):
|
||||
"""A collection of ScopeProfiles from a Resource."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
RESOURCE_FIELD_NUMBER: builtins.int
|
||||
SCOPE_PROFILES_FIELD_NUMBER: builtins.int
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
|
||||
"""The resource for the profiles in this message.
|
||||
If this field is not set then no resource info is known.
|
||||
"""
|
||||
@property
|
||||
def scope_profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeProfiles]:
|
||||
"""A list of ScopeProfiles that originate from a resource."""
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the resource data
|
||||
is recorded in. Notably, the last part of the URL path is the version number of the
|
||||
schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
This schema_url applies to the data in the "resource" field. It does not apply
|
||||
to the data in the "scope_profiles" field which have their own schema_url field.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ...,
|
||||
scope_profiles: collections.abc.Iterable[global___ScopeProfiles] | None = ...,
|
||||
schema_url: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_profiles", b"scope_profiles"]) -> None: ...
|
||||
|
||||
global___ResourceProfiles = ResourceProfiles
|
||||
|
||||
@typing_extensions.final
|
||||
class ScopeProfiles(google.protobuf.message.Message):
|
||||
"""A collection of Profiles produced by an InstrumentationScope."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
SCOPE_FIELD_NUMBER: builtins.int
|
||||
PROFILES_FIELD_NUMBER: builtins.int
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope:
|
||||
"""The instrumentation scope information for the profiles in this message.
|
||||
Semantically when InstrumentationScope isn't set, it is equivalent with
|
||||
an empty instrumentation scope name (unknown).
|
||||
"""
|
||||
@property
|
||||
def profiles(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Profile]:
|
||||
"""A list of Profiles that originate from an instrumentation scope."""
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the profile data
|
||||
is recorded in. Notably, the last part of the URL path is the version number of the
|
||||
schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
This schema_url applies to all profiles in the "profiles" field.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ...,
|
||||
profiles: collections.abc.Iterable[global___Profile] | None = ...,
|
||||
schema_url: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["profiles", b"profiles", "schema_url", b"schema_url", "scope", b"scope"]) -> None: ...
|
||||
|
||||
global___ScopeProfiles = ScopeProfiles
|
||||
|
||||
@typing_extensions.final
|
||||
class Profile(google.protobuf.message.Message):
|
||||
"""Profile is a common stacktrace profile format.
|
||||
|
||||
Measurements represented with this format should follow the
|
||||
following conventions:
|
||||
|
||||
- Consumers should treat unset optional fields as if they had been
|
||||
set with their default value.
|
||||
|
||||
- When possible, measurements should be stored in "unsampled" form
|
||||
that is most useful to humans. There should be enough
|
||||
information present to determine the original sampled values.
|
||||
|
||||
- On-disk, the serialized proto must be gzip-compressed.
|
||||
|
||||
- The profile is represented as a set of samples, where each sample
|
||||
references a sequence of locations, and where each location belongs
|
||||
to a mapping.
|
||||
- There is a N->1 relationship from sample.location_id entries to
|
||||
locations. For every sample.location_id entry there must be a
|
||||
unique Location with that index.
|
||||
- There is an optional N->1 relationship from locations to
|
||||
mappings. For every nonzero Location.mapping_id there must be a
|
||||
unique Mapping with that index.
|
||||
|
||||
Represents a complete profile, including sample types, samples,
|
||||
mappings to binaries, locations, functions, string table, and additional metadata.
|
||||
It modifies and annotates pprof Profile with OpenTelemetry specific fields.
|
||||
|
||||
Note that whilst fields in this message retain the name and field id from pprof in most cases
|
||||
for ease of understanding data migration, it is not intended that pprof:Profile and
|
||||
OpenTelemetry:Profile encoding be wire compatible.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
SAMPLE_TYPE_FIELD_NUMBER: builtins.int
|
||||
SAMPLE_FIELD_NUMBER: builtins.int
|
||||
LOCATION_INDICES_FIELD_NUMBER: builtins.int
|
||||
TIME_NANOS_FIELD_NUMBER: builtins.int
|
||||
DURATION_NANOS_FIELD_NUMBER: builtins.int
|
||||
PERIOD_TYPE_FIELD_NUMBER: builtins.int
|
||||
PERIOD_FIELD_NUMBER: builtins.int
|
||||
COMMENT_STRINDICES_FIELD_NUMBER: builtins.int
|
||||
DEFAULT_SAMPLE_TYPE_INDEX_FIELD_NUMBER: builtins.int
|
||||
PROFILE_ID_FIELD_NUMBER: builtins.int
|
||||
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
|
||||
ORIGINAL_PAYLOAD_FORMAT_FIELD_NUMBER: builtins.int
|
||||
ORIGINAL_PAYLOAD_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def sample_type(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ValueType]:
|
||||
"""A description of the samples associated with each Sample.value.
|
||||
For a cpu profile this might be:
|
||||
[["cpu","nanoseconds"]] or [["wall","seconds"]] or [["syscall","count"]]
|
||||
For a heap profile, this might be:
|
||||
[["allocations","count"], ["space","bytes"]],
|
||||
If one of the values represents the number of events represented
|
||||
by the sample, by convention it should be at index 0 and use
|
||||
sample_type.unit == "count".
|
||||
"""
|
||||
@property
|
||||
def sample(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Sample]:
|
||||
"""The set of samples recorded in this profile."""
|
||||
@property
|
||||
def location_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""References to locations in ProfilesDictionary.location_table."""
|
||||
time_nanos: builtins.int
|
||||
"""The following fields 4-14 are informational, do not affect
|
||||
interpretation of results.
|
||||
|
||||
Time of collection (UTC) represented as nanoseconds past the epoch.
|
||||
"""
|
||||
duration_nanos: builtins.int
|
||||
"""Duration of the profile, if a duration makes sense."""
|
||||
@property
|
||||
def period_type(self) -> global___ValueType:
|
||||
"""The kind of events between sampled occurrences.
|
||||
e.g [ "cpu","cycles" ] or [ "heap","bytes" ]
|
||||
"""
|
||||
period: builtins.int
|
||||
"""The number of events between sampled occurrences."""
|
||||
@property
|
||||
def comment_strindices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""Free-form text associated with the profile. The text is displayed as is
|
||||
to the user by the tools that read profiles (e.g. by pprof). This field
|
||||
should not be used to store any machine-readable information, it is only
|
||||
for human-friendly content. The profile must stay functional if this field
|
||||
is cleaned.
|
||||
Indices into ProfilesDictionary.string_table.
|
||||
"""
|
||||
default_sample_type_index: builtins.int
|
||||
"""Index into the sample_type array to the default sample type."""
|
||||
profile_id: builtins.bytes
|
||||
"""A globally unique identifier for a profile. The ID is a 16-byte array. An ID with
|
||||
all zeroes is considered invalid.
|
||||
|
||||
This field is required.
|
||||
"""
|
||||
dropped_attributes_count: builtins.int
|
||||
"""dropped_attributes_count is the number of attributes that were discarded. Attributes
|
||||
can be discarded because their keys are too long or because there are too many
|
||||
attributes. If this value is 0, then no attributes were dropped.
|
||||
"""
|
||||
original_payload_format: builtins.str
|
||||
"""Specifies format of the original payload. Common values are defined in semantic conventions. [required if original_payload is present]"""
|
||||
original_payload: builtins.bytes
|
||||
"""Original payload can be stored in this field. This can be useful for users who want to get the original payload.
|
||||
Formats such as JFR are highly extensible and can contain more information than what is defined in this spec.
|
||||
Inclusion of original payload should be configurable by the user. Default behavior should be to not include the original payload.
|
||||
If the original payload is in pprof format, it SHOULD not be included in this field.
|
||||
The field is optional, however if it is present then equivalent converted data should be populated in other fields
|
||||
of this message as far as is practicable.
|
||||
"""
|
||||
@property
|
||||
def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""References to attributes in attribute_table. [optional]
|
||||
It is a collection of key/value pairs. Note, global attributes
|
||||
like server name can be set using the resource API. Examples of attributes:
|
||||
|
||||
"/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
|
||||
"/http/server_latency": 300
|
||||
"abc.com/myattribute": true
|
||||
"abc.com/score": 10.239
|
||||
|
||||
The OpenTelemetry API specification further restricts the allowed value types:
|
||||
https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/common/README.md#attribute
|
||||
Attribute keys MUST be unique (it is not allowed to have more than one
|
||||
attribute with the same key).
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
sample_type: collections.abc.Iterable[global___ValueType] | None = ...,
|
||||
sample: collections.abc.Iterable[global___Sample] | None = ...,
|
||||
location_indices: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
time_nanos: builtins.int = ...,
|
||||
duration_nanos: builtins.int = ...,
|
||||
period_type: global___ValueType | None = ...,
|
||||
period: builtins.int = ...,
|
||||
comment_strindices: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
default_sample_type_index: builtins.int = ...,
|
||||
profile_id: builtins.bytes = ...,
|
||||
dropped_attributes_count: builtins.int = ...,
|
||||
original_payload_format: builtins.str = ...,
|
||||
original_payload: builtins.bytes = ...,
|
||||
attribute_indices: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["period_type", b"period_type"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attribute_indices", b"attribute_indices", "comment_strindices", b"comment_strindices", "default_sample_type_index", b"default_sample_type_index", "dropped_attributes_count", b"dropped_attributes_count", "duration_nanos", b"duration_nanos", "location_indices", b"location_indices", "original_payload", b"original_payload", "original_payload_format", b"original_payload_format", "period", b"period", "period_type", b"period_type", "profile_id", b"profile_id", "sample", b"sample", "sample_type", b"sample_type", "time_nanos", b"time_nanos"]) -> None: ...
|
||||
|
||||
global___Profile = Profile
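
# Hedged sketch of a CPU profile following the conventions above (module path and every
# literal value are assumptions): the string indices refer into a
# ProfilesDictionary.string_table such as ["", "cpu", "nanoseconds"].

from opentelemetry.proto.profiles.v1development import profiles_pb2

profile = profiles_pb2.Profile(
    profile_id=b"\x00" * 15 + b"\x01",  # 16 bytes; an all-zero ID is invalid
    sample_type=[profiles_pb2.ValueType(type_strindex=1, unit_strindex=2)],
    period_type=profiles_pb2.ValueType(type_strindex=1, unit_strindex=2),
    period=10_000_000,                   # one sample per 10 ms of CPU time
    time_nanos=1_700_000_000_000_000_000,
    duration_nanos=30_000_000_000,
    location_indices=[0],                # indices into ProfilesDictionary.location_table
)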
|
||||
|
||||
@typing_extensions.final
|
||||
class AttributeUnit(google.protobuf.message.Message):
|
||||
"""Represents a mapping between Attribute Keys and Units."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
ATTRIBUTE_KEY_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
UNIT_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
attribute_key_strindex: builtins.int
|
||||
"""Index into string table."""
|
||||
unit_strindex: builtins.int
|
||||
"""Index into string table."""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
attribute_key_strindex: builtins.int = ...,
|
||||
unit_strindex: builtins.int = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attribute_key_strindex", b"attribute_key_strindex", "unit_strindex", b"unit_strindex"]) -> None: ...
|
||||
|
||||
global___AttributeUnit = AttributeUnit
|
||||
|
||||
@typing_extensions.final
|
||||
class Link(google.protobuf.message.Message):
|
||||
"""A pointer from a profile Sample to a trace Span.
|
||||
Connects a profile sample to a trace span, identified by unique trace and span IDs.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
TRACE_ID_FIELD_NUMBER: builtins.int
|
||||
SPAN_ID_FIELD_NUMBER: builtins.int
|
||||
trace_id: builtins.bytes
|
||||
"""A unique identifier of a trace that this linked span is part of. The ID is a
|
||||
16-byte array.
|
||||
"""
|
||||
span_id: builtins.bytes
|
||||
"""A unique identifier for the linked span. The ID is an 8-byte array."""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
trace_id: builtins.bytes = ...,
|
||||
span_id: builtins.bytes = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["span_id", b"span_id", "trace_id", b"trace_id"]) -> None: ...
|
||||
|
||||
global___Link = Link
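
# Illustrative sketch (module path and IDs are assumptions): a Link ties a profile
# Sample to a trace Span; the trace ID is 16 bytes and the span ID is 8 bytes.

from opentelemetry.proto.profiles.v1development import profiles_pb2

link = profiles_pb2.Link(
    trace_id=bytes.fromhex("5b8efff798038103d269b633813fc60c"),  # 16 bytes
    span_id=bytes.fromhex("eee19b7ec3c1b174"),                   # 8 bytes
)
# A Sample then refers to this entry by its position in ProfilesDictionary.link_table
# via Sample.link_index.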
|
||||
|
||||
@typing_extensions.final
|
||||
class ValueType(google.protobuf.message.Message):
|
||||
"""ValueType describes the type and units of a value, with an optional aggregation temporality."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
TYPE_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
UNIT_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int
|
||||
type_strindex: builtins.int
|
||||
"""Index into ProfilesDictionary.string_table."""
|
||||
unit_strindex: builtins.int
|
||||
"""Index into ProfilesDictionary.string_table."""
|
||||
aggregation_temporality: global___AggregationTemporality.ValueType
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
type_strindex: builtins.int = ...,
|
||||
unit_strindex: builtins.int = ...,
|
||||
aggregation_temporality: global___AggregationTemporality.ValueType = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality", b"aggregation_temporality", "type_strindex", b"type_strindex", "unit_strindex", b"unit_strindex"]) -> None: ...
|
||||
|
||||
global___ValueType = ValueType
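
# Minimal sketch tying ValueType to the AggregationTemporality enum documented earlier
# in this file (module path, value name prefix, and indices are assumptions): a
# "cpu nanoseconds" value reported as a delta since the previous profile.

from opentelemetry.proto.profiles.v1development import profiles_pb2

value_type = profiles_pb2.ValueType(
    type_strindex=1,   # e.g. "cpu" in ProfilesDictionary.string_table
    unit_strindex=2,   # e.g. "nanoseconds"
    aggregation_temporality=profiles_pb2.AggregationTemporality.AGGREGATION_TEMPORALITY_DELTA,
)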
|
||||
|
||||
@typing_extensions.final
|
||||
class Sample(google.protobuf.message.Message):
|
||||
"""Each Sample records values encountered in some program
|
||||
context. The program context is typically a stack trace, perhaps
|
||||
augmented with auxiliary information like the thread-id, some
|
||||
indicator of a higher level request being handled etc.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
LOCATIONS_START_INDEX_FIELD_NUMBER: builtins.int
|
||||
LOCATIONS_LENGTH_FIELD_NUMBER: builtins.int
|
||||
VALUE_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int
|
||||
LINK_INDEX_FIELD_NUMBER: builtins.int
|
||||
TIMESTAMPS_UNIX_NANO_FIELD_NUMBER: builtins.int
|
||||
locations_start_index: builtins.int
|
||||
"""locations_start_index along with locations_length refers to to a slice of locations in Profile.location_indices."""
|
||||
locations_length: builtins.int
|
||||
"""locations_length along with locations_start_index refers to a slice of locations in Profile.location_indices.
|
||||
Supersedes location_index.
|
||||
"""
|
||||
@property
|
||||
def value(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""The type and unit of each value is defined by the corresponding
|
||||
entry in Profile.sample_type. All samples must have the same
|
||||
number of values, the same as the length of Profile.sample_type.
|
||||
When aggregating multiple samples into a single sample, the
|
||||
result has a list of values that is the element-wise sum of the
|
||||
lists of the originals.
|
||||
"""
|
||||
@property
|
||||
def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""References to attributes in ProfilesDictionary.attribute_table. [optional]"""
|
||||
link_index: builtins.int
|
||||
"""Reference to link in ProfilesDictionary.link_table. [optional]"""
|
||||
@property
|
||||
def timestamps_unix_nano(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""Timestamps associated with Sample represented in nanoseconds. These timestamps are expected
|
||||
to fall within the Profile's time range. [optional]
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
locations_start_index: builtins.int = ...,
|
||||
locations_length: builtins.int = ...,
|
||||
value: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
attribute_indices: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
link_index: builtins.int | None = ...,
|
||||
timestamps_unix_nano: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["_link_index", b"_link_index", "link_index", b"link_index"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["_link_index", b"_link_index", "attribute_indices", b"attribute_indices", "link_index", b"link_index", "locations_length", b"locations_length", "locations_start_index", b"locations_start_index", "timestamps_unix_nano", b"timestamps_unix_nano", "value", b"value"]) -> None: ...
|
||||
def WhichOneof(self, oneof_group: typing_extensions.Literal["_link_index", b"_link_index"]) -> typing_extensions.Literal["link_index"] | None: ...
|
||||
|
||||
global___Sample = Sample
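
# Illustrative sketch of the aggregation rule described above (module path and values
# are assumptions): samples for the same stack slice are merged by taking the
# element-wise sum of their value lists.

from opentelemetry.proto.profiles.v1development import profiles_pb2

s1 = profiles_pb2.Sample(locations_start_index=0, locations_length=3, value=[120])
s2 = profiles_pb2.Sample(locations_start_index=0, locations_length=3, value=[80])

merged = profiles_pb2.Sample(
    locations_start_index=0,
    locations_length=3,
    value=[a + b for a, b in zip(s1.value, s2.value)],  # [200]
)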
|
||||
|
||||
@typing_extensions.final
|
||||
class Mapping(google.protobuf.message.Message):
|
||||
"""Describes the mapping of a binary in memory, including its address range,
|
||||
    file offset, and metadata like build ID.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
MEMORY_START_FIELD_NUMBER: builtins.int
|
||||
MEMORY_LIMIT_FIELD_NUMBER: builtins.int
|
||||
FILE_OFFSET_FIELD_NUMBER: builtins.int
|
||||
FILENAME_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int
|
||||
HAS_FUNCTIONS_FIELD_NUMBER: builtins.int
|
||||
HAS_FILENAMES_FIELD_NUMBER: builtins.int
|
||||
HAS_LINE_NUMBERS_FIELD_NUMBER: builtins.int
|
||||
HAS_INLINE_FRAMES_FIELD_NUMBER: builtins.int
|
||||
memory_start: builtins.int
|
||||
"""Address at which the binary (or DLL) is loaded into memory."""
|
||||
memory_limit: builtins.int
|
||||
"""The limit of the address range occupied by this mapping."""
|
||||
file_offset: builtins.int
|
||||
"""Offset in the binary that corresponds to the first mapped address."""
|
||||
filename_strindex: builtins.int
|
||||
"""The object this entry is loaded from. This can be a filename on
|
||||
disk for the main binary and shared libraries, or virtual
|
||||
abstractions like "[vdso]".
|
||||
Index into ProfilesDictionary.string_table.
|
||||
"""
|
||||
@property
|
||||
def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""References to attributes in ProfilesDictionary.attribute_table. [optional]"""
|
||||
has_functions: builtins.bool
|
||||
"""The following fields indicate the resolution of symbolic info."""
|
||||
has_filenames: builtins.bool
|
||||
has_line_numbers: builtins.bool
|
||||
has_inline_frames: builtins.bool
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
memory_start: builtins.int = ...,
|
||||
memory_limit: builtins.int = ...,
|
||||
file_offset: builtins.int = ...,
|
||||
filename_strindex: builtins.int = ...,
|
||||
attribute_indices: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
has_functions: builtins.bool = ...,
|
||||
has_filenames: builtins.bool = ...,
|
||||
has_line_numbers: builtins.bool = ...,
|
||||
has_inline_frames: builtins.bool = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attribute_indices", b"attribute_indices", "file_offset", b"file_offset", "filename_strindex", b"filename_strindex", "has_filenames", b"has_filenames", "has_functions", b"has_functions", "has_inline_frames", b"has_inline_frames", "has_line_numbers", b"has_line_numbers", "memory_limit", b"memory_limit", "memory_start", b"memory_start"]) -> None: ...
|
||||
|
||||
global___Mapping = Mapping
|
||||
|
||||
@typing_extensions.final
|
||||
class Location(google.protobuf.message.Message):
|
||||
"""Describes function and line table debug information."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
MAPPING_INDEX_FIELD_NUMBER: builtins.int
|
||||
ADDRESS_FIELD_NUMBER: builtins.int
|
||||
LINE_FIELD_NUMBER: builtins.int
|
||||
IS_FOLDED_FIELD_NUMBER: builtins.int
|
||||
ATTRIBUTE_INDICES_FIELD_NUMBER: builtins.int
|
||||
mapping_index: builtins.int
|
||||
"""Reference to mapping in ProfilesDictionary.mapping_table.
|
||||
It can be unset if the mapping is unknown or not applicable for
|
||||
this profile type.
|
||||
"""
|
||||
address: builtins.int
|
||||
"""The instruction address for this location, if available. It
|
||||
should be within [Mapping.memory_start...Mapping.memory_limit]
|
||||
for the corresponding mapping. A non-leaf address may be in the
|
||||
middle of a call instruction. It is up to display tools to find
|
||||
the beginning of the instruction if necessary.
|
||||
"""
|
||||
@property
|
||||
def line(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Line]:
|
||||
"""Multiple line indicates this location has inlined functions,
|
||||
where the last entry represents the caller into which the
|
||||
preceding entries were inlined.
|
||||
|
||||
E.g., if memcpy() is inlined into printf:
|
||||
line[0].function_name == "memcpy"
|
||||
line[1].function_name == "printf"
|
||||
"""
|
||||
is_folded: builtins.bool
|
||||
"""Provides an indication that multiple symbols map to this location's
|
||||
address, for example due to identical code folding by the linker. In that
|
||||
case the line information above represents one of the multiple
|
||||
symbols. This field must be recomputed when the symbolization state of the
|
||||
profile changes.
|
||||
"""
|
||||
@property
|
||||
def attribute_indices(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
|
||||
"""References to attributes in ProfilesDictionary.attribute_table. [optional]"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
mapping_index: builtins.int | None = ...,
|
||||
address: builtins.int = ...,
|
||||
line: collections.abc.Iterable[global___Line] | None = ...,
|
||||
is_folded: builtins.bool = ...,
|
||||
attribute_indices: collections.abc.Iterable[builtins.int] | None = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["_mapping_index", b"_mapping_index", "mapping_index", b"mapping_index"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["_mapping_index", b"_mapping_index", "address", b"address", "attribute_indices", b"attribute_indices", "is_folded", b"is_folded", "line", b"line", "mapping_index", b"mapping_index"]) -> None: ...
|
||||
def WhichOneof(self, oneof_group: typing_extensions.Literal["_mapping_index", b"_mapping_index"]) -> typing_extensions.Literal["mapping_index"] | None: ...
|
||||
|
||||
global___Location = Location
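
# Sketch of the inlining convention above (module path, indices, and addresses are
# assumptions): with memcpy() inlined into printf(), line[0] is the inlined callee and
# the last entry is the caller; function_index values point into
# ProfilesDictionary.function_table (assumed here to hold memcpy at 0 and printf at 1).

from opentelemetry.proto.profiles.v1development import profiles_pb2

location = profiles_pb2.Location(
    mapping_index=0,
    address=0x7F3A_9C01_2B40,
    line=[
        profiles_pb2.Line(function_index=0, line=243),  # memcpy (inlined)
        profiles_pb2.Line(function_index=1, line=51),   # printf (caller)
    ],
)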
|
||||
|
||||
@typing_extensions.final
|
||||
class Line(google.protobuf.message.Message):
|
||||
"""Details a specific line in a source code, linked to a function."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
FUNCTION_INDEX_FIELD_NUMBER: builtins.int
|
||||
LINE_FIELD_NUMBER: builtins.int
|
||||
COLUMN_FIELD_NUMBER: builtins.int
|
||||
function_index: builtins.int
|
||||
"""Reference to function in ProfilesDictionary.function_table."""
|
||||
line: builtins.int
|
||||
"""Line number in source code. 0 means unset."""
|
||||
column: builtins.int
|
||||
"""Column number in source code. 0 means unset."""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
function_index: builtins.int = ...,
|
||||
line: builtins.int = ...,
|
||||
column: builtins.int = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["column", b"column", "function_index", b"function_index", "line", b"line"]) -> None: ...
|
||||
|
||||
global___Line = Line
|
||||
|
||||
@typing_extensions.final
|
||||
class Function(google.protobuf.message.Message):
|
||||
"""Describes a function, including its human-readable name, system name,
|
||||
source file, and starting line number in the source.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
NAME_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
SYSTEM_NAME_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
FILENAME_STRINDEX_FIELD_NUMBER: builtins.int
|
||||
START_LINE_FIELD_NUMBER: builtins.int
|
||||
name_strindex: builtins.int
|
||||
"""Function name. Empty string if not available."""
|
||||
system_name_strindex: builtins.int
|
||||
"""Function name, as identified by the system. For instance,
|
||||
it can be a C++ mangled name. Empty string if not available.
|
||||
"""
|
||||
filename_strindex: builtins.int
|
||||
"""Source file containing the function. Empty string if not available."""
|
||||
start_line: builtins.int
|
||||
"""Line number in source file. 0 means unset."""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
name_strindex: builtins.int = ...,
|
||||
system_name_strindex: builtins.int = ...,
|
||||
filename_strindex: builtins.int = ...,
|
||||
start_line: builtins.int = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["filename_strindex", b"filename_strindex", "name_strindex", b"name_strindex", "start_line", b"start_line", "system_name_strindex", b"system_name_strindex"]) -> None: ...
|
||||
|
||||
global___Function = Function
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,28 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: opentelemetry/proto/resource/v1/resource.proto
|
||||
# Protobuf Python Version: 5.26.1
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf.internal import builder as _builder
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto\"\xa8\x01\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\r\x12=\n\x0b\x65ntity_refs\x18\x03 \x03(\x0b\x32(.opentelemetry.proto.common.v1.EntityRefB\x83\x01\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z*go.opentelemetry.io/proto/otlp/resource/v1\xaa\x02\x1fOpenTelemetry.Proto.Resource.V1b\x06proto3')
|
||||
|
||||
_globals = globals()
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.resource.v1.resource_pb2', _globals)
|
||||
if not _descriptor._USE_C_DESCRIPTORS:
|
||||
_globals['DESCRIPTOR']._loaded_options = None
|
||||
_globals['DESCRIPTOR']._serialized_options = b'\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z*go.opentelemetry.io/proto/otlp/resource/v1\252\002\037OpenTelemetry.Proto.Resource.V1'
|
||||
_globals['_RESOURCE']._serialized_start=128
|
||||
_globals['_RESOURCE']._serialized_end=296
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
@@ -0,0 +1,69 @@
|
||||
"""
|
||||
@generated by mypy-protobuf. Do not edit manually!
|
||||
isort:skip_file
|
||||
Copyright 2019, OpenTelemetry Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import builtins
|
||||
import collections.abc
|
||||
import google.protobuf.descriptor
|
||||
import google.protobuf.internal.containers
|
||||
import google.protobuf.message
|
||||
import opentelemetry.proto.common.v1.common_pb2
|
||||
import sys
|
||||
|
||||
if sys.version_info >= (3, 8):
|
||||
import typing as typing_extensions
|
||||
else:
|
||||
import typing_extensions
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
||||
|
||||
@typing_extensions.final
|
||||
class Resource(google.protobuf.message.Message):
|
||||
"""Resource information."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
ATTRIBUTES_FIELD_NUMBER: builtins.int
|
||||
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
|
||||
ENTITY_REFS_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
|
||||
"""Set of attributes that describe the resource.
|
||||
Attribute keys MUST be unique (it is not allowed to have more than one
|
||||
attribute with the same key).
|
||||
"""
|
||||
dropped_attributes_count: builtins.int
|
||||
"""dropped_attributes_count is the number of dropped attributes. If the value is 0, then
|
||||
no attributes were dropped.
|
||||
"""
|
||||
@property
|
||||
def entity_refs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.EntityRef]:
|
||||
"""Set of entities that participate in this Resource.
|
||||
|
||||
Note: keys in the references MUST exist in attributes of this message.
|
||||
|
||||
Status: [Development]
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ...,
|
||||
dropped_attributes_count: builtins.int = ...,
|
||||
entity_refs: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.EntityRef] | None = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "entity_refs", b"entity_refs"]) -> None: ...
|
||||
|
||||
global___Resource = Resource
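
# Minimal usage sketch (values are assumptions; KeyValue/AnyValue come from the
# common_pb2 module already imported by this stub): resource attributes are KeyValue
# pairs and every key must be unique.

from opentelemetry.proto.common.v1 import common_pb2
from opentelemetry.proto.resource.v1 import resource_pb2

resource = resource_pb2.Resource(
    attributes=[
        common_pb2.KeyValue(key="service.name",
                            value=common_pb2.AnyValue(string_value="checkout")),
        common_pb2.KeyValue(key="host.name",
                            value=common_pb2.AnyValue(string_value="web-01")),
    ],
    dropped_attributes_count=0,
)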
|
||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -0,0 +1,47 @@
|
||||
# -*- coding: utf-8 -*-
|
||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
||||
# source: opentelemetry/proto/trace/v1/trace.proto
|
||||
# Protobuf Python Version: 5.26.1
|
||||
"""Generated protocol buffer code."""
|
||||
from google.protobuf import descriptor as _descriptor
|
||||
from google.protobuf import descriptor_pool as _descriptor_pool
|
||||
from google.protobuf import symbol_database as _symbol_database
|
||||
from google.protobuf.internal import builder as _builder
|
||||
# @@protoc_insertion_point(imports)
|
||||
|
||||
_sym_db = _symbol_database.Default()
|
||||
|
||||
|
||||
from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2
|
||||
from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2
|
||||
|
||||
|
||||
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"Q\n\nTracesData\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\xa7\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12=\n\x0bscope_spans\x18\x02 \x03(\x0b\x32(.opentelemetry.proto.trace.v1.ScopeSpans\x12\x12\n\nschema_url\x18\x03 \x01(\tJ\x06\x08\xe8\x07\x10\xe9\x07\"\x97\x01\n\nScopeSpans\x12\x42\n\x05scope\x18\x01 \x01(\x0b\x32\x33.opentelemetry.proto.common.v1.InstrumentationScope\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\x84\x08\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\r\n\x05\x66lags\x18\x10 \x01(\x07\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\xac\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\x12\r\n\x05\x66lags\x18\x06 \x01(\x07\"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05\"\xae\x01\n\x06Status\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode\"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02J\x04\x08\x01\x10\x02*\x9c\x01\n\tSpanFlags\x12\x19\n\x15SPAN_FLAGS_DO_NOT_USE\x10\x00\x12 \n\x1bSPAN_FLAGS_TRACE_FLAGS_MASK\x10\xff\x01\x12*\n%SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK\x10\x80\x02\x12&\n!SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK\x10\x80\x04\x42w\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z\'go.opentelemetry.io/proto/otlp/trace/v1\xaa\x02\x1cOpenTelemetry.Proto.Trace.V1b\x06proto3')
|
||||
|
||||
_globals = globals()
|
||||
_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals)
|
||||
_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'opentelemetry.proto.trace.v1.trace_pb2', _globals)
|
||||
if not _descriptor._USE_C_DESCRIPTORS:
|
||||
_globals['DESCRIPTOR']._loaded_options = None
|
||||
_globals['DESCRIPTOR']._serialized_options = b'\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z\'go.opentelemetry.io/proto/otlp/trace/v1\252\002\034OpenTelemetry.Proto.Trace.V1'
|
||||
_globals['_SPANFLAGS']._serialized_start=1782
|
||||
_globals['_SPANFLAGS']._serialized_end=1938
|
||||
_globals['_TRACESDATA']._serialized_start=166
|
||||
_globals['_TRACESDATA']._serialized_end=247
|
||||
_globals['_RESOURCESPANS']._serialized_start=250
|
||||
_globals['_RESOURCESPANS']._serialized_end=417
|
||||
_globals['_SCOPESPANS']._serialized_start=420
|
||||
_globals['_SCOPESPANS']._serialized_end=571
|
||||
_globals['_SPAN']._serialized_start=574
|
||||
_globals['_SPAN']._serialized_end=1602
|
||||
_globals['_SPAN_EVENT']._serialized_start=1131
|
||||
_globals['_SPAN_EVENT']._serialized_end=1271
|
||||
_globals['_SPAN_LINK']._serialized_start=1274
|
||||
_globals['_SPAN_LINK']._serialized_end=1446
|
||||
_globals['_SPAN_SPANKIND']._serialized_start=1449
|
||||
_globals['_SPAN_SPANKIND']._serialized_end=1602
|
||||
_globals['_STATUS']._serialized_start=1605
|
||||
_globals['_STATUS']._serialized_end=1779
|
||||
_globals['_STATUS_STATUSCODE']._serialized_start=1695
|
||||
_globals['_STATUS_STATUSCODE']._serialized_end=1773
|
||||
# @@protoc_insertion_point(module_scope)
|
||||
@@ -0,0 +1,584 @@
|
||||
"""
|
||||
@generated by mypy-protobuf. Do not edit manually!
|
||||
isort:skip_file
|
||||
Copyright 2019, OpenTelemetry Authors
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
"""
|
||||
import builtins
|
||||
import collections.abc
|
||||
import google.protobuf.descriptor
|
||||
import google.protobuf.internal.containers
|
||||
import google.protobuf.internal.enum_type_wrapper
|
||||
import google.protobuf.message
|
||||
import opentelemetry.proto.common.v1.common_pb2
|
||||
import opentelemetry.proto.resource.v1.resource_pb2
|
||||
import sys
|
||||
import typing
|
||||
|
||||
if sys.version_info >= (3, 10):
|
||||
import typing as typing_extensions
|
||||
else:
|
||||
import typing_extensions
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
|
||||
|
||||
class _SpanFlags:
|
||||
ValueType = typing.NewType("ValueType", builtins.int)
|
||||
V: typing_extensions.TypeAlias = ValueType
|
||||
|
||||
class _SpanFlagsEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SpanFlags.ValueType], builtins.type):
|
||||
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
||||
SPAN_FLAGS_DO_NOT_USE: _SpanFlags.ValueType # 0
|
||||
"""The zero value for the enum. Should not be used for comparisons.
|
||||
Instead use bitwise "and" with the appropriate mask as shown above.
|
||||
"""
|
||||
SPAN_FLAGS_TRACE_FLAGS_MASK: _SpanFlags.ValueType # 255
|
||||
"""Bits 0-7 are used for trace flags."""
|
||||
SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK: _SpanFlags.ValueType # 256
|
||||
"""Bits 8 and 9 are used to indicate that the parent span or link span is remote.
|
||||
Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known.
|
||||
Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote.
|
||||
"""
|
||||
SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK: _SpanFlags.ValueType # 512
|
||||
|
||||
class SpanFlags(_SpanFlags, metaclass=_SpanFlagsEnumTypeWrapper):
|
||||
"""SpanFlags represents constants used to interpret the
|
||||
Span.flags field, which is protobuf 'fixed32' type and is to
|
||||
be used as bit-fields. Each non-zero value defined in this enum is
|
||||
a bit-mask. To extract the bit-field, for example, use an
|
||||
expression like:
|
||||
|
||||
(span.flags & SPAN_FLAGS_TRACE_FLAGS_MASK)
|
||||
|
||||
See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.
|
||||
|
||||
Note that Span flags were introduced in version 1.1 of the
|
||||
OpenTelemetry protocol. Older Span producers do not set this
|
||||
field, consequently consumers should not rely on the absence of a
|
||||
particular flag bit to indicate the presence of a particular feature.
|
||||
"""
|
||||
|
||||
SPAN_FLAGS_DO_NOT_USE: SpanFlags.ValueType # 0
|
||||
"""The zero value for the enum. Should not be used for comparisons.
|
||||
Instead use bitwise "and" with the appropriate mask as shown above.
|
||||
"""
|
||||
SPAN_FLAGS_TRACE_FLAGS_MASK: SpanFlags.ValueType # 255
|
||||
"""Bits 0-7 are used for trace flags."""
|
||||
SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK: SpanFlags.ValueType # 256
|
||||
"""Bits 8 and 9 are used to indicate that the parent span or link span is remote.
|
||||
Bit 8 (`HAS_IS_REMOTE`) indicates whether the value is known.
|
||||
Bit 9 (`IS_REMOTE`) indicates whether the span or link is remote.
|
||||
"""
|
||||
SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK: SpanFlags.ValueType # 512
|
||||
global___SpanFlags = SpanFlags
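
# Sketch of the bit-field extraction described in the docstring above (the flags value
# is an assumption): 0x0300 has both the HAS_IS_REMOTE and IS_REMOTE bits set.

from opentelemetry.proto.trace.v1 import trace_pb2

span = trace_pb2.Span(flags=0x0300)

trace_flags = span.flags & trace_pb2.SpanFlags.SPAN_FLAGS_TRACE_FLAGS_MASK
parent_is_remote_known = bool(span.flags & trace_pb2.SpanFlags.SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK)
parent_is_remote = bool(span.flags & trace_pb2.SpanFlags.SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK)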
|
||||
|
||||
@typing_extensions.final
|
||||
class TracesData(google.protobuf.message.Message):
|
||||
"""TracesData represents the traces data that can be stored in a persistent storage,
|
||||
OR can be embedded by other protocols that transfer OTLP traces data but do
|
||||
not implement the OTLP protocol.
|
||||
|
||||
The main difference between this message and collector protocol is that
|
||||
in this message there will not be any "control" or "metadata" specific to
|
||||
OTLP protocol.
|
||||
|
||||
When new fields are added into this message, the OTLP request MUST be updated
|
||||
as well.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
RESOURCE_SPANS_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ResourceSpans]:
|
||||
"""An array of ResourceSpans.
|
||||
For data coming from a single resource this array will typically contain
|
||||
one element. Intermediary nodes that receive data from multiple origins
|
||||
typically batch the data before forwarding further and in that case this
|
||||
array will contain multiple elements.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
resource_spans: collections.abc.Iterable[global___ResourceSpans] | None = ...,
|
||||
) -> None: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["resource_spans", b"resource_spans"]) -> None: ...
|
||||
|
||||
global___TracesData = TracesData
|
||||
|
||||
@typing_extensions.final
|
||||
class ResourceSpans(google.protobuf.message.Message):
|
||||
"""A collection of ScopeSpans from a Resource."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
RESOURCE_FIELD_NUMBER: builtins.int
|
||||
SCOPE_SPANS_FIELD_NUMBER: builtins.int
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
|
||||
"""The resource for the spans in this message.
|
||||
If this field is not set then no resource info is known.
|
||||
"""
|
||||
@property
|
||||
def scope_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___ScopeSpans]:
|
||||
"""A list of ScopeSpans that originate from a resource."""
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the resource data
|
||||
is recorded in. Notably, the last part of the URL path is the version number of the
|
||||
schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
This schema_url applies to the data in the "resource" field. It does not apply
|
||||
to the data in the "scope_spans" field which have their own schema_url field.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
resource: opentelemetry.proto.resource.v1.resource_pb2.Resource | None = ...,
|
||||
scope_spans: collections.abc.Iterable[global___ScopeSpans] | None = ...,
|
||||
schema_url: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["resource", b"resource"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["resource", b"resource", "schema_url", b"schema_url", "scope_spans", b"scope_spans"]) -> None: ...
|
||||
|
||||
global___ResourceSpans = ResourceSpans
|
||||
|
||||
@typing_extensions.final
|
||||
class ScopeSpans(google.protobuf.message.Message):
|
||||
"""A collection of Spans produced by an InstrumentationScope."""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
SCOPE_FIELD_NUMBER: builtins.int
|
||||
SPANS_FIELD_NUMBER: builtins.int
|
||||
SCHEMA_URL_FIELD_NUMBER: builtins.int
|
||||
@property
|
||||
def scope(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationScope:
|
||||
"""The instrumentation scope information for the spans in this message.
|
||||
Semantically when InstrumentationScope isn't set, it is equivalent with
|
||||
an empty instrumentation scope name (unknown).
|
||||
"""
|
||||
@property
|
||||
def spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span]:
|
||||
"""A list of Spans that originate from an instrumentation scope."""
|
||||
schema_url: builtins.str
|
||||
"""The Schema URL, if known. This is the identifier of the Schema that the span data
|
||||
is recorded in. Notably, the last part of the URL path is the version number of the
|
||||
schema: http[s]://server[:port]/path/<version>. To learn more about Schema URL see
|
||||
https://opentelemetry.io/docs/specs/otel/schemas/#schema-url
|
||||
This schema_url applies to all spans and span events in the "spans" field.
|
||||
"""
|
||||
def __init__(
|
||||
self,
|
||||
*,
|
||||
scope: opentelemetry.proto.common.v1.common_pb2.InstrumentationScope | None = ...,
|
||||
spans: collections.abc.Iterable[global___Span] | None = ...,
|
||||
schema_url: builtins.str = ...,
|
||||
) -> None: ...
|
||||
def HasField(self, field_name: typing_extensions.Literal["scope", b"scope"]) -> builtins.bool: ...
|
||||
def ClearField(self, field_name: typing_extensions.Literal["schema_url", b"schema_url", "scope", b"scope", "spans", b"spans"]) -> None: ...
|
||||
|
||||
global___ScopeSpans = ScopeSpans
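
# Hedged sketch of the TracesData -> ResourceSpans -> ScopeSpans -> Span nesting
# (all IDs, names, and the scope version are assumptions; trace_pb2 here is the runtime
# module generated from this proto).

from opentelemetry.proto.common.v1 import common_pb2
from opentelemetry.proto.trace.v1 import trace_pb2

traces = trace_pb2.TracesData(
    resource_spans=[
        trace_pb2.ResourceSpans(
            scope_spans=[
                trace_pb2.ScopeSpans(
                    scope=common_pb2.InstrumentationScope(name="my.library", version="1.0.0"),
                    spans=[
                        trace_pb2.Span(
                            trace_id=bytes.fromhex("5b8efff798038103d269b633813fc60c"),
                            span_id=bytes.fromhex("eee19b7ec3c1b174"),
                            name="GET /checkout",
                            kind=trace_pb2.Span.SPAN_KIND_SERVER,
                        ),
                    ],
                ),
            ],
        ),
    ],
)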
|
||||
|
||||
@typing_extensions.final
|
||||
class Span(google.protobuf.message.Message):
|
||||
"""A Span represents a single operation performed by a single component of the system.
|
||||
|
||||
The next available field id is 17.
|
||||
"""
|
||||
|
||||
DESCRIPTOR: google.protobuf.descriptor.Descriptor
|
||||
|
||||
class _SpanKind:
|
||||
ValueType = typing.NewType("ValueType", builtins.int)
|
||||
V: typing_extensions.TypeAlias = ValueType
|
||||
|
||||
class _SpanKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Span._SpanKind.ValueType], builtins.type):
|
||||
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
|
||||
SPAN_KIND_UNSPECIFIED: Span._SpanKind.ValueType # 0
|
||||
"""Unspecified. Do NOT use as default.
|
||||
Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED.
|
||||
"""
|
||||
SPAN_KIND_INTERNAL: Span._SpanKind.ValueType # 1
|
||||
"""Indicates that the span represents an internal operation within an application,
|
||||
as opposed to an operation happening at the boundaries. Default value.
|
||||
"""
|
||||
SPAN_KIND_SERVER: Span._SpanKind.ValueType # 2
|
||||
"""Indicates that the span covers server-side handling of an RPC or other
|
||||
remote network request.
|
||||
"""
|
||||
SPAN_KIND_CLIENT: Span._SpanKind.ValueType # 3
|
||||
"""Indicates that the span describes a request to some remote service."""
|
||||
SPAN_KIND_PRODUCER: Span._SpanKind.ValueType # 4
|
||||
"""Indicates that the span describes a producer sending a message to a broker.
|
||||
Unlike CLIENT and SERVER, there is often no direct critical path latency relationship
|
||||
between producer and consumer spans. A PRODUCER span ends when the message was accepted
|
||||
by the broker while the logical processing of the message might span a much longer time.
|
||||
"""
|
||||
SPAN_KIND_CONSUMER: Span._SpanKind.ValueType # 5
|
||||
"""Indicates that the span describes consumer receiving a message from a broker.
|
||||
Like the PRODUCER kind, there is often no direct critical path latency relationship
|
||||
between producer and consumer spans.
|
||||
"""
|
||||
|
||||
class SpanKind(_SpanKind, metaclass=_SpanKindEnumTypeWrapper):
|
||||
"""SpanKind is the type of span. Can be used to specify additional relationships between spans
|
||||
in addition to a parent/child relationship.
|
||||
"""
|
||||
|
||||
SPAN_KIND_UNSPECIFIED: Span.SpanKind.ValueType # 0
|
||||
"""Unspecified. Do NOT use as default.
|
||||
Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED.
|
||||
"""
|
||||
SPAN_KIND_INTERNAL: Span.SpanKind.ValueType # 1
|
||||
"""Indicates that the span represents an internal operation within an application,
|
||||
as opposed to an operation happening at the boundaries. Default value.
|
||||
"""
|
||||
SPAN_KIND_SERVER: Span.SpanKind.ValueType # 2
|
||||
"""Indicates that the span covers server-side handling of an RPC or other
|
||||
remote network request.
|
||||
"""
|
||||
SPAN_KIND_CLIENT: Span.SpanKind.ValueType # 3
|
||||
"""Indicates that the span describes a request to some remote service."""
|
||||
SPAN_KIND_PRODUCER: Span.SpanKind.ValueType # 4
|
||||
"""Indicates that the span describes a producer sending a message to a broker.
|
||||
Unlike CLIENT and SERVER, there is often no direct critical path latency relationship
|
||||
between producer and consumer spans. A PRODUCER span ends when the message was accepted
|
||||
by the broker while the logical processing of the message might span a much longer time.
|
||||
"""
|
||||
SPAN_KIND_CONSUMER: Span.SpanKind.ValueType # 5
|
||||
"""Indicates that the span describes consumer receiving a message from a broker.
|
||||
Like the PRODUCER kind, there is often no direct critical path latency relationship
|
||||
between producer and consumer spans.
|
||||
"""
|
||||
|
||||
    @typing_extensions.final
    class Event(google.protobuf.message.Message):
        """Event is a time-stamped annotation of the span, consisting of user-supplied
        text description and key-value pairs.
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
        NAME_FIELD_NUMBER: builtins.int
        ATTRIBUTES_FIELD_NUMBER: builtins.int
        DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
        time_unix_nano: builtins.int
        """time_unix_nano is the time the event occurred."""
        name: builtins.str
        """name of the event.
        This field is semantically required to be set to non-empty string.
        """
        @property
        def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
            """attributes is a collection of attribute key/value pairs on the event.
            Attribute keys MUST be unique (it is not allowed to have more than one
            attribute with the same key).
            """
        dropped_attributes_count: builtins.int
        """dropped_attributes_count is the number of dropped attributes. If the value is 0,
        then no attributes were dropped.
        """
        def __init__(
            self,
            *,
            time_unix_nano: builtins.int = ...,
            name: builtins.str = ...,
            attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ...,
            dropped_attributes_count: builtins.int = ...,
        ) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "name", b"name", "time_unix_nano", b"time_unix_nano"]) -> None: ...

    @typing_extensions.final
    class Link(google.protobuf.message.Message):
        """A pointer from the current span to another span in the same trace or in a
        different trace. For example, this can be used in batching operations,
        where a single batch handler processes multiple requests from different
        traces or when the handler receives a request from a different project.
        """

        DESCRIPTOR: google.protobuf.descriptor.Descriptor

        TRACE_ID_FIELD_NUMBER: builtins.int
        SPAN_ID_FIELD_NUMBER: builtins.int
        TRACE_STATE_FIELD_NUMBER: builtins.int
        ATTRIBUTES_FIELD_NUMBER: builtins.int
        DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
        FLAGS_FIELD_NUMBER: builtins.int
        trace_id: builtins.bytes
        """A unique identifier of a trace that this linked span is part of. The ID is a
        16-byte array.
        """
        span_id: builtins.bytes
        """A unique identifier for the linked span. The ID is an 8-byte array."""
        trace_state: builtins.str
        """The trace_state associated with the link."""
        @property
        def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
            """attributes is a collection of attribute key/value pairs on the link.
            Attribute keys MUST be unique (it is not allowed to have more than one
            attribute with the same key).
            """
        dropped_attributes_count: builtins.int
        """dropped_attributes_count is the number of dropped attributes. If the value is 0,
        then no attributes were dropped.
        """
        flags: builtins.int
        """Flags, a bit field.

        Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace
        Context specification. To read the 8-bit W3C trace flag, use
        `flags & SPAN_FLAGS_TRACE_FLAGS_MASK`.

        See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.

        Bits 8 and 9 represent the 3 states of whether the link is remote.
        The states are (unknown, is not remote, is remote).
        To read whether the value is known, use `(flags & SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK) != 0`.
        To read whether the link is remote, use `(flags & SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK) != 0`.

        Readers MUST NOT assume that bits 10-31 (22 most significant bits) will be zero.
        When creating new spans, bits 10-31 (most-significant 22-bits) MUST be zero.

        [Optional].
        """
        def __init__(
            self,
            *,
            trace_id: builtins.bytes = ...,
            span_id: builtins.bytes = ...,
            trace_state: builtins.str = ...,
            attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ...,
            dropped_attributes_count: builtins.int = ...,
            flags: builtins.int = ...,
        ) -> None: ...
        def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "flags", b"flags", "span_id", b"span_id", "trace_id", b"trace_id", "trace_state", b"trace_state"]) -> None: ...

    TRACE_ID_FIELD_NUMBER: builtins.int
    SPAN_ID_FIELD_NUMBER: builtins.int
    TRACE_STATE_FIELD_NUMBER: builtins.int
    PARENT_SPAN_ID_FIELD_NUMBER: builtins.int
    FLAGS_FIELD_NUMBER: builtins.int
    NAME_FIELD_NUMBER: builtins.int
    KIND_FIELD_NUMBER: builtins.int
    START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
    END_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
    ATTRIBUTES_FIELD_NUMBER: builtins.int
    DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
    EVENTS_FIELD_NUMBER: builtins.int
    DROPPED_EVENTS_COUNT_FIELD_NUMBER: builtins.int
    LINKS_FIELD_NUMBER: builtins.int
    DROPPED_LINKS_COUNT_FIELD_NUMBER: builtins.int
    STATUS_FIELD_NUMBER: builtins.int
    trace_id: builtins.bytes
    """A unique identifier for a trace. All spans from the same trace share
    the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes OR
    of length other than 16 bytes is considered invalid (empty string in OTLP/JSON
    is zero-length and thus is also invalid).

    This field is required.
    """
    span_id: builtins.bytes
    """A unique identifier for a span within a trace, assigned when the span
    is created. The ID is an 8-byte array. An ID with all zeroes OR of length
    other than 8 bytes is considered invalid (empty string in OTLP/JSON
    is zero-length and thus is also invalid).

    This field is required.
    """
    trace_state: builtins.str
    """trace_state conveys information about request position in multiple distributed tracing graphs.
    It is a trace_state in w3c-trace-context format: https://www.w3.org/TR/trace-context/#tracestate-header
    See also https://github.com/w3c/distributed-tracing for more details about this field.
    """
    parent_span_id: builtins.bytes
    """The `span_id` of this span's parent span. If this is a root span, then this
    field must be empty. The ID is an 8-byte array.
    """
    flags: builtins.int
    """Flags, a bit field.

    Bits 0-7 (8 least significant bits) are the trace flags as defined in W3C Trace
    Context specification. To read the 8-bit W3C trace flag, use
    `flags & SPAN_FLAGS_TRACE_FLAGS_MASK`.

    See https://www.w3.org/TR/trace-context-2/#trace-flags for the flag definitions.

    Bits 8 and 9 represent the 3 states of whether a span's parent
    is remote. The states are (unknown, is not remote, is remote).
    To read whether the value is known, use `(flags & SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK) != 0`.
    To read whether the span is remote, use `(flags & SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK) != 0`.

    When creating span messages, if the message is logically forwarded from another source
    with an equivalent flags fields (i.e., usually another OTLP span message), the field SHOULD
    be copied as-is. If creating from a source that does not have an equivalent flags field
    (such as a runtime representation of an OpenTelemetry span), the high 22 bits MUST
    be set to zero.
    Readers MUST NOT assume that bits 10-31 (22 most significant bits) will be zero.

    [Optional].
    """
    name: builtins.str
    """A description of the span's operation.

    For example, the name can be a qualified method name or a file name
    and a line number where the operation is called. A best practice is to use
    the same display name at the same call point in an application.
    This makes it easier to correlate spans in different traces.

    This field is semantically required to be set to non-empty string.
    Empty value is equivalent to an unknown span name.

    This field is required.
    """
    kind: global___Span.SpanKind.ValueType
    """Distinguishes between spans generated in a particular context. For example,
    two spans with the same name may be distinguished using `CLIENT` (caller)
    and `SERVER` (callee) to identify queueing latency associated with the span.
    """
    start_time_unix_nano: builtins.int
    """start_time_unix_nano is the start time of the span. On the client side, this is the time
    kept by the local machine where the span execution starts. On the server side, this
    is the time when the server's application handler starts running.
    Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.

    This field is semantically required and it is expected that end_time >= start_time.
    """
    end_time_unix_nano: builtins.int
    """end_time_unix_nano is the end time of the span. On the client side, this is the time
    kept by the local machine where the span execution ends. On the server side, this
    is the time when the server application handler stops running.
    Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.

    This field is semantically required and it is expected that end_time >= start_time.
    """
    @property
    def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
        """attributes is a collection of key/value pairs. Note, global attributes
        like server name can be set using the resource API. Examples of attributes:

            "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
            "/http/server_latency": 300
            "example.com/myattribute": true
            "example.com/score": 10.239

        The OpenTelemetry API specification further restricts the allowed value types:
        https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/common/README.md#attribute
        Attribute keys MUST be unique (it is not allowed to have more than one
        attribute with the same key).
        """
    dropped_attributes_count: builtins.int
    """dropped_attributes_count is the number of attributes that were discarded. Attributes
    can be discarded because their keys are too long or because there are too many
    attributes. If this value is 0, then no attributes were dropped.
    """
    @property
    def events(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Event]:
        """events is a collection of Event items."""
    dropped_events_count: builtins.int
    """dropped_events_count is the number of dropped events. If the value is 0, then no
    events were dropped.
    """
    @property
    def links(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Link]:
        """links is a collection of Links, which are references from this span to a span
        in the same or different trace.
        """
    dropped_links_count: builtins.int
    """dropped_links_count is the number of dropped links after the maximum size was
    enforced. If this value is 0, then no links were dropped.
    """
    @property
    def status(self) -> global___Status:
        """An optional final status for this span. Semantically when Status isn't set, it means
        span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0).
        """
    def __init__(
        self,
        *,
        trace_id: builtins.bytes = ...,
        span_id: builtins.bytes = ...,
        trace_state: builtins.str = ...,
        parent_span_id: builtins.bytes = ...,
        flags: builtins.int = ...,
        name: builtins.str = ...,
        kind: global___Span.SpanKind.ValueType = ...,
        start_time_unix_nano: builtins.int = ...,
        end_time_unix_nano: builtins.int = ...,
        attributes: collections.abc.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue] | None = ...,
        dropped_attributes_count: builtins.int = ...,
        events: collections.abc.Iterable[global___Span.Event] | None = ...,
        dropped_events_count: builtins.int = ...,
        links: collections.abc.Iterable[global___Span.Link] | None = ...,
        dropped_links_count: builtins.int = ...,
        status: global___Status | None = ...,
    ) -> None: ...
    def HasField(self, field_name: typing_extensions.Literal["status", b"status"]) -> builtins.bool: ...
    def ClearField(self, field_name: typing_extensions.Literal["attributes", b"attributes", "dropped_attributes_count", b"dropped_attributes_count", "dropped_events_count", b"dropped_events_count", "dropped_links_count", b"dropped_links_count", "end_time_unix_nano", b"end_time_unix_nano", "events", b"events", "flags", b"flags", "kind", b"kind", "links", b"links", "name", b"name", "parent_span_id", b"parent_span_id", "span_id", b"span_id", "start_time_unix_nano", b"start_time_unix_nano", "status", b"status", "trace_id", b"trace_id", "trace_state", b"trace_state"]) -> None: ...

global___Span = Span

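# ---------------------------------------------------------------------------
# Editor's sketch, not part of the generated stub: one way the Span, Event and
# Status messages typed above might be constructed and round-tripped. It
# assumes the compiled modules shipped by this vendored opentelemetry-proto
# package are importable; the span and event names are made up for the example.
import os
import time

from opentelemetry.proto.common.v1 import common_pb2
from opentelemetry.proto.trace.v1 import trace_pb2

example_span = trace_pb2.Span(
    trace_id=os.urandom(16),               # 16-byte array; all zeroes is invalid
    span_id=os.urandom(8),                 # 8-byte array; all zeroes is invalid
    name="GET /health",                    # semantically required, non-empty
    kind=trace_pb2.Span.SPAN_KIND_SERVER,  # callee side of a remote request
    start_time_unix_nano=time.time_ns(),
    end_time_unix_nano=time.time_ns(),
    events=[
        trace_pb2.Span.Event(
            time_unix_nano=time.time_ns(),
            name="cache.miss",
            attributes=[
                common_pb2.KeyValue(
                    key="cache.key",
                    value=common_pb2.AnyValue(string_value="user:42"),
                )
            ],
        )
    ],
    status=trace_pb2.Status(code=trace_pb2.Status.STATUS_CODE_OK),
)

# Round-trip through the wire format to confirm the message is well formed.
assert trace_pb2.Span.FromString(example_span.SerializeToString()) == example_span
# ---------------------------------------------------------------------------
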
@typing_extensions.final
class Status(google.protobuf.message.Message):
    """The Status type defines a logical error model that is suitable for different
    programming environments, including REST APIs and RPC APIs.
    """

    DESCRIPTOR: google.protobuf.descriptor.Descriptor

    class _StatusCode:
        ValueType = typing.NewType("ValueType", builtins.int)
        V: typing_extensions.TypeAlias = ValueType

    class _StatusCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Status._StatusCode.ValueType], builtins.type):
        DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
        STATUS_CODE_UNSET: Status._StatusCode.ValueType  # 0
        """The default status."""
        STATUS_CODE_OK: Status._StatusCode.ValueType  # 1
        """The Span has been validated by an Application developer or Operator to
        have completed successfully.
        """
        STATUS_CODE_ERROR: Status._StatusCode.ValueType  # 2
        """The Span contains an error."""

    class StatusCode(_StatusCode, metaclass=_StatusCodeEnumTypeWrapper):
        """For the semantics of status codes see
        https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status
        """

    STATUS_CODE_UNSET: Status.StatusCode.ValueType  # 0
    """The default status."""
    STATUS_CODE_OK: Status.StatusCode.ValueType  # 1
    """The Span has been validated by an Application developer or Operator to
    have completed successfully.
    """
    STATUS_CODE_ERROR: Status.StatusCode.ValueType  # 2
    """The Span contains an error."""

    MESSAGE_FIELD_NUMBER: builtins.int
    CODE_FIELD_NUMBER: builtins.int
    message: builtins.str
    """A developer-facing human readable error message."""
    code: global___Status.StatusCode.ValueType
    """The status code."""
    def __init__(
        self,
        *,
        message: builtins.str = ...,
        code: global___Status.StatusCode.ValueType = ...,
    ) -> None: ...
    def ClearField(self, field_name: typing_extensions.Literal["code", b"code", "message", b"message"]) -> None: ...

global___Status = Status

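# ---------------------------------------------------------------------------
# Editor's sketch, not part of the generated stub: decoding the `flags` bit
# field documented on Span and Span.Link above. The mask values mirror the
# SpanFlags constants defined in trace.proto (bits 0-7 are the W3C trace
# flags, bit 8 says whether the remote state is known, bit 9 whether the
# parent/link context is remote).
SPAN_FLAGS_TRACE_FLAGS_MASK = 0x000000FF
SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK = 0x00000100
SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK = 0x00000200

def decode_span_flags(flags: int) -> dict:
    """Split a Span/Link `flags` value into its documented components."""
    return {
        "w3c_trace_flags": flags & SPAN_FLAGS_TRACE_FLAGS_MASK,
        "is_remote_known": bool(flags & SPAN_FLAGS_CONTEXT_HAS_IS_REMOTE_MASK),
        "is_remote": bool(flags & SPAN_FLAGS_CONTEXT_IS_REMOTE_MASK),
    }

# decode_span_flags(0x301) -> {"w3c_trace_flags": 1, "is_remote_known": True, "is_remote": True}
# ---------------------------------------------------------------------------
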
@@ -0,0 +1,15 @@
# Copyright The OpenTelemetry Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

__version__ = "1.38.0"
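# ---------------------------------------------------------------------------
# Editor's sketch, not part of the diff: since this commit vendors the whole
# virtual environment, a quick smoke test against the venv's interpreter can
# confirm that the bundled OTLP trace bindings import and serialize correctly.
from opentelemetry.proto.trace.v1 import trace_pb2

probe = trace_pb2.Span(name="smoke-test", kind=trace_pb2.Span.SPAN_KIND_INTERNAL)
assert trace_pb2.Span.FromString(probe.SerializeToString()).name == "smoke-test"
print("vendored OTLP trace bindings OK")
# ---------------------------------------------------------------------------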