diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 48d109f5ef..d564f69c67 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -23,7 +23,6 @@ Please describe the tests that you ran to verify your changes. Provide instructi
Answer the following question based on these examples of changes that would require a Contrib Repo Change:
- [The OTel specification](https://github.com/open-telemetry/opentelemetry-specification) has changed which prompted this PR to update the method interfaces of `opentelemetry-api/` or `opentelemetry-sdk/`
-- The method interfaces of `opentelemetry-instrumentation/` have changed
- The method interfaces of `test/util` have changed
- Scripts in `scripts/` that were copied over to the Contrib repo have changed
- Configuration files that were copied over to the Contrib repo have changed (when consistency between repositories is applicable) such as in
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 404b1cb485..192a11d9ca 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -10,7 +10,7 @@ env:
# Otherwise, set variable to the commit of your branch on
# opentelemetry-python-contrib which is compatible with these Core repo
# changes.
- CONTRIB_REPO_SHA: 4a4d889b1876323d7f70507b5e4d079f454fe0d6
+ CONTRIB_REPO_SHA: c2e674983a265e54c5eb14e376459a992498aae6
# This is needed because we do not clone the core repo in contrib builds anymore.
# When running contrib builds as part of core builds, we use actions/checkout@v2 which
# does not set an environment variable (simply just runs tox), which is different when
@@ -34,7 +34,7 @@ jobs:
fail-fast: false # ensures the entire test matrix is run, even if one permutation fails
matrix:
python-version: [ py36, py37, py38, py39, pypy3 ]
- package: ["api", "sdk", "instrumentation", "semantic", "getting", "distro" , "shim", "exporter", "protobuf", "propagator"]
+ package: ["api", "sdk", "semantic", "getting", "shim", "exporter", "protobuf", "propagator"]
os: [ ubuntu-20.04, windows-2019 ]
steps:
- name: Checkout Core Repo @ SHA - ${{ github.sha }}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index e16982e193..2bbfe493f0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -6,8 +6,12 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased](https://github.com/open-telemetry/opentelemetry-python/compare/v1.6.0-0.25b0...HEAD)
+- Upgrade GRPC/protobuf related dependency and regenerate otlp protobufs
+ ([#2201](https://github.com/open-telemetry/opentelemetry-python/pull/2201))
+- Propagation: only warn about oversized baggage headers when headers exist
+ ([#2212](https://github.com/open-telemetry/opentelemetry-python/pull/2212))
- Return proxy instruments from ProxyMeter
- [[#2169](https://github.com/open-telemetry/opentelemetry-python/pull/2169)]
+ ([#2169](https://github.com/open-telemetry/opentelemetry-python/pull/2169))
- Make Measurement a concrete class
([#2153](https://github.com/open-telemetry/opentelemetry-python/pull/2153))
- Add metrics API
diff --git a/dev-requirements.txt b/dev-requirements.txt
index bd369fac76..22595d5af2 100644
--- a/dev-requirements.txt
+++ b/dev-requirements.txt
@@ -6,10 +6,10 @@ httpretty~=1.0
mypy==0.812
sphinx~=3.5.4
sphinx-rtd-theme~=0.5
-sphinx-autodoc-typehints
+sphinx-autodoc-typehints~=1.12.0
pytest>=6.0
pytest-cov>=2.8
readme-renderer~=24.0
-grpcio-tools==1.29.0
-mypy-protobuf>=1.23
-protobuf>=3.13.0
+grpcio-tools~=1.41.0
+mypy-protobuf~=3.0.0
+protobuf~=3.18.1
diff --git a/docs-requirements.txt b/docs-requirements.txt
index b670676fbb..a93eb8aa70 100644
--- a/docs-requirements.txt
+++ b/docs-requirements.txt
@@ -8,9 +8,7 @@ sphinx-jekyll-builder
# doesn't work for pkg_resources.
./opentelemetry-api
./opentelemetry-semantic-conventions
-./opentelemetry-instrumentation
./opentelemetry-sdk
-./opentelemetry-instrumentation
# Required by instrumentation and exporter packages
ddtrace>=0.34.0
diff --git a/docs/examples/django/README.rst b/docs/examples/django/README.rst
index 09ddd2638b..2e071127ff 100644
--- a/docs/examples/django/README.rst
+++ b/docs/examples/django/README.rst
@@ -113,7 +113,7 @@ Auto Instrumentation
--------------------
This same example can be run using auto instrumentation. Comment out the call
-to ``DjangoInstrumento().instrument()`` in ``main``, then Run the django app
+to ``DjangoInstrumentor().instrument()`` in ``main``, then Run the django app
with ``opentelemetry-instrument python manage.py runserver --noreload``.
Repeat the steps with the client, the result should be the same.
diff --git a/eachdist.ini b/eachdist.ini
index 548d6d36e0..52d5be769f 100644
--- a/eachdist.ini
+++ b/eachdist.ini
@@ -5,7 +5,6 @@
sortfirst=
opentelemetry-api
opentelemetry-sdk
- opentelemetry-instrumentation
opentelemetry-proto
opentelemetry-distro
tests/util
@@ -39,7 +38,6 @@ packages=
opentelemetry-distro
opentelemetry-semantic-conventions
opentelemetry-test
- opentelemetry-instrumentation
tests
[experimental]
diff --git a/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py b/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py
index 8ba28357c3..8430dc2301 100644
--- a/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py
+++ b/opentelemetry-api/src/opentelemetry/baggage/propagation/__init__.py
@@ -53,7 +53,10 @@ def extract(
getter.get(carrier, self._BAGGAGE_HEADER_NAME)
)
- if not header or len(header) > self._MAX_HEADER_LENGTH:
+ if not header:
+ return context
+
+ if len(header) > self._MAX_HEADER_LENGTH:
_logger.warning(
"Baggage header `%s` exceeded the maximum number of bytes per baggage-string",
header,
diff --git a/opentelemetry-distro/MANIFEST.in b/opentelemetry-distro/MANIFEST.in
deleted file mode 100644
index 191b7d1959..0000000000
--- a/opentelemetry-distro/MANIFEST.in
+++ /dev/null
@@ -1,7 +0,0 @@
-prune tests
-graft src
-global-exclude *.pyc
-global-exclude *.pyo
-global-exclude __pycache__/*
-include MANIFEST.in
-include README.rst
diff --git a/opentelemetry-distro/README.rst b/opentelemetry-distro/README.rst
deleted file mode 100644
index 8095283910..0000000000
--- a/opentelemetry-distro/README.rst
+++ /dev/null
@@ -1,23 +0,0 @@
-OpenTelemetry Distro
-====================
-
-|pypi|
-
-.. |pypi| image:: https://badge.fury.io/py/opentelemetry-distro.svg
- :target: https://pypi.org/project/opentelemetry-distro/
-
-Installation
-------------
-
-::
-
- pip install opentelemetry-distro
-
-
-This package provides entrypoints to configure OpenTelemetry.
-
-References
-----------
-
-* `OpenTelemetry Project `_
-* `Example using opentelemetry-distro `_
diff --git a/opentelemetry-distro/setup.py b/opentelemetry-distro/setup.py
deleted file mode 100644
index 4783772d06..0000000000
--- a/opentelemetry-distro/setup.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-import setuptools
-
-BASE_DIR = os.path.dirname(__file__)
-VERSION_FILENAME = os.path.join(
- BASE_DIR, "src", "opentelemetry", "distro", "version.py"
-)
-PACKAGE_INFO = {}
-with open(VERSION_FILENAME, encoding="utf-8") as f:
- exec(f.read(), PACKAGE_INFO)
-
-setuptools.setup(
- version=PACKAGE_INFO["__version__"],
-)
diff --git a/opentelemetry-distro/src/opentelemetry/distro/__init__.py b/opentelemetry-distro/src/opentelemetry/distro/__init__.py
deleted file mode 100644
index 97e3e2fcc9..0000000000
--- a/opentelemetry-distro/src/opentelemetry/distro/__init__.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-from opentelemetry.environment_variables import OTEL_TRACES_EXPORTER
-from opentelemetry.instrumentation.distro import BaseDistro
-from opentelemetry.sdk._configuration import _OTelSDKConfigurator
-
-
-class OpenTelemetryConfigurator(_OTelSDKConfigurator):
- pass
-
-
-class OpenTelemetryDistro(BaseDistro):
- """
- The OpenTelemetry provided Distro configures a default set of
- configuration out of the box.
- """
-
- # pylint: disable=no-self-use
- def _configure(self, **kwargs):
- os.environ.setdefault(OTEL_TRACES_EXPORTER, "otlp_proto_grpc_span")
diff --git a/opentelemetry-distro/src/opentelemetry/distro/py.typed b/opentelemetry-distro/src/opentelemetry/distro/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/opentelemetry-distro/tests/__init__.py b/opentelemetry-distro/tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/opentelemetry-distro/tests/test_distro.py b/opentelemetry-distro/tests/test_distro.py
deleted file mode 100644
index 2e42ed904a..0000000000
--- a/opentelemetry-distro/tests/test_distro.py
+++ /dev/null
@@ -1,38 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-import os
-from unittest import TestCase
-
-from pkg_resources import DistributionNotFound, require
-
-from opentelemetry.distro import OpenTelemetryDistro
-from opentelemetry.environment_variables import OTEL_TRACES_EXPORTER
-
-
-class TestDistribution(TestCase):
- def test_package_available(self):
- try:
- require(["opentelemetry-distro"])
- except DistributionNotFound:
- self.fail("opentelemetry-distro not installed")
-
- def test_default_configuration(self):
- distro = OpenTelemetryDistro()
- self.assertIsNone(os.environ.get(OTEL_TRACES_EXPORTER))
- distro.configure()
- self.assertEqual(
- "otlp_proto_grpc_span", os.environ.get(OTEL_TRACES_EXPORTER)
- )
diff --git a/opentelemetry-instrumentation/LICENSE b/opentelemetry-instrumentation/LICENSE
deleted file mode 100644
index 1ef7dad2c5..0000000000
--- a/opentelemetry-instrumentation/LICENSE
+++ /dev/null
@@ -1,201 +0,0 @@
- Apache License
- Version 2.0, January 2004
- http://www.apache.org/licenses/
-
- TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
-
- 1. Definitions.
-
- "License" shall mean the terms and conditions for use, reproduction,
- and distribution as defined by Sections 1 through 9 of this document.
-
- "Licensor" shall mean the copyright owner or entity authorized by
- the copyright owner that is granting the License.
-
- "Legal Entity" shall mean the union of the acting entity and all
- other entities that control, are controlled by, or are under common
- control with that entity. For the purposes of this definition,
- "control" means (i) the power, direct or indirect, to cause the
- direction or management of such entity, whether by contract or
- otherwise, or (ii) ownership of fifty percent (50%) or more of the
- outstanding shares, or (iii) beneficial ownership of such entity.
-
- "You" (or "Your") shall mean an individual or Legal Entity
- exercising permissions granted by this License.
-
- "Source" form shall mean the preferred form for making modifications,
- including but not limited to software source code, documentation
- source, and configuration files.
-
- "Object" form shall mean any form resulting from mechanical
- transformation or translation of a Source form, including but
- not limited to compiled object code, generated documentation,
- and conversions to other media types.
-
- "Work" shall mean the work of authorship, whether in Source or
- Object form, made available under the License, as indicated by a
- copyright notice that is included in or attached to the work
- (an example is provided in the Appendix below).
-
- "Derivative Works" shall mean any work, whether in Source or Object
- form, that is based on (or derived from) the Work and for which the
- editorial revisions, annotations, elaborations, or other modifications
- represent, as a whole, an original work of authorship. For the purposes
- of this License, Derivative Works shall not include works that remain
- separable from, or merely link (or bind by name) to the interfaces of,
- the Work and Derivative Works thereof.
-
- "Contribution" shall mean any work of authorship, including
- the original version of the Work and any modifications or additions
- to that Work or Derivative Works thereof, that is intentionally
- submitted to Licensor for inclusion in the Work by the copyright owner
- or by an individual or Legal Entity authorized to submit on behalf of
- the copyright owner. For the purposes of this definition, "submitted"
- means any form of electronic, verbal, or written communication sent
- to the Licensor or its representatives, including but not limited to
- communication on electronic mailing lists, source code control systems,
- and issue tracking systems that are managed by, or on behalf of, the
- Licensor for the purpose of discussing and improving the Work, but
- excluding communication that is conspicuously marked or otherwise
- designated in writing by the copyright owner as "Not a Contribution."
-
- "Contributor" shall mean Licensor and any individual or Legal Entity
- on behalf of whom a Contribution has been received by Licensor and
- subsequently incorporated within the Work.
-
- 2. Grant of Copyright License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- copyright license to reproduce, prepare Derivative Works of,
- publicly display, publicly perform, sublicense, and distribute the
- Work and such Derivative Works in Source or Object form.
-
- 3. Grant of Patent License. Subject to the terms and conditions of
- this License, each Contributor hereby grants to You a perpetual,
- worldwide, non-exclusive, no-charge, royalty-free, irrevocable
- (except as stated in this section) patent license to make, have made,
- use, offer to sell, sell, import, and otherwise transfer the Work,
- where such license applies only to those patent claims licensable
- by such Contributor that are necessarily infringed by their
- Contribution(s) alone or by combination of their Contribution(s)
- with the Work to which such Contribution(s) was submitted. If You
- institute patent litigation against any entity (including a
- cross-claim or counterclaim in a lawsuit) alleging that the Work
- or a Contribution incorporated within the Work constitutes direct
- or contributory patent infringement, then any patent licenses
- granted to You under this License for that Work shall terminate
- as of the date such litigation is filed.
-
- 4. Redistribution. You may reproduce and distribute copies of the
- Work or Derivative Works thereof in any medium, with or without
- modifications, and in Source or Object form, provided that You
- meet the following conditions:
-
- (a) You must give any other recipients of the Work or
- Derivative Works a copy of this License; and
-
- (b) You must cause any modified files to carry prominent notices
- stating that You changed the files; and
-
- (c) You must retain, in the Source form of any Derivative Works
- that You distribute, all copyright, patent, trademark, and
- attribution notices from the Source form of the Work,
- excluding those notices that do not pertain to any part of
- the Derivative Works; and
-
- (d) If the Work includes a "NOTICE" text file as part of its
- distribution, then any Derivative Works that You distribute must
- include a readable copy of the attribution notices contained
- within such NOTICE file, excluding those notices that do not
- pertain to any part of the Derivative Works, in at least one
- of the following places: within a NOTICE text file distributed
- as part of the Derivative Works; within the Source form or
- documentation, if provided along with the Derivative Works; or,
- within a display generated by the Derivative Works, if and
- wherever such third-party notices normally appear. The contents
- of the NOTICE file are for informational purposes only and
- do not modify the License. You may add Your own attribution
- notices within Derivative Works that You distribute, alongside
- or as an addendum to the NOTICE text from the Work, provided
- that such additional attribution notices cannot be construed
- as modifying the License.
-
- You may add Your own copyright statement to Your modifications and
- may provide additional or different license terms and conditions
- for use, reproduction, or distribution of Your modifications, or
- for any such Derivative Works as a whole, provided Your use,
- reproduction, and distribution of the Work otherwise complies with
- the conditions stated in this License.
-
- 5. Submission of Contributions. Unless You explicitly state otherwise,
- any Contribution intentionally submitted for inclusion in the Work
- by You to the Licensor shall be under the terms and conditions of
- this License, without any additional terms or conditions.
- Notwithstanding the above, nothing herein shall supersede or modify
- the terms of any separate license agreement you may have executed
- with Licensor regarding such Contributions.
-
- 6. Trademarks. This License does not grant permission to use the trade
- names, trademarks, service marks, or product names of the Licensor,
- except as required for reasonable and customary use in describing the
- origin of the Work and reproducing the content of the NOTICE file.
-
- 7. Disclaimer of Warranty. Unless required by applicable law or
- agreed to in writing, Licensor provides the Work (and each
- Contributor provides its Contributions) on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
- implied, including, without limitation, any warranties or conditions
- of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
- PARTICULAR PURPOSE. You are solely responsible for determining the
- appropriateness of using or redistributing the Work and assume any
- risks associated with Your exercise of permissions under this License.
-
- 8. Limitation of Liability. In no event and under no legal theory,
- whether in tort (including negligence), contract, or otherwise,
- unless required by applicable law (such as deliberate and grossly
- negligent acts) or agreed to in writing, shall any Contributor be
- liable to You for damages, including any direct, indirect, special,
- incidental, or consequential damages of any character arising as a
- result of this License or out of the use or inability to use the
- Work (including but not limited to damages for loss of goodwill,
- work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses), even if such Contributor
- has been advised of the possibility of such damages.
-
- 9. Accepting Warranty or Additional Liability. While redistributing
- the Work or Derivative Works thereof, You may choose to offer,
- and charge a fee for, acceptance of support, warranty, indemnity,
- or other liability obligations and/or rights consistent with this
- License. However, in accepting such obligations, You may act only
- on Your own behalf and on Your sole responsibility, not on behalf
- of any other Contributor, and only if You agree to indemnify,
- defend, and hold each Contributor harmless for any liability
- incurred by, or claims asserted against, such Contributor by reason
- of your accepting any such warranty or additional liability.
-
- END OF TERMS AND CONDITIONS
-
- APPENDIX: How to apply the Apache License to your work.
-
- To apply the Apache License to your work, attach the following
- boilerplate notice, with the fields enclosed by brackets "[]"
- replaced with your own identifying information. (Don't include
- the brackets!) The text should be enclosed in the appropriate
- comment syntax for the file format. We also recommend that a
- file or class name and description of purpose be included on the
- same "printed page" as the copyright notice for easier
- identification within third-party archives.
-
- Copyright The OpenTelemetry Authors
-
- Licensed under the Apache License, Version 2.0 (the "License");
- you may not use this file except in compliance with the License.
- You may obtain a copy of the License at
-
- http://www.apache.org/licenses/LICENSE-2.0
-
- Unless required by applicable law or agreed to in writing, software
- distributed under the License is distributed on an "AS IS" BASIS,
- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- See the License for the specific language governing permissions and
- limitations under the License.
diff --git a/opentelemetry-instrumentation/MANIFEST.in b/opentelemetry-instrumentation/MANIFEST.in
deleted file mode 100644
index faee277146..0000000000
--- a/opentelemetry-instrumentation/MANIFEST.in
+++ /dev/null
@@ -1,8 +0,0 @@
-graft src
-graft tests
-global-exclude *.pyc
-global-exclude *.pyo
-global-exclude __pycache__/*
-include MANIFEST.in
-include README.rst
-include LICENSE
diff --git a/opentelemetry-instrumentation/README.rst b/opentelemetry-instrumentation/README.rst
deleted file mode 100644
index cae4e3ab5f..0000000000
--- a/opentelemetry-instrumentation/README.rst
+++ /dev/null
@@ -1,123 +0,0 @@
-OpenTelemetry Instrumentation
-=============================
-
-|pypi|
-
-.. |pypi| image:: https://badge.fury.io/py/opentelemetry-instrumentation.svg
- :target: https://pypi.org/project/opentelemetry-instrumentation/
-
-Installation
-------------
-
-::
-
- pip install opentelemetry-instrumentation
-
-
-This package provides a couple of commands that help automatically instruments a program:
-
-.. note::
- You need to install a distro package to get auto instrumentation working. The ``opentelemetry-distro``
- package contains the default distro and automatically configures some of the common options for users.
- For more info about ``opentelemetry-distro`` check `here `__
- ::
-
- pip install opentelemetry-distro[otlp]
-
-
-opentelemetry-bootstrap
------------------------
-
-::
-
- opentelemetry-bootstrap --action=install|requirements
-
-This commands inspects the active Python site-packages and figures out which
-instrumentation packages the user might want to install. By default it prints out
-a list of the suggested instrumentation packages which can be added to a requirements.txt
-file. It also supports installing the suggested packages when run with :code:`--action=install`
-flag.
-
-
-opentelemetry-instrument
-------------------------
-
-::
-
- opentelemetry-instrument python program.py
-
-The instrument command will try to automatically detect packages used by your python program
-and when possible, apply automatic tracing instrumentation on them. This means your program
-will get automatic distributed tracing for free without having to make any code changes
-at all. This will also configure a global tracer and tracing exporter without you having to
-make any code changes. By default, the instrument command will use the OTLP exporter but
-this can be overriden when needed.
-
-The command supports the following configuration options as CLI arguments and environment vars:
-
-
-* ``--trace-exporter`` or ``OTEL_TRACES_EXPORTER``
-
-Used to specify which trace exporter to use. Can be set to one or more of the well-known exporter
-names (see below).
-
- - Defaults to `otlp`.
- - Can be set to `none` to disable automatic tracer initialization.
-
-You can pass multiple values to configure multiple exporters e.g, ``zipkin,prometheus``
-
-Well known trace exporter names:
-
- - jaeger_proto
- - jaeger_thrift
- - opencensus
- - otlp
- - otlp_proto_grpc_span
- - otlp_proto_http_span
- - zipkin_json
- - zipkin_proto
-
-``otlp`` is an alias for ``otlp_proto_grpc_span``.
-
-* ``--id-generator`` or ``OTEL_PYTHON_ID_GENERATOR``
-
-Used to specify which IDs Generator to use for the global Tracer Provider. By default, it
-will use the random IDs generator.
-
-The code in ``program.py`` needs to use one of the packages for which there is
-an OpenTelemetry integration. For a list of the available integrations please
-check `here `_
-
-* ``OTEL_PYTHON_DISABLED_INSTRUMENTATIONS``
-
-If set by the user, opentelemetry-instrument will read this environment variable to disable specific instrumentations.
-e.g OTEL_PYTHON_DISABLED_INSTRUMENTATIONS = "requests,django"
-
-
-Examples
-^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-
-::
-
- opentelemetry-instrument --trace-exporter otlp flask run --port=3000
-
-The above command will pass ``--trace-exporter otlp`` to the instrument command and ``--port=3000`` to ``flask run``.
-
-::
-
- opentelemetry-instrument --trace-exporter zipkin_json,otlp celery -A tasks worker --loglevel=info
-
-The above command will configure global trace provider, attach zipkin and otlp exporters to it and then
-start celery with the rest of the arguments.
-
-::
-
- opentelemetry-instrument --ids-generator random flask run --port=3000
-
-The above command will configure the global trace provider to use the Random IDs Generator, and then
-pass ``--port=3000`` to ``flask run``.
-
-References
-----------
-
-* `OpenTelemetry Project `_
diff --git a/opentelemetry-instrumentation/setup.cfg b/opentelemetry-instrumentation/setup.cfg
deleted file mode 100644
index 042f0edb19..0000000000
--- a/opentelemetry-instrumentation/setup.cfg
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-[metadata]
-name = opentelemetry-instrumentation
-description = Instrumentation Tools & Auto Instrumentation for OpenTelemetry Python
-long_description = file: README.rst
-long_description_content_type = text/x-rst
-author = OpenTelemetry Authors
-author_email = cncf-opentelemetry-contributors@lists.cncf.io
-url = https://github.com/open-telemetry/opentelemetry-python/tree/main/opentelemetry-instrumentation
-platforms = any
-license = Apache-2.0
-classifiers =
- Development Status :: 4 - Beta
- Intended Audience :: Developers
- License :: OSI Approved :: Apache Software License
- Programming Language :: Python
- Programming Language :: Python :: 3
- Programming Language :: Python :: 3.6
- Programming Language :: Python :: 3.7
- Programming Language :: Python :: 3.8
- Programming Language :: Python :: 3.9
-
-[options]
-python_requires = >=3.6
-package_dir=
- =src
-packages=find_namespace:
-zip_safe = False
-include_package_data = True
-install_requires =
- opentelemetry-api ~= 1.4
- wrapt >= 1.0.0, < 2.0.0
-
-[options.packages.find]
-where = src
-
-[options.entry_points]
-console_scripts =
- opentelemetry-instrument = opentelemetry.instrumentation.auto_instrumentation:run
- opentelemetry-bootstrap = opentelemetry.instrumentation.bootstrap:run
-opentelemetry_environment_variables =
- instrumentation = opentelemetry.instrumentation.environment_variables
-
-[options.extras_require]
-test =
diff --git a/opentelemetry-instrumentation/setup.py b/opentelemetry-instrumentation/setup.py
deleted file mode 100644
index 9d1d5b7b06..0000000000
--- a/opentelemetry-instrumentation/setup.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import os
-
-import setuptools
-
-BASE_DIR = os.path.dirname(__file__)
-VERSION_FILENAME = os.path.join(
- BASE_DIR, "src", "opentelemetry", "instrumentation", "version.py"
-)
-PACKAGE_INFO = {}
-with open(VERSION_FILENAME, encoding="utf-8") as f:
- exec(f.read(), PACKAGE_INFO)
-
-setuptools.setup(
- version=PACKAGE_INFO["__version__"],
-)
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/__init__.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/__init__.py
deleted file mode 100644
index 29b09a0c34..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/__init__.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from argparse import REMAINDER, ArgumentParser
-from logging import getLogger
-from os import environ, execl, getcwd
-from os.path import abspath, dirname, pathsep
-from re import sub
-from shutil import which
-
-from pkg_resources import iter_entry_points
-
-_logger = getLogger(__file__)
-
-
-def run() -> None:
-
- parser = ArgumentParser(
- description="""
- opentelemetry-instrument automatically instruments a Python
- program and its dependencies and then runs the program.
- """,
- epilog="""
- Optional arguments (except for --help) for opentelemetry-instrument
- directly correspond with OpenTelemetry environment variables. The
- corresponding optional argument is formed by removing the OTEL_ or
- OTEL_PYTHON_ prefix from the environment variable and lower casing the
- rest. For example, the optional argument --attribute_value_length_limit
- corresponds with the environment variable
- OTEL_ATTRIBUTE_VALUE_LENGTH_LIMIT.
-
- These optional arguments will override the current value of the
- corresponding environment variable during the execution of the command.
- """,
- )
-
- argument_otel_environment_variable = {}
-
- for entry_point in iter_entry_points(
- "opentelemetry_environment_variables"
- ):
- environment_variable_module = entry_point.load()
-
- for attribute in dir(environment_variable_module):
-
- if attribute.startswith("OTEL_"):
-
- argument = sub(r"OTEL_(PYTHON_)?", "", attribute).lower()
-
- parser.add_argument(
- f"--{argument}",
- required=False,
- )
- argument_otel_environment_variable[argument] = attribute
-
- parser.add_argument("command", help="Your Python application.")
- parser.add_argument(
- "command_args",
- help="Arguments for your application.",
- nargs=REMAINDER,
- )
-
- args = parser.parse_args()
-
- for argument, otel_environment_variable in (
- argument_otel_environment_variable
- ).items():
- value = getattr(args, argument)
- if value is not None:
-
- environ[otel_environment_variable] = value
-
- python_path = environ.get("PYTHONPATH")
-
- if not python_path:
- python_path = []
-
- else:
- python_path = python_path.split(pathsep)
-
- cwd_path = getcwd()
-
- # This is being added to support applications that are being run from their
- # own executable, like Django.
- # FIXME investigate if there is another way to achieve this
- if cwd_path not in python_path:
- python_path.insert(0, cwd_path)
-
- filedir_path = dirname(abspath(__file__))
-
- python_path = [path for path in python_path if path != filedir_path]
-
- python_path.insert(0, filedir_path)
-
- environ["PYTHONPATH"] = pathsep.join(python_path)
-
- executable = which(args.command)
- execl(executable, executable, *args.command_args)
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
deleted file mode 100644
index f7a6412ff6..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/auto_instrumentation/sitecustomize.py
+++ /dev/null
@@ -1,141 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import sys
-from logging import getLogger
-from os import environ, path
-from os.path import abspath, dirname, pathsep
-from re import sub
-
-from pkg_resources import iter_entry_points
-
-from opentelemetry.instrumentation.dependencies import (
- get_dist_dependency_conflicts,
-)
-from opentelemetry.instrumentation.distro import BaseDistro, DefaultDistro
-from opentelemetry.instrumentation.environment_variables import (
- OTEL_PYTHON_DISABLED_INSTRUMENTATIONS,
-)
-
-logger = getLogger(__file__)
-
-
-def _load_distros() -> BaseDistro:
- for entry_point in iter_entry_points("opentelemetry_distro"):
- try:
- distro = entry_point.load()()
- if not isinstance(distro, BaseDistro):
- logger.debug(
- "%s is not an OpenTelemetry Distro. Skipping",
- entry_point.name,
- )
- continue
- logger.debug(
- "Distribution %s will be configured", entry_point.name
- )
- return distro
- except Exception as exc: # pylint: disable=broad-except
- logger.exception(
- "Distribution %s configuration failed", entry_point.name
- )
- raise exc
- return DefaultDistro()
-
-
-def _load_instrumentors(distro):
- package_to_exclude = environ.get(OTEL_PYTHON_DISABLED_INSTRUMENTATIONS, [])
- if isinstance(package_to_exclude, str):
- package_to_exclude = package_to_exclude.split(",")
- # to handle users entering "requests , flask" or "requests, flask" with spaces
- package_to_exclude = [x.strip() for x in package_to_exclude]
-
- for entry_point in iter_entry_points("opentelemetry_pre_instrument"):
- entry_point.load()()
-
- for entry_point in iter_entry_points("opentelemetry_instrumentor"):
- if entry_point.name in package_to_exclude:
- logger.debug(
- "Instrumentation skipped for library %s", entry_point.name
- )
- continue
-
- try:
- conflict = get_dist_dependency_conflicts(entry_point.dist)
- if conflict:
- logger.debug(
- "Skipping instrumentation %s: %s",
- entry_point.name,
- conflict,
- )
- continue
-
- # tell instrumentation to not run dep checks again as we already did it above
- distro.load_instrumentor(entry_point, skip_dep_check=True)
- logger.debug("Instrumented %s", entry_point.name)
- except Exception as exc: # pylint: disable=broad-except
- logger.exception("Instrumenting of %s failed", entry_point.name)
- raise exc
-
- for entry_point in iter_entry_points("opentelemetry_post_instrument"):
- entry_point.load()()
-
-
-def _load_configurators():
- configured = None
- for entry_point in iter_entry_points("opentelemetry_configurator"):
- if configured is not None:
- logger.warning(
- "Configuration of %s not loaded, %s already loaded",
- entry_point.name,
- configured,
- )
- continue
- try:
- entry_point.load()().configure() # type: ignore
- configured = entry_point.name
- except Exception as exc: # pylint: disable=broad-except
- logger.exception("Configuration of %s failed", entry_point.name)
- raise exc
-
-
-def initialize():
- try:
- distro = _load_distros()
- distro.configure()
- _load_configurators()
- _load_instrumentors(distro)
- except Exception: # pylint: disable=broad-except
- logger.exception("Failed to auto initialize opentelemetry")
- finally:
- environ["PYTHONPATH"] = sub(
- fr"{dirname(abspath(__file__))}{pathsep}?",
- "",
- environ["PYTHONPATH"],
- )
-
-
-if (
- hasattr(sys, "argv")
- and sys.argv[0].split(path.sep)[-1] == "celery"
- and "worker" in sys.argv[1:]
-):
- from celery.signals import worker_process_init # pylint:disable=E0401
-
- @worker_process_init.connect(weak=False)
- def init_celery(*args, **kwargs):
- initialize()
-
-
-else:
- initialize()
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap.py
deleted file mode 100644
index f1c8181bae..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/bootstrap.py
+++ /dev/null
@@ -1,154 +0,0 @@
-#!/usr/bin/env python3
-
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import argparse
-import logging
-import subprocess
-import sys
-
-import pkg_resources
-
-from opentelemetry.instrumentation.bootstrap_gen import (
- default_instrumentations,
- libraries,
-)
-
-logger = logging.getLogger(__file__)
-
-
-def _syscall(func):
- def wrapper(package=None):
- try:
- if package:
- return func(package)
- return func()
- except subprocess.SubprocessError as exp:
- cmd = getattr(exp, "cmd", None)
- if cmd:
- msg = f'Error calling system command "{" ".join(cmd)}"'
- if package:
- msg = f'{msg} for package "{package}"'
- raise RuntimeError(msg)
-
- return wrapper
-
-
-@_syscall
-def _sys_pip_install(package):
- # explicit upgrade strategy to override potential pip config
- subprocess.check_call(
- [
- sys.executable,
- "-m",
- "pip",
- "install",
- "-U",
- "--upgrade-strategy",
- "only-if-needed",
- package,
- ]
- )
-
-
-def _pip_check():
- """Ensures none of the instrumentations have dependency conflicts.
- Clean check reported as:
- 'No broken requirements found.'
- Dependency conflicts are reported as:
- 'opentelemetry-instrumentation-flask 1.0.1 has requirement opentelemetry-sdk<2.0,>=1.0, but you have opentelemetry-sdk 0.5.'
- To not be too restrictive, we'll only check for relevant packages.
- """
- with subprocess.Popen(
- [sys.executable, "-m", "pip", "check"], stdout=subprocess.PIPE
- ) as check_pipe:
- pip_check = check_pipe.communicate()[0].decode()
- pip_check_lower = pip_check.lower()
- for package_tup in libraries.values():
- for package in package_tup:
- if package.lower() in pip_check_lower:
- raise RuntimeError(f"Dependency conflict found: {pip_check}")
-
-
-def _is_installed(req):
- if req in sys.modules:
- return True
-
- try:
- pkg_resources.get_distribution(req)
- except pkg_resources.DistributionNotFound:
- return False
- except pkg_resources.VersionConflict as exc:
- logger.warning(
- "instrumentation for package %s is available but version %s is installed. Skipping.",
- exc.req,
- exc.dist.as_requirement(), # pylint: disable=no-member
- )
- return False
- return True
-
-
-def _find_installed_libraries():
- libs = default_instrumentations[:]
- libs.extend(
- [
- v["instrumentation"]
- for _, v in libraries.items()
- if _is_installed(v["library"])
- ]
- )
- return libs
-
-
-def _run_requirements():
- logger.setLevel(logging.ERROR)
- print("\n".join(_find_installed_libraries()), end="")
-
-
-def _run_install():
- for lib in _find_installed_libraries():
- _sys_pip_install(lib)
- _pip_check()
-
-
-def run() -> None:
- action_install = "install"
- action_requirements = "requirements"
-
- parser = argparse.ArgumentParser(
- description="""
- opentelemetry-bootstrap detects installed libraries and automatically
- installs the relevant instrumentation packages for them.
- """
- )
- parser.add_argument(
- "-a",
- "--action",
- choices=[action_install, action_requirements],
- default=action_requirements,
- help="""
- install - uses pip to install the new requirements using to the
- currently active site-package.
- requirements - prints out the new requirements to stdout. Action can
- be piped and appended to a requirements.txt file.
- """,
- )
- args = parser.parse_args()
-
- cmd = {
- action_install: _run_install,
- action_requirements: _run_requirements,
- }[args.action]
- cmd()
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py
deleted file mode 100644
index 6c65d6677e..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/dependencies.py
+++ /dev/null
@@ -1,62 +0,0 @@
-from logging import getLogger
-from typing import Collection, Optional
-
-from pkg_resources import (
- Distribution,
- DistributionNotFound,
- RequirementParseError,
- VersionConflict,
- get_distribution,
-)
-
-logger = getLogger(__file__)
-
-
-class DependencyConflict:
- required: str = None
- found: Optional[str] = None
-
- def __init__(self, required, found=None):
- self.required = required
- self.found = found
-
- def __str__(self):
- return f'DependencyConflict: requested: "{self.required}" but found: "{self.found}"'
-
-
-def get_dist_dependency_conflicts(
- dist: Distribution,
-) -> Optional[DependencyConflict]:
- main_deps = dist.requires()
- instrumentation_deps = []
- for dep in dist.requires(("instruments",)):
- if dep not in main_deps:
- # we set marker to none so string representation of the dependency looks like
- # requests ~= 1.0
- # instead of
- # requests ~= 1.0; extra = "instruments"
- # which does not work with `get_distribution()`
- dep.marker = None
- instrumentation_deps.append(str(dep))
-
- return get_dependency_conflicts(instrumentation_deps)
-
-
-def get_dependency_conflicts(
- deps: Collection[str],
-) -> Optional[DependencyConflict]:
- for dep in deps:
- try:
- get_distribution(dep)
- except VersionConflict as exc:
- return DependencyConflict(dep, exc.dist)
- except DistributionNotFound:
- return DependencyConflict(dep)
- except RequirementParseError as exc:
- logger.warning(
- 'error parsing dependency, reporting as a conflict: "%s" - %s',
- dep,
- exc,
- )
- return DependencyConflict(dep)
- return None
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/distro.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/distro.py
deleted file mode 100644
index cc1c99c1e0..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/distro.py
+++ /dev/null
@@ -1,71 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-"""
-OpenTelemetry Base Distribution (Distro)
-"""
-
-from abc import ABC, abstractmethod
-from logging import getLogger
-
-from pkg_resources import EntryPoint
-
-from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
-
-_LOG = getLogger(__name__)
-
-
-class BaseDistro(ABC):
- """An ABC for distro"""
-
- _instance = None
-
- def __new__(cls, *args, **kwargs):
-
- if cls._instance is None:
- cls._instance = object.__new__(cls, *args, **kwargs)
-
- return cls._instance
-
- @abstractmethod
- def _configure(self, **kwargs):
- """Configure the distribution"""
-
- def configure(self, **kwargs):
- """Configure the distribution"""
- self._configure(**kwargs)
-
- def load_instrumentor( # pylint: disable=no-self-use
- self, entry_point: EntryPoint, **kwargs
- ):
- """Takes a collection of instrumentation entry points
- and activates them by instantiating and calling instrument()
- on each one.
-
- Distros can override this method to customize the behavior by
- inspecting each entry point and configuring them in special ways,
- passing additional arguments, load a replacement/fork instead,
- skip loading entirely, etc.
- """
- instrumentor: BaseInstrumentor = entry_point.load()
- instrumentor().instrument(**kwargs)
-
-
-class DefaultDistro(BaseDistro):
- def _configure(self, **kwargs):
- pass
-
-
-__all__ = ["BaseDistro", "DefaultDistro"]
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/environment_variables.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/environment_variables.py
deleted file mode 100644
index ad28f06859..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/environment_variables.py
+++ /dev/null
@@ -1,18 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-OTEL_PYTHON_DISABLED_INSTRUMENTATIONS = "OTEL_PYTHON_DISABLED_INSTRUMENTATIONS"
-"""
-.. envvar:: OTEL_PYTHON_DISABLED_INSTRUMENTATIONS
-"""
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/instrumentor.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/instrumentor.py
deleted file mode 100644
index 74ebe86746..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/instrumentor.py
+++ /dev/null
@@ -1,132 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-"""
-OpenTelemetry Base Instrumentor
-"""
-
-from abc import ABC, abstractmethod
-from logging import getLogger
-from typing import Collection, Optional
-
-from opentelemetry.instrumentation.dependencies import (
- DependencyConflict,
- get_dependency_conflicts,
-)
-
-_LOG = getLogger(__name__)
-
-
-class BaseInstrumentor(ABC):
- """An ABC for instrumentors
-
- Child classes of this ABC should instrument specific third
- party libraries or frameworks either by using the
- ``opentelemetry-instrument`` command or by calling their methods
- directly.
-
- Since every third party library or framework is different and has different
- instrumentation needs, more methods can be added to the child classes as
- needed to provide practical instrumentation to the end user.
- """
-
- _instance = None
- _is_instrumented_by_opentelemetry = False
-
- def __new__(cls, *args, **kwargs):
-
- if cls._instance is None:
- cls._instance = object.__new__(cls, *args, **kwargs)
-
- return cls._instance
-
- @property
- def is_instrumented_by_opentelemetry(self):
- return self._is_instrumented_by_opentelemetry
-
- @abstractmethod
- def instrumentation_dependencies(self) -> Collection[str]:
- """Return a list of python packages with versions that the will be instrumented.
-
- The format should be the same as used in requirements.txt or setup.py.
-
- For example, if an instrumentation instruments requests 1.x, this method should look
- like:
-
- def instrumentation_dependencies(self) -> Collection[str]:
- return ['requests ~= 1.0']
-
- This will ensure that the instrumentation will only be used when the specified library
- is present in the environment.
- """
-
- def _instrument(self, **kwargs):
- """Instrument the library"""
-
- @abstractmethod
- def _uninstrument(self, **kwargs):
- """Uninstrument the library"""
-
- def _check_dependency_conflicts(self) -> Optional[DependencyConflict]:
- dependencies = self.instrumentation_dependencies()
- return get_dependency_conflicts(dependencies)
-
- def instrument(self, **kwargs):
- """Instrument the library
-
- This method will be called without any optional arguments by the
- ``opentelemetry-instrument`` command.
-
- This means that calling this method directly without passing any
- optional values should do the very same thing that the
- ``opentelemetry-instrument`` command does.
- """
-
- if self._is_instrumented_by_opentelemetry:
- _LOG.warning("Attempting to instrument while already instrumented")
- return None
-
- # check if instrumentor has any missing or conflicting dependencies
- skip_dep_check = kwargs.pop("skip_dep_check", False)
- if not skip_dep_check:
- conflict = self._check_dependency_conflicts()
- if conflict:
- _LOG.error(conflict)
- return None
-
- result = self._instrument( # pylint: disable=assignment-from-no-return
- **kwargs
- )
- self._is_instrumented_by_opentelemetry = True
- return result
-
- def uninstrument(self, **kwargs):
- """Uninstrument the library
-
- See ``BaseInstrumentor.instrument`` for more information regarding the
- usage of ``kwargs``.
- """
-
- if self._is_instrumented_by_opentelemetry:
- result = self._uninstrument(**kwargs)
- self._is_instrumented_by_opentelemetry = False
- return result
-
- _LOG.warning("Attempting to uninstrument while already uninstrumented")
-
- return None
-
-
-__all__ = ["BaseInstrumentor"]
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/propagators.py b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/propagators.py
deleted file mode 100644
index bc40f7742c..0000000000
--- a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/propagators.py
+++ /dev/null
@@ -1,124 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-"""
-This module implements experimental propagators to inject trace context
-into response carriers. This is useful for server side frameworks that start traces
-when server requests and want to share the trace context with the client so the
-client can add its spans to the same trace.
-
-This is part of an upcoming W3C spec and will eventually make it to the Otel spec.
-
-https://w3c.github.io/trace-context/#trace-context-http-response-headers-format
-"""
-
-import typing
-from abc import ABC, abstractmethod
-
-from opentelemetry import trace
-from opentelemetry.context.context import Context
-from opentelemetry.propagators import textmap
-from opentelemetry.trace import format_span_id, format_trace_id
-
-_HTTP_HEADER_ACCESS_CONTROL_EXPOSE_HEADERS = "Access-Control-Expose-Headers"
-_RESPONSE_PROPAGATOR = None
-
-
-def get_global_response_propagator():
- return _RESPONSE_PROPAGATOR
-
-
-def set_global_response_propagator(propagator):
- global _RESPONSE_PROPAGATOR # pylint:disable=global-statement
- _RESPONSE_PROPAGATOR = propagator
-
-
-class Setter(ABC):
- @abstractmethod
- def set(self, carrier, key, value):
- """Inject the provided key value pair in carrier."""
-
-
-class DictHeaderSetter(Setter):
- def set(self, carrier, key, value): # pylint: disable=no-self-use
- old_value = carrier.get(key, "")
- if old_value:
- value = f"{old_value}, {value}"
- carrier[key] = value
-
-
-class FuncSetter(Setter):
- """FuncSetter coverts a function into a valid Setter. Any function that can
- set values in a carrier can be converted into a Setter by using FuncSetter.
- This is useful when injecting trace context into non-dict objects such
- HTTP Response objects for different framework.
-
- For example, it can be used to create a setter for Falcon response object as:
-
- setter = FuncSetter(falcon.api.Response.append_header)
-
- and then used with the propagator as:
-
- propagator.inject(falcon_response, setter=setter)
-
- This would essentially make the propagator call `falcon_response.append_header(key, value)`
- """
-
- def __init__(self, func):
- self._func = func
-
- def set(self, carrier, key, value):
- self._func(carrier, key, value)
-
-
-default_setter = DictHeaderSetter()
-
-
-class ResponsePropagator(ABC):
- @abstractmethod
- def inject(
- self,
- carrier: textmap.CarrierT,
- context: typing.Optional[Context] = None,
- setter: textmap.Setter = default_setter,
- ) -> None:
- """Injects SpanContext into the HTTP response carrier."""
-
-
-class TraceResponsePropagator(ResponsePropagator):
- """Experimental propagator that injects tracecontext into HTTP responses."""
-
- def inject(
- self,
- carrier: textmap.CarrierT,
- context: typing.Optional[Context] = None,
- setter: textmap.Setter = default_setter,
- ) -> None:
- """Injects SpanContext into the HTTP response carrier."""
- span = trace.get_current_span(context)
- span_context = span.get_span_context()
- if span_context == trace.INVALID_SPAN_CONTEXT:
- return
-
- header_name = "traceresponse"
- setter.set(
- carrier,
- header_name,
- f"00-{format_trace_id(span_context.trace_id)}-{format_span_id(span_context.span_id)}-{span_context.trace_flags:02x}",
- )
- setter.set(
- carrier,
- _HTTP_HEADER_ACCESS_CONTROL_EXPOSE_HEADERS,
- header_name,
- )
diff --git a/opentelemetry-instrumentation/src/opentelemetry/instrumentation/py.typed b/opentelemetry-instrumentation/src/opentelemetry/instrumentation/py.typed
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/opentelemetry-instrumentation/tests/__init__.py b/opentelemetry-instrumentation/tests/__init__.py
deleted file mode 100644
index e69de29bb2..0000000000
diff --git a/opentelemetry-instrumentation/tests/test_bootstrap.py b/opentelemetry-instrumentation/tests/test_bootstrap.py
deleted file mode 100644
index a266bf8a43..0000000000
--- a/opentelemetry-instrumentation/tests/test_bootstrap.py
+++ /dev/null
@@ -1,91 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-from io import StringIO
-from random import sample
-from unittest import TestCase
-from unittest.mock import call, patch
-
-from opentelemetry.instrumentation import bootstrap
-from opentelemetry.instrumentation.bootstrap_gen import libraries
-
-
-def sample_packages(packages, rate):
- return sample(
- list(packages),
- int(len(packages) * rate),
- )
-
-
-class TestBootstrap(TestCase):
-
- installed_libraries = {}
- installed_instrumentations = {}
-
- @classmethod
- def setUpClass(cls):
- cls.installed_libraries = sample_packages(
- [lib["instrumentation"] for lib in libraries.values()], 0.6
- )
-
- # treat 50% of sampled packages as pre-installed
- cls.installed_instrumentations = sample_packages(
- cls.installed_libraries, 0.5
- )
-
- cls.pkg_patcher = patch(
- "opentelemetry.instrumentation.bootstrap._find_installed_libraries",
- return_value=cls.installed_libraries,
- )
-
- cls.pip_install_patcher = patch(
- "opentelemetry.instrumentation.bootstrap._sys_pip_install",
- )
- cls.pip_check_patcher = patch(
- "opentelemetry.instrumentation.bootstrap._pip_check",
- )
-
- cls.pkg_patcher.start()
- cls.mock_pip_install = cls.pip_install_patcher.start()
- cls.mock_pip_check = cls.pip_check_patcher.start()
-
- @classmethod
- def tearDownClass(cls):
- cls.pip_check_patcher.start()
- cls.pip_install_patcher.start()
- cls.pkg_patcher.stop()
-
- @patch("sys.argv", ["bootstrap", "-a", "pipenv"])
- def test_run_unknown_cmd(self):
- with self.assertRaises(SystemExit):
- bootstrap.run()
-
- @patch("sys.argv", ["bootstrap", "-a", "requirements"])
- def test_run_cmd_print(self):
- with patch("sys.stdout", new=StringIO()) as fake_out:
- bootstrap.run()
- self.assertEqual(
- fake_out.getvalue(),
- "\n".join(self.installed_libraries),
- )
-
- @patch("sys.argv", ["bootstrap", "-a", "install"])
- def test_run_cmd_install(self):
- bootstrap.run()
- self.mock_pip_install.assert_has_calls(
- [call(i) for i in self.installed_libraries],
- any_order=True,
- )
- self.assertEqual(self.mock_pip_check.call_count, 1)
diff --git a/opentelemetry-instrumentation/tests/test_dependencies.py b/opentelemetry-instrumentation/tests/test_dependencies.py
deleted file mode 100644
index a8acac62f4..0000000000
--- a/opentelemetry-instrumentation/tests/test_dependencies.py
+++ /dev/null
@@ -1,77 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# pylint: disable=protected-access
-
-import pkg_resources
-import pytest
-
-from opentelemetry.instrumentation.dependencies import (
- DependencyConflict,
- get_dependency_conflicts,
- get_dist_dependency_conflicts,
-)
-from opentelemetry.test.test_base import TestBase
-
-
-class TestDependencyConflicts(TestBase):
- def setUp(self):
- pass
-
- def test_get_dependency_conflicts_empty(self):
- self.assertIsNone(get_dependency_conflicts([]))
-
- def test_get_dependency_conflicts_no_conflict(self):
- self.assertIsNone(get_dependency_conflicts(["pytest"]))
-
- def test_get_dependency_conflicts_not_installed(self):
- conflict = get_dependency_conflicts(["this-package-does-not-exist"])
- self.assertTrue(conflict is not None)
- self.assertTrue(isinstance(conflict, DependencyConflict))
- self.assertEqual(
- str(conflict),
- 'DependencyConflict: requested: "this-package-does-not-exist" but found: "None"',
- )
-
- def test_get_dependency_conflicts_mismatched_version(self):
- conflict = get_dependency_conflicts(["pytest == 5000"])
- self.assertTrue(conflict is not None)
- self.assertTrue(isinstance(conflict, DependencyConflict))
- self.assertEqual(
- str(conflict),
- f'DependencyConflict: requested: "pytest == 5000" but found: "pytest {pytest.__version__}"',
- )
-
- def test_get_dist_dependency_conflicts(self):
- def mock_requires(extras=()):
- if "instruments" in extras:
- return [
- pkg_resources.Requirement(
- 'test-pkg ~= 1.0; extra == "instruments"'
- )
- ]
- return []
-
- dist = pkg_resources.Distribution(
- project_name="test-instrumentation", version="1.0"
- )
- dist.requires = mock_requires
-
- conflict = get_dist_dependency_conflicts(dist)
- self.assertTrue(conflict is not None)
- self.assertTrue(isinstance(conflict, DependencyConflict))
- self.assertEqual(
- str(conflict),
- 'DependencyConflict: requested: "test-pkg~=1.0" but found: "None"',
- )
diff --git a/opentelemetry-instrumentation/tests/test_distro.py b/opentelemetry-instrumentation/tests/test_distro.py
deleted file mode 100644
index 399b3f8a65..0000000000
--- a/opentelemetry-instrumentation/tests/test_distro.py
+++ /dev/null
@@ -1,58 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-from unittest import TestCase
-
-from pkg_resources import EntryPoint
-
-from opentelemetry.instrumentation.distro import BaseDistro
-from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
-
-
-class MockInstrumetor(BaseInstrumentor):
- def instrumentation_dependencies(self):
- return []
-
- def _instrument(self, **kwargs):
- pass
-
- def _uninstrument(self, **kwargs):
- pass
-
-
-class MockEntryPoint(EntryPoint):
- def __init__(self, obj): # pylint: disable=super-init-not-called
- self._obj = obj
-
- def load(self, *args, **kwargs): # pylint: disable=signature-differs
- return self._obj
-
-
-class MockDistro(BaseDistro):
- def _configure(self, **kwargs):
- pass
-
-
-class TestDistro(TestCase):
- def test_load_instrumentor(self):
- # pylint: disable=protected-access
- distro = MockDistro()
-
- instrumentor = MockInstrumetor()
- entry_point = MockEntryPoint(MockInstrumetor)
-
- self.assertFalse(instrumentor._is_instrumented_by_opentelemetry)
- distro.load_instrumentor(entry_point)
- self.assertTrue(instrumentor._is_instrumented_by_opentelemetry)
diff --git a/opentelemetry-instrumentation/tests/test_instrumentor.py b/opentelemetry-instrumentation/tests/test_instrumentor.py
deleted file mode 100644
index dee32c34e4..0000000000
--- a/opentelemetry-instrumentation/tests/test_instrumentor.py
+++ /dev/null
@@ -1,50 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-from logging import WARNING
-from unittest import TestCase
-
-from opentelemetry.instrumentation.instrumentor import BaseInstrumentor
-
-
-class TestInstrumentor(TestCase):
- class Instrumentor(BaseInstrumentor):
- def _instrument(self, **kwargs):
- return "instrumented"
-
- def _uninstrument(self, **kwargs):
- return "uninstrumented"
-
- def instrumentation_dependencies(self):
- return []
-
- def test_protect(self):
- instrumentor = self.Instrumentor()
-
- with self.assertLogs(level=WARNING):
- self.assertIs(instrumentor.uninstrument(), None)
-
- self.assertEqual(instrumentor.instrument(), "instrumented")
-
- with self.assertLogs(level=WARNING):
- self.assertIs(instrumentor.instrument(), None)
-
- self.assertEqual(instrumentor.uninstrument(), "uninstrumented")
-
- with self.assertLogs(level=WARNING):
- self.assertIs(instrumentor.uninstrument(), None)
-
- def test_singleton(self):
- self.assertIs(self.Instrumentor(), self.Instrumentor())
diff --git a/opentelemetry-instrumentation/tests/test_propagators.py b/opentelemetry-instrumentation/tests/test_propagators.py
deleted file mode 100644
index 62461aafa9..0000000000
--- a/opentelemetry-instrumentation/tests/test_propagators.py
+++ /dev/null
@@ -1,80 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-# pylint: disable=protected-access
-
-from opentelemetry import trace
-from opentelemetry.instrumentation import propagators
-from opentelemetry.instrumentation.propagators import (
- DictHeaderSetter,
- TraceResponsePropagator,
- get_global_response_propagator,
- set_global_response_propagator,
-)
-from opentelemetry.test.test_base import TestBase
-
-
-class TestGlobals(TestBase):
- def test_get_set(self):
- original = propagators._RESPONSE_PROPAGATOR
-
- propagators._RESPONSE_PROPAGATOR = None
- self.assertIsNone(get_global_response_propagator())
-
- prop = TraceResponsePropagator()
- set_global_response_propagator(prop)
- self.assertIs(prop, get_global_response_propagator())
-
- propagators._RESPONSE_PROPAGATOR = original
-
-
-class TestDictHeaderSetter(TestBase):
- def test_simple(self):
- setter = DictHeaderSetter()
- carrier = {}
- setter.set(carrier, "kk", "vv")
- self.assertIn("kk", carrier)
- self.assertEqual(carrier["kk"], "vv")
-
- def test_append(self):
- setter = DictHeaderSetter()
- carrier = {"kk": "old"}
- setter.set(carrier, "kk", "vv")
- self.assertIn("kk", carrier)
- self.assertEqual(carrier["kk"], "old, vv")
-
-
-class TestTraceResponsePropagator(TestBase):
- def test_inject(self):
- span = trace.NonRecordingSpan(
- trace.SpanContext(
- trace_id=1,
- span_id=2,
- is_remote=False,
- trace_flags=trace.DEFAULT_TRACE_OPTIONS,
- trace_state=trace.DEFAULT_TRACE_STATE,
- ),
- )
-
- ctx = trace.set_span_in_context(span)
- prop = TraceResponsePropagator()
- carrier = {}
- prop.inject(carrier, ctx)
- self.assertEqual(
- carrier["Access-Control-Expose-Headers"], "traceresponse"
- )
- self.assertEqual(
- carrier["traceresponse"],
- "00-00000000000000000000000000000001-0000000000000002-00",
- )
diff --git a/opentelemetry-instrumentation/tests/test_run.py b/opentelemetry-instrumentation/tests/test_run.py
deleted file mode 100644
index 9fd3a21711..0000000000
--- a/opentelemetry-instrumentation/tests/test_run.py
+++ /dev/null
@@ -1,118 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-# type: ignore
-
-from os import environ, getcwd
-from os.path import abspath, dirname, pathsep
-from unittest import TestCase
-from unittest.mock import patch
-
-from opentelemetry.environment_variables import OTEL_TRACES_EXPORTER
-from opentelemetry.instrumentation import auto_instrumentation
-
-
-class TestRun(TestCase):
- auto_instrumentation_path = dirname(abspath(auto_instrumentation.__file__))
-
- @classmethod
- def setUpClass(cls):
- cls.execl_patcher = patch(
- "opentelemetry.instrumentation.auto_instrumentation.execl"
- )
- cls.which_patcher = patch(
- "opentelemetry.instrumentation.auto_instrumentation.which"
- )
-
- cls.execl_patcher.start()
- cls.which_patcher.start()
-
- @classmethod
- def tearDownClass(cls):
- cls.execl_patcher.stop()
- cls.which_patcher.stop()
-
- @patch("sys.argv", ["instrument", ""])
- @patch.dict("os.environ", {"PYTHONPATH": ""})
- def test_empty(self):
- auto_instrumentation.run()
- self.assertEqual(
- environ["PYTHONPATH"],
- pathsep.join([self.auto_instrumentation_path, getcwd()]),
- )
-
- @patch("sys.argv", ["instrument", ""])
- @patch.dict("os.environ", {"PYTHONPATH": "abc"})
- def test_non_empty(self):
- auto_instrumentation.run()
- self.assertEqual(
- environ["PYTHONPATH"],
- pathsep.join([self.auto_instrumentation_path, getcwd(), "abc"]),
- )
-
- @patch("sys.argv", ["instrument", ""])
- @patch.dict(
- "os.environ",
- {"PYTHONPATH": pathsep.join(["abc", auto_instrumentation_path])},
- )
- def test_after_path(self):
- auto_instrumentation.run()
- self.assertEqual(
- environ["PYTHONPATH"],
- pathsep.join([self.auto_instrumentation_path, getcwd(), "abc"]),
- )
-
- @patch("sys.argv", ["instrument", ""])
- @patch.dict(
- "os.environ",
- {
- "PYTHONPATH": pathsep.join(
- [auto_instrumentation_path, "abc", auto_instrumentation_path]
- )
- },
- )
- def test_single_path(self):
- auto_instrumentation.run()
- self.assertEqual(
- environ["PYTHONPATH"],
- pathsep.join([self.auto_instrumentation_path, getcwd(), "abc"]),
- )
-
-
-class TestExecl(TestCase):
- @patch("sys.argv", ["1", "2", "3"])
- @patch("opentelemetry.instrumentation.auto_instrumentation.which")
- @patch("opentelemetry.instrumentation.auto_instrumentation.execl")
- def test_execl(
- self, mock_execl, mock_which
- ): # pylint: disable=no-self-use
- mock_which.configure_mock(**{"return_value": "python"})
-
- auto_instrumentation.run()
-
- mock_execl.assert_called_with("python", "python", "3")
-
-
-class TestArgs(TestCase):
- @patch("opentelemetry.instrumentation.auto_instrumentation.execl")
- def test_exporter(self, _): # pylint: disable=no-self-use
- with patch("sys.argv", ["instrument", "2"]):
- auto_instrumentation.run()
- self.assertIsNone(environ.get(OTEL_TRACES_EXPORTER))
-
- with patch(
- "sys.argv",
- ["instrument", "--traces_exporter", "jaeger", "1", "2"],
- ):
- auto_instrumentation.run()
- self.assertEqual(environ.get(OTEL_TRACES_EXPORTER), "jaeger")
diff --git a/opentelemetry-instrumentation/tests/test_utils.py b/opentelemetry-instrumentation/tests/test_utils.py
deleted file mode 100644
index e5246335c9..0000000000
--- a/opentelemetry-instrumentation/tests/test_utils.py
+++ /dev/null
@@ -1,57 +0,0 @@
-# Copyright The OpenTelemetry Authors
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-from http import HTTPStatus
-
-from opentelemetry.instrumentation.utils import http_status_to_status_code
-from opentelemetry.test.test_base import TestBase
-from opentelemetry.trace import StatusCode
-
-
-class TestUtils(TestBase):
- # See https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/semantic_conventions/http.md#status
- def test_http_status_to_status_code(self):
- for status_code, expected in (
- (HTTPStatus.OK, StatusCode.UNSET),
- (HTTPStatus.ACCEPTED, StatusCode.UNSET),
- (HTTPStatus.IM_USED, StatusCode.UNSET),
- (HTTPStatus.MULTIPLE_CHOICES, StatusCode.UNSET),
- (HTTPStatus.BAD_REQUEST, StatusCode.ERROR),
- (HTTPStatus.UNAUTHORIZED, StatusCode.ERROR),
- (HTTPStatus.FORBIDDEN, StatusCode.ERROR),
- (HTTPStatus.NOT_FOUND, StatusCode.ERROR),
- (
- HTTPStatus.UNPROCESSABLE_ENTITY,
- StatusCode.ERROR,
- ),
- (
- HTTPStatus.TOO_MANY_REQUESTS,
- StatusCode.ERROR,
- ),
- (HTTPStatus.NOT_IMPLEMENTED, StatusCode.ERROR),
- (HTTPStatus.SERVICE_UNAVAILABLE, StatusCode.ERROR),
- (
- HTTPStatus.GATEWAY_TIMEOUT,
- StatusCode.ERROR,
- ),
- (
- HTTPStatus.HTTP_VERSION_NOT_SUPPORTED,
- StatusCode.ERROR,
- ),
- (600, StatusCode.ERROR),
- (99, StatusCode.ERROR),
- ):
- with self.subTest(status_code=status_code):
- actual = http_status_to_status_code(int(status_code))
- self.assertEqual(actual, expected, status_code)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py
index ade8e516c9..e315109d13 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/logs/v1/logs_service.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -19,6 +19,7 @@
package='opentelemetry.proto.collector.logs.v1',
syntax='proto3',
serialized_options=b'\n(io.opentelemetry.proto.collector.logs.v1B\020LogsServiceProtoP\001ZFgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/logs/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n8opentelemetry/proto/collector/logs/v1/logs_service.proto\x12%opentelemetry.proto.collector.logs.v1\x1a&opentelemetry/proto/logs/v1/logs.proto\"\\\n\x18\x45xportLogsServiceRequest\x12@\n\rresource_logs\x18\x01 \x03(\x0b\x32).opentelemetry.proto.logs.v1.ResourceLogs\"\x1b\n\x19\x45xportLogsServiceResponse2\x9d\x01\n\x0bLogsService\x12\x8d\x01\n\x06\x45xport\x12?.opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest\x1a@.opentelemetry.proto.collector.logs.v1.ExportLogsServiceResponse\"\x00\x42\x86\x01\n(io.opentelemetry.proto.collector.logs.v1B\x10LogsServiceProtoP\x01ZFgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/logs/v1b\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_logs_dot_v1_dot_logs__pb2.DESCRIPTOR,])
@@ -32,6 +33,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_logs', full_name='opentelemetry.proto.collector.logs.v1.ExportLogsServiceRequest.resource_logs', index=0,
@@ -39,7 +41,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -63,6 +65,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
@@ -108,6 +111,7 @@
file=DESCRIPTOR,
index=0,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
serialized_start=263,
serialized_end=420,
methods=[
@@ -119,6 +123,7 @@
input_type=_EXPORTLOGSSERVICEREQUEST,
output_type=_EXPORTLOGSSERVICERESPONSE,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_LOGSSERVICE)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi
index da5d542e05..5940c192b2 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2.pyi
@@ -15,20 +15,24 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class ExportLogsServiceRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
RESOURCE_LOGS_FIELD_NUMBER: builtins.int
-
@property
- def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]: ...
-
+ def resource_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]:
+ """An array of ResourceLogs.
+ For data coming from a single resource this array will typically contain one
+ element. Intermediary nodes (such as OpenTelemetry Collector) that receive
+ data from multiple origins typically batch the data before forwarding further and
+ in that case this array will contain multiple elements.
+ """
+ pass
def __init__(self,
*,
resource_logs : typing.Optional[typing.Iterable[opentelemetry.proto.logs.v1.logs_pb2.ResourceLogs]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"resource_logs",b"resource_logs"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["resource_logs",b"resource_logs"]) -> None: ...
global___ExportLogsServiceRequest = ExportLogsServiceRequest
class ExportLogsServiceResponse(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
-
def __init__(self,
) -> None: ...
global___ExportLogsServiceResponse = ExportLogsServiceResponse
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py
index c929cdc84c..4d55e57778 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/logs/v1/logs_service_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from opentelemetry.proto.collector.logs.v1 import logs_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2
@@ -64,6 +65,7 @@ def Export(request,
options=(),
channel_credentials=None,
call_credentials=None,
+ insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
@@ -72,4 +74,4 @@ def Export(request,
opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceRequest.SerializeToString,
opentelemetry_dot_proto_dot_collector_dot_logs_dot_v1_dot_logs__service__pb2.ExportLogsServiceResponse.FromString,
options, channel_credentials,
- call_credentials, compression, wait_for_ready, timeout, metadata)
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py
index 96bb34bc8f..ba3c7902ff 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/metrics/v1/metrics_service.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -19,6 +19,7 @@
package='opentelemetry.proto.collector.metrics.v1',
syntax='proto3',
serialized_options=b'\n+io.opentelemetry.proto.collector.metrics.v1B\023MetricsServiceProtoP\001ZIgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/metrics/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n>opentelemetry/proto/collector/metrics/v1/metrics_service.proto\x12(opentelemetry.proto.collector.metrics.v1\x1a,opentelemetry/proto/metrics/v1/metrics.proto\"h\n\x1b\x45xportMetricsServiceRequest\x12I\n\x10resource_metrics\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.ResourceMetrics\"\x1e\n\x1c\x45xportMetricsServiceResponse2\xac\x01\n\x0eMetricsService\x12\x99\x01\n\x06\x45xport\x12\x45.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest\x1a\x46.opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceResponse\"\x00\x42\x8f\x01\n+io.opentelemetry.proto.collector.metrics.v1B\x13MetricsServiceProtoP\x01ZIgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/metrics/v1b\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_metrics_dot_v1_dot_metrics__pb2.DESCRIPTOR,])
@@ -32,6 +33,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_metrics', full_name='opentelemetry.proto.collector.metrics.v1.ExportMetricsServiceRequest.resource_metrics', index=0,
@@ -39,7 +41,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -63,6 +65,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
@@ -108,6 +111,7 @@
file=DESCRIPTOR,
index=0,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
serialized_start=293,
serialized_end=465,
methods=[
@@ -119,6 +123,7 @@
input_type=_EXPORTMETRICSSERVICEREQUEST,
output_type=_EXPORTMETRICSSERVICERESPONSE,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_METRICSSERVICE)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi
index be50774a30..1acc1de3f3 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2.pyi
@@ -15,20 +15,24 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class ExportMetricsServiceRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
RESOURCE_METRICS_FIELD_NUMBER: builtins.int
-
@property
- def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]: ...
-
+ def resource_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]:
+ """An array of ResourceMetrics.
+ For data coming from a single resource this array will typically contain one
+ element. Intermediary nodes (such as OpenTelemetry Collector) that receive
+ data from multiple origins typically batch the data before forwarding further and
+ in that case this array will contain multiple elements.
+ """
+ pass
def __init__(self,
*,
resource_metrics : typing.Optional[typing.Iterable[opentelemetry.proto.metrics.v1.metrics_pb2.ResourceMetrics]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"resource_metrics",b"resource_metrics"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["resource_metrics",b"resource_metrics"]) -> None: ...
global___ExportMetricsServiceRequest = ExportMetricsServiceRequest
class ExportMetricsServiceResponse(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
-
def __init__(self,
) -> None: ...
global___ExportMetricsServiceResponse = ExportMetricsServiceResponse
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py
index c58f717616..c181c44641 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/metrics/v1/metrics_service_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from opentelemetry.proto.collector.metrics.v1 import metrics_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2
@@ -64,6 +65,7 @@ def Export(request,
options=(),
channel_credentials=None,
call_credentials=None,
+ insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
@@ -72,4 +74,4 @@ def Export(request,
opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceRequest.SerializeToString,
opentelemetry_dot_proto_dot_collector_dot_metrics_dot_v1_dot_metrics__service__pb2.ExportMetricsServiceResponse.FromString,
options, channel_credentials,
- call_credentials, compression, wait_for_ready, timeout, metadata)
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py
index ccdfb345d9..4648414b79 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/collector/trace/v1/trace_service.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -19,6 +19,7 @@
package='opentelemetry.proto.collector.trace.v1',
syntax='proto3',
serialized_options=b'\n)io.opentelemetry.proto.collector.trace.v1B\021TraceServiceProtoP\001ZGgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/trace/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n:opentelemetry/proto/collector/trace/v1/trace_service.proto\x12&opentelemetry.proto.collector.trace.v1\x1a(opentelemetry/proto/trace/v1/trace.proto\"`\n\x19\x45xportTraceServiceRequest\x12\x43\n\x0eresource_spans\x18\x01 \x03(\x0b\x32+.opentelemetry.proto.trace.v1.ResourceSpans\"\x1c\n\x1a\x45xportTraceServiceResponse2\xa2\x01\n\x0cTraceService\x12\x91\x01\n\x06\x45xport\x12\x41.opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest\x1a\x42.opentelemetry.proto.collector.trace.v1.ExportTraceServiceResponse\"\x00\x42\x89\x01\n)io.opentelemetry.proto.collector.trace.v1B\x11TraceServiceProtoP\x01ZGgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/trace/v1b\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_trace_dot_v1_dot_trace__pb2.DESCRIPTOR,])
@@ -32,6 +33,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource_spans', full_name='opentelemetry.proto.collector.trace.v1.ExportTraceServiceRequest.resource_spans', index=0,
@@ -39,7 +41,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -63,6 +65,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
@@ -108,6 +111,7 @@
file=DESCRIPTOR,
index=0,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
serialized_start=273,
serialized_end=435,
methods=[
@@ -119,6 +123,7 @@
input_type=_EXPORTTRACESERVICEREQUEST,
output_type=_EXPORTTRACESERVICERESPONSE,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_TRACESERVICE)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi
index f40bd9a179..7ed93e76de 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2.pyi
@@ -15,20 +15,24 @@ DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class ExportTraceServiceRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
RESOURCE_SPANS_FIELD_NUMBER: builtins.int
-
@property
- def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]: ...
-
+ def resource_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]:
+ """An array of ResourceSpans.
+ For data coming from a single resource this array will typically contain one
+ element. Intermediary nodes (such as OpenTelemetry Collector) that receive
+ data from multiple origins typically batch the data before forwarding further and
+ in that case this array will contain multiple elements.
+ """
+ pass
def __init__(self,
*,
resource_spans : typing.Optional[typing.Iterable[opentelemetry.proto.trace.v1.trace_pb2.ResourceSpans]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"resource_spans",b"resource_spans"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["resource_spans",b"resource_spans"]) -> None: ...
global___ExportTraceServiceRequest = ExportTraceServiceRequest
class ExportTraceServiceResponse(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
-
def __init__(self,
) -> None: ...
global___ExportTraceServiceResponse = ExportTraceServiceResponse
diff --git a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py
index 3b9efef6d6..81dbbe59f3 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/collector/trace/v1/trace_service_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from opentelemetry.proto.collector.trace.v1 import trace_service_pb2 as opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2
@@ -64,6 +65,7 @@ def Export(request,
options=(),
channel_credentials=None,
call_credentials=None,
+ insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
@@ -72,4 +74,4 @@ def Export(request,
opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceRequest.SerializeToString,
opentelemetry_dot_proto_dot_collector_dot_trace_dot_v1_dot_trace__service__pb2.ExportTraceServiceResponse.FromString,
options, channel_credentials,
- call_credentials, compression, wait_for_ready, timeout, metadata)
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py
index 5371e9facf..4578f9409f 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/common/v1/common.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -18,6 +18,7 @@
package='opentelemetry.proto.common.v1',
syntax='proto3',
serialized_options=b'\n io.opentelemetry.proto.common.v1B\013CommonProtoP\001Z>github.com/open-telemetry/opentelemetry-proto/gen/go/common/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n*opentelemetry/proto/common/v1/common.proto\x12\x1dopentelemetry.proto.common.v1\"\x8c\x02\n\x08\x41nyValue\x12\x16\n\x0cstring_value\x18\x01 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x02 \x01(\x08H\x00\x12\x13\n\tint_value\x18\x03 \x01(\x03H\x00\x12\x16\n\x0c\x64ouble_value\x18\x04 \x01(\x01H\x00\x12@\n\x0b\x61rray_value\x18\x05 \x01(\x0b\x32).opentelemetry.proto.common.v1.ArrayValueH\x00\x12\x43\n\x0ckvlist_value\x18\x06 \x01(\x0b\x32+.opentelemetry.proto.common.v1.KeyValueListH\x00\x12\x15\n\x0b\x62ytes_value\x18\x07 \x01(\x0cH\x00\x42\x07\n\x05value\"E\n\nArrayValue\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"G\n\x0cKeyValueList\x12\x37\n\x06values\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\"O\n\x08KeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x36\n\x05value\x18\x02 \x01(\x0b\x32\'.opentelemetry.proto.common.v1.AnyValue\"0\n\x0eStringKeyValue\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x18\x01\"7\n\x16InstrumentationLibrary\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0f\n\x07version\x18\x02 \x01(\tBq\n io.opentelemetry.proto.common.v1B\x0b\x43ommonProtoP\x01Z>github.com/open-telemetry/opentelemetry-proto/gen/go/common/v1b\x06proto3'
)
@@ -30,6 +31,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='string_value', full_name='opentelemetry.proto.common.v1.AnyValue.string_value', index=0,
@@ -37,49 +39,49 @@
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bool_value', full_name='opentelemetry.proto.common.v1.AnyValue.bool_value', index=1,
number=2, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='int_value', full_name='opentelemetry.proto.common.v1.AnyValue.int_value', index=2,
number=3, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='double_value', full_name='opentelemetry.proto.common.v1.AnyValue.double_value', index=3,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='array_value', full_name='opentelemetry.proto.common.v1.AnyValue.array_value', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='kvlist_value', full_name='opentelemetry.proto.common.v1.AnyValue.kvlist_value', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bytes_value', full_name='opentelemetry.proto.common.v1.AnyValue.bytes_value', index=6,
number=7, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -93,7 +95,9 @@
oneofs=[
_descriptor.OneofDescriptor(
name='value', full_name='opentelemetry.proto.common.v1.AnyValue.value',
- index=0, containing_type=None, fields=[]),
+ index=0, containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[]),
],
serialized_start=78,
serialized_end=346,
@@ -106,6 +110,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='values', full_name='opentelemetry.proto.common.v1.ArrayValue.values', index=0,
@@ -113,7 +118,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -137,6 +142,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='values', full_name='opentelemetry.proto.common.v1.KeyValueList.values', index=0,
@@ -144,7 +150,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -168,6 +174,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='opentelemetry.proto.common.v1.KeyValue.key', index=0,
@@ -175,14 +182,14 @@
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='opentelemetry.proto.common.v1.KeyValue.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -206,6 +213,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='opentelemetry.proto.common.v1.StringKeyValue.key', index=0,
@@ -213,14 +221,14 @@
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='opentelemetry.proto.common.v1.StringKeyValue.value', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -244,6 +252,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='opentelemetry.proto.common.v1.InstrumentationLibrary.name', index=0,
@@ -251,14 +260,14 @@
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='opentelemetry.proto.common.v1.InstrumentationLibrary.version', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
diff --git a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi
index 144df2d4a0..54789ef893 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/common/v1/common_pb2.pyi
@@ -12,6 +12,10 @@ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class AnyValue(google.protobuf.message.Message):
+ """AnyValue is used to represent any type of attribute value. AnyValue may contain a
+ primitive value such as a string or integer or it may contain an arbitrary nested
+ object containing arrays, key-value lists and primitives.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
STRING_VALUE_FIELD_NUMBER: builtins.int
BOOL_VALUE_FIELD_NUMBER: builtins.int
@@ -24,14 +28,11 @@ class AnyValue(google.protobuf.message.Message):
bool_value: builtins.bool = ...
int_value: builtins.int = ...
double_value: builtins.float = ...
- bytes_value: builtins.bytes = ...
-
@property
def array_value(self) -> global___ArrayValue: ...
-
@property
def kvlist_value(self) -> global___KeyValueList: ...
-
+ bytes_value: builtins.bytes = ...
def __init__(self,
*,
string_value : typing.Text = ...,
@@ -42,83 +43,101 @@ class AnyValue(google.protobuf.message.Message):
kvlist_value : typing.Optional[global___KeyValueList] = ...,
bytes_value : builtins.bytes = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"array_value",b"array_value",u"bool_value",b"bool_value",u"bytes_value",b"bytes_value",u"double_value",b"double_value",u"int_value",b"int_value",u"kvlist_value",b"kvlist_value",u"string_value",b"string_value",u"value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"array_value",b"array_value",u"bool_value",b"bool_value",u"bytes_value",b"bytes_value",u"double_value",b"double_value",u"int_value",b"int_value",u"kvlist_value",b"kvlist_value",u"string_value",b"string_value",u"value",b"value"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"value",b"value"]) -> typing_extensions.Literal["string_value","bool_value","int_value","double_value","array_value","kvlist_value","bytes_value"]: ...
+ def HasField(self, field_name: typing_extensions.Literal["array_value",b"array_value","bool_value",b"bool_value","bytes_value",b"bytes_value","double_value",b"double_value","int_value",b"int_value","kvlist_value",b"kvlist_value","string_value",b"string_value","value",b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["array_value",b"array_value","bool_value",b"bool_value","bytes_value",b"bytes_value","double_value",b"double_value","int_value",b"int_value","kvlist_value",b"kvlist_value","string_value",b"string_value","value",b"value"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["string_value","bool_value","int_value","double_value","array_value","kvlist_value","bytes_value"]]: ...
global___AnyValue = AnyValue
class ArrayValue(google.protobuf.message.Message):
+ """ArrayValue is a list of AnyValue messages. We need ArrayValue as a message
+ since oneof in AnyValue does not allow repeated fields.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
VALUES_FIELD_NUMBER: builtins.int
-
@property
- def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyValue]: ...
-
+ def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___AnyValue]:
+ """Array of values. The array may be empty (contain 0 elements)."""
+ pass
def __init__(self,
*,
values : typing.Optional[typing.Iterable[global___AnyValue]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"values",b"values"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
global___ArrayValue = ArrayValue
class KeyValueList(google.protobuf.message.Message):
+ """KeyValueList is a list of KeyValue messages. We need KeyValueList as a message
+ since `oneof` in AnyValue does not allow repeated fields. Everywhere else where we need
+ a list of KeyValue messages (e.g. in Span) we use `repeated KeyValue` directly to
+ avoid unnecessary extra wrapping (which slows down the protocol). The 2 approaches
+ are semantically equivalent.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
VALUES_FIELD_NUMBER: builtins.int
-
@property
- def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]: ...
-
+ def values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___KeyValue]:
+ """A collection of key/value pairs of key-value pairs. The list may be empty (may
+ contain 0 elements).
+ """
+ pass
def __init__(self,
*,
values : typing.Optional[typing.Iterable[global___KeyValue]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"values",b"values"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
global___KeyValueList = KeyValueList
class KeyValue(google.protobuf.message.Message):
+ """KeyValue is a key-value pair that is used to store Span attributes, Link
+ attributes, etc.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: typing.Text = ...
-
@property
def value(self) -> global___AnyValue: ...
-
def __init__(self,
*,
key : typing.Text = ...,
value : typing.Optional[global___AnyValue] = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
global___KeyValue = KeyValue
class StringKeyValue(google.protobuf.message.Message):
+ """StringKeyValue is a pair of key/value strings. This is the simpler (and faster) version
+ of KeyValue that only supports string values.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: typing.Text = ...
value: typing.Text = ...
-
def __init__(self,
*,
key : typing.Text = ...,
value : typing.Text = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"key",b"key",u"value",b"value"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
global___StringKeyValue = StringKeyValue
class InstrumentationLibrary(google.protobuf.message.Message):
+ """InstrumentationLibrary is a message representing the instrumentation library information
+ such as the fully qualified name and version.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
NAME_FIELD_NUMBER: builtins.int
VERSION_FIELD_NUMBER: builtins.int
name: typing.Text = ...
- version: typing.Text = ...
+ """An empty instrumentation library name means the name is unknown."""
+ version: typing.Text = ...
def __init__(self,
*,
name : typing.Text = ...,
version : typing.Text = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"name",b"name",u"version",b"version"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["name",b"name","version",b"version"]) -> None: ...
global___InstrumentationLibrary = InstrumentationLibrary
diff --git a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py
index b9a8468d6c..31adc70f13 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/logs/v1/logs_pb2.py
@@ -1,253 +1,194 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/logs/v1/logs.proto
-
+"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
-
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
-from opentelemetry.proto.common.v1 import (
- common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2,
-)
-from opentelemetry.proto.resource.v1 import (
- resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2,
-)
+from opentelemetry.proto.common.v1 import common_pb2 as opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2
+from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
- name="opentelemetry/proto/logs/v1/logs.proto",
- package="opentelemetry.proto.logs.v1",
- syntax="proto3",
- serialized_options=b"\n\036io.opentelemetry.proto.logs.v1B\tLogsProtoP\001Z opentelemetry.proto.resource.v1.resource_pb2.Resource: ...
+ def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
+ """The resource for the logs in this message.
+ If this field is not set then resource info is unknown.
+ """
+ pass
@property
- def instrumentation_library_logs(
- self,
- ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
- global___InstrumentationLibraryLogs
- ]: ...
- def __init__(
- self,
- *,
- resource: typing.Optional[
- opentelemetry.proto.resource.v1.resource_pb2.Resource
- ] = ...,
- instrumentation_library_logs: typing.Optional[
- typing.Iterable[global___InstrumentationLibraryLogs]
- ] = ...,
- schema_url: typing.Text = ...,
- ) -> None: ...
- def HasField(
- self, field_name: typing_extensions.Literal["resource", b"resource"]
- ) -> builtins.bool: ...
- def ClearField(
- self,
- field_name: typing_extensions.Literal[
- "instrumentation_library_logs",
- b"instrumentation_library_logs",
- "resource",
- b"resource",
- "schema_url",
- b"schema_url",
- ],
- ) -> None: ...
+ def instrumentation_library_logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibraryLogs]:
+ """A list of InstrumentationLibraryLogs that originate from a resource."""
+ pass
+ schema_url: typing.Text = ...
+ """This schema_url applies to the data in the "resource" field. It does not apply
+ to the data in the "instrumentation_library_logs" field which have their own
+ schema_url field.
+ """
+ def __init__(self,
+ *,
+ resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ...,
+ instrumentation_library_logs : typing.Optional[typing.Iterable[global___InstrumentationLibraryLogs]] = ...,
+ schema_url : typing.Text = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library_logs",b"instrumentation_library_logs","resource",b"resource","schema_url",b"schema_url"]) -> None: ...
global___ResourceLogs = ResourceLogs
class InstrumentationLibraryLogs(google.protobuf.message.Message):
+ """A collection of Logs produced by an InstrumentationLibrary."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
INSTRUMENTATION_LIBRARY_FIELD_NUMBER: builtins.int
LOGS_FIELD_NUMBER: builtins.int
SCHEMA_URL_FIELD_NUMBER: builtins.int
- schema_url: typing.Text = ...
@property
- def instrumentation_library(
- self,
- ) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary: ...
+ def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary:
+ """The instrumentation library information for the logs in this message.
+ Semantically when InstrumentationLibrary isn't set, it is equivalent with
+ an empty instrumentation library name (unknown).
+ """
+ pass
@property
- def logs(
- self,
- ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
- global___LogRecord
- ]: ...
- def __init__(
- self,
- *,
- instrumentation_library: typing.Optional[
- opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary
- ] = ...,
- logs: typing.Optional[typing.Iterable[global___LogRecord]] = ...,
- schema_url: typing.Text = ...,
- ) -> None: ...
- def HasField(
- self,
- field_name: typing_extensions.Literal[
- "instrumentation_library", b"instrumentation_library"
- ],
- ) -> builtins.bool: ...
- def ClearField(
- self,
- field_name: typing_extensions.Literal[
- "instrumentation_library",
- b"instrumentation_library",
- "logs",
- b"logs",
- "schema_url",
- b"schema_url",
- ],
- ) -> None: ...
+ def logs(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___LogRecord]:
+ """A list of log records."""
+ pass
+ schema_url: typing.Text = ...
+ """This schema_url applies to all logs in the "logs" field."""
+ def __init__(self,
+ *,
+ instrumentation_library : typing.Optional[opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary] = ...,
+ logs : typing.Optional[typing.Iterable[global___LogRecord]] = ...,
+ schema_url : typing.Text = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library","logs",b"logs","schema_url",b"schema_url"]) -> None: ...
global___InstrumentationLibraryLogs = InstrumentationLibraryLogs
class LogRecord(google.protobuf.message.Message):
+ """A log record according to OpenTelemetry Log Data Model:
+ https://github.com/open-telemetry/oteps/blob/main/text/logs/0097-log-data-model.md
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
SEVERITY_NUMBER_FIELD_NUMBER: builtins.int
@@ -198,66 +172,74 @@ class LogRecord(google.protobuf.message.Message):
TRACE_ID_FIELD_NUMBER: builtins.int
SPAN_ID_FIELD_NUMBER: builtins.int
time_unix_nano: builtins.int = ...
+ """time_unix_nano is the time when the event occurred.
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
+ Value of 0 indicates unknown or missing timestamp.
+ """
+
severity_number: global___SeverityNumber.V = ...
+ """Numerical value of the severity, normalized to values described in Log Data Model.
+ [Optional].
+ """
+
severity_text: typing.Text = ...
+ """The severity text (also known as log level). The original string representation as
+ it is known at the source. [Optional].
+ """
+
name: typing.Text = ...
+ """Short event identifier that does not contain varying parts. Name describes
+ what happened (e.g. "ProcessStarted"). Recommended to be no longer than 50
+ characters. Not guaranteed to be unique in any way. [Optional].
+ """
+
+ @property
+ def body(self) -> opentelemetry.proto.common.v1.common_pb2.AnyValue:
+ """A value containing the body of the log record. Can be for example a human-readable
+ string message (including multi-line) describing the event in a free form or it can
+ be a structured data composed of arrays and maps of other values. [Optional].
+ """
+ pass
+ @property
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """Additional attributes that describe the specific event occurrence. [Optional]."""
+ pass
dropped_attributes_count: builtins.int = ...
flags: builtins.int = ...
+ """Flags, a bit field. 8 least significant bits are the trace flags as
+ defined in W3C Trace Context specification. 24 most significant bits are reserved
+ and must be set to 0. Readers must not assume that 24 most significant bits
+ will be zero and must correctly mask the bits when reading 8-bit trace flag (use
+ flags & TRACE_FLAGS_MASK). [Optional].
+ """
+
trace_id: builtins.bytes = ...
+ """A unique identifier for a trace. All logs from the same trace share
+ the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes
+ is considered invalid. Can be set for logs that are part of request processing
+ and have an assigned trace id. [Optional].
+ """
+
span_id: builtins.bytes = ...
- @property
- def body(self) -> opentelemetry.proto.common.v1.common_pb2.AnyValue: ...
- @property
- def attributes(
- self,
- ) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[
- opentelemetry.proto.common.v1.common_pb2.KeyValue
- ]: ...
- def __init__(
- self,
- *,
- time_unix_nano: builtins.int = ...,
- severity_number: global___SeverityNumber.V = ...,
- severity_text: typing.Text = ...,
- name: typing.Text = ...,
- body: typing.Optional[
- opentelemetry.proto.common.v1.common_pb2.AnyValue
- ] = ...,
- attributes: typing.Optional[
- typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]
- ] = ...,
- dropped_attributes_count: builtins.int = ...,
- flags: builtins.int = ...,
- trace_id: builtins.bytes = ...,
- span_id: builtins.bytes = ...,
- ) -> None: ...
- def HasField(
- self, field_name: typing_extensions.Literal["body", b"body"]
- ) -> builtins.bool: ...
- def ClearField(
- self,
- field_name: typing_extensions.Literal[
- "attributes",
- b"attributes",
- "body",
- b"body",
- "dropped_attributes_count",
- b"dropped_attributes_count",
- "flags",
- b"flags",
- "name",
- b"name",
- "severity_number",
- b"severity_number",
- "severity_text",
- b"severity_text",
- "span_id",
- b"span_id",
- "time_unix_nano",
- b"time_unix_nano",
- "trace_id",
- b"trace_id",
- ],
- ) -> None: ...
+ """A unique identifier for a span within a trace, assigned when the span
+ is created. The ID is an 8-byte array. An ID with all zeroes is considered
+ invalid. Can be set for logs that are part of a particular processing span.
+ If span_id is present trace_id SHOULD be also present. [Optional].
+ """
+ def __init__(self,
+ *,
+ time_unix_nano : builtins.int = ...,
+ severity_number : global___SeverityNumber.V = ...,
+ severity_text : typing.Text = ...,
+ name : typing.Text = ...,
+ body : typing.Optional[opentelemetry.proto.common.v1.common_pb2.AnyValue] = ...,
+ attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
+ dropped_attributes_count : builtins.int = ...,
+ flags : builtins.int = ...,
+ trace_id : builtins.bytes = ...,
+ span_id : builtins.bytes = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["body",b"body"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","body",b"body","dropped_attributes_count",b"dropped_attributes_count","flags",b"flags","name",b"name","severity_number",b"severity_number","severity_text",b"severity_text","span_id",b"span_id","time_unix_nano",b"time_unix_nano","trace_id",b"trace_id"]) -> None: ...
global___LogRecord = LogRecord
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2.py
deleted file mode 100644
index c2a8deb607..0000000000
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2.py
+++ /dev/null
@@ -1,267 +0,0 @@
-# -*- coding: utf-8 -*-
-# Generated by the protocol buffer compiler. DO NOT EDIT!
-# source: opentelemetry/proto/metrics/experimental/configservice.proto
-
-from google.protobuf import descriptor as _descriptor
-from google.protobuf import message as _message
-from google.protobuf import reflection as _reflection
-from google.protobuf import symbol_database as _symbol_database
-# @@protoc_insertion_point(imports)
-
-_sym_db = _symbol_database.Default()
-
-
-from opentelemetry.proto.resource.v1 import resource_pb2 as opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2
-
-
-DESCRIPTOR = _descriptor.FileDescriptor(
- name='opentelemetry/proto/metrics/experimental/configservice.proto',
- package='opentelemetry.proto.metrics.experimental',
- syntax='proto3',
- serialized_options=b'\n+io.opentelemetry.proto.metrics.experimentalB\030MetricConfigServiceProtoP\001ZIgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/metrics/experimental',
- serialized_pb=b'\n.opentelemetry.proto.metrics.experimental.MetricConfigResponseB\x94\x01\n+io.opentelemetry.proto.metrics.experimentalB\x18MetricConfigServiceProtoP\x01ZIgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/metrics/experimentalb\x06proto3'
- ,
- dependencies=[opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2.DESCRIPTOR,])
-
-
-
-
-_METRICCONFIGREQUEST = _descriptor.Descriptor(
- name='MetricConfigRequest',
- full_name='opentelemetry.proto.metrics.experimental.MetricConfigRequest',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='resource', full_name='opentelemetry.proto.metrics.experimental.MetricConfigRequest.resource', index=0,
- number=1, type=11, cpp_type=10, label=1,
- has_default_value=False, default_value=None,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='last_known_fingerprint', full_name='opentelemetry.proto.metrics.experimental.MetricConfigRequest.last_known_fingerprint', index=1,
- number=2, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=b"",
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=154,
- serialized_end=268,
-)
-
-
-_METRICCONFIGRESPONSE_SCHEDULE_PATTERN = _descriptor.Descriptor(
- name='Pattern',
- full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='equals', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern.equals', index=0,
- number=1, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='starts_with', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern.starts_with', index=1,
- number=2, type=9, cpp_type=9, label=1,
- has_default_value=False, default_value=b"".decode('utf-8'),
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- _descriptor.OneofDescriptor(
- name='match', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern.match',
- index=0, containing_type=None, fields=[]),
- ],
- serialized_start=692,
- serialized_end=751,
-)
-
-_METRICCONFIGRESPONSE_SCHEDULE = _descriptor.Descriptor(
- name='Schedule',
- full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='exclusion_patterns', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.exclusion_patterns', index=0,
- number=1, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='inclusion_patterns', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.inclusion_patterns', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='period_sec', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.period_sec', index=2,
- number=3, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[_METRICCONFIGRESPONSE_SCHEDULE_PATTERN, ],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=442,
- serialized_end=751,
-)
-
-_METRICCONFIGRESPONSE = _descriptor.Descriptor(
- name='MetricConfigResponse',
- full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse',
- filename=None,
- file=DESCRIPTOR,
- containing_type=None,
- fields=[
- _descriptor.FieldDescriptor(
- name='fingerprint', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.fingerprint', index=0,
- number=1, type=12, cpp_type=9, label=1,
- has_default_value=False, default_value=b"",
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='schedules', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.schedules', index=1,
- number=2, type=11, cpp_type=10, label=3,
- has_default_value=False, default_value=[],
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- _descriptor.FieldDescriptor(
- name='suggested_wait_time_sec', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.suggested_wait_time_sec', index=2,
- number=3, type=5, cpp_type=1, label=1,
- has_default_value=False, default_value=0,
- message_type=None, enum_type=None, containing_type=None,
- is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
- ],
- extensions=[
- ],
- nested_types=[_METRICCONFIGRESPONSE_SCHEDULE, ],
- enum_types=[
- ],
- serialized_options=None,
- is_extendable=False,
- syntax='proto3',
- extension_ranges=[],
- oneofs=[
- ],
- serialized_start=271,
- serialized_end=751,
-)
-
-_METRICCONFIGREQUEST.fields_by_name['resource'].message_type = opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2._RESOURCE
-_METRICCONFIGRESPONSE_SCHEDULE_PATTERN.containing_type = _METRICCONFIGRESPONSE_SCHEDULE
-_METRICCONFIGRESPONSE_SCHEDULE_PATTERN.oneofs_by_name['match'].fields.append(
- _METRICCONFIGRESPONSE_SCHEDULE_PATTERN.fields_by_name['equals'])
-_METRICCONFIGRESPONSE_SCHEDULE_PATTERN.fields_by_name['equals'].containing_oneof = _METRICCONFIGRESPONSE_SCHEDULE_PATTERN.oneofs_by_name['match']
-_METRICCONFIGRESPONSE_SCHEDULE_PATTERN.oneofs_by_name['match'].fields.append(
- _METRICCONFIGRESPONSE_SCHEDULE_PATTERN.fields_by_name['starts_with'])
-_METRICCONFIGRESPONSE_SCHEDULE_PATTERN.fields_by_name['starts_with'].containing_oneof = _METRICCONFIGRESPONSE_SCHEDULE_PATTERN.oneofs_by_name['match']
-_METRICCONFIGRESPONSE_SCHEDULE.fields_by_name['exclusion_patterns'].message_type = _METRICCONFIGRESPONSE_SCHEDULE_PATTERN
-_METRICCONFIGRESPONSE_SCHEDULE.fields_by_name['inclusion_patterns'].message_type = _METRICCONFIGRESPONSE_SCHEDULE_PATTERN
-_METRICCONFIGRESPONSE_SCHEDULE.containing_type = _METRICCONFIGRESPONSE
-_METRICCONFIGRESPONSE.fields_by_name['schedules'].message_type = _METRICCONFIGRESPONSE_SCHEDULE
-DESCRIPTOR.message_types_by_name['MetricConfigRequest'] = _METRICCONFIGREQUEST
-DESCRIPTOR.message_types_by_name['MetricConfigResponse'] = _METRICCONFIGRESPONSE
-_sym_db.RegisterFileDescriptor(DESCRIPTOR)
-
-MetricConfigRequest = _reflection.GeneratedProtocolMessageType('MetricConfigRequest', (_message.Message,), {
- 'DESCRIPTOR' : _METRICCONFIGREQUEST,
- '__module__' : 'opentelemetry.proto.metrics.experimental.configservice_pb2'
- # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.experimental.MetricConfigRequest)
- })
-_sym_db.RegisterMessage(MetricConfigRequest)
-
-MetricConfigResponse = _reflection.GeneratedProtocolMessageType('MetricConfigResponse', (_message.Message,), {
-
- 'Schedule' : _reflection.GeneratedProtocolMessageType('Schedule', (_message.Message,), {
-
- 'Pattern' : _reflection.GeneratedProtocolMessageType('Pattern', (_message.Message,), {
- 'DESCRIPTOR' : _METRICCONFIGRESPONSE_SCHEDULE_PATTERN,
- '__module__' : 'opentelemetry.proto.metrics.experimental.configservice_pb2'
- # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern)
- })
- ,
- 'DESCRIPTOR' : _METRICCONFIGRESPONSE_SCHEDULE,
- '__module__' : 'opentelemetry.proto.metrics.experimental.configservice_pb2'
- # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule)
- })
- ,
- 'DESCRIPTOR' : _METRICCONFIGRESPONSE,
- '__module__' : 'opentelemetry.proto.metrics.experimental.configservice_pb2'
- # @@protoc_insertion_point(class_scope:opentelemetry.proto.metrics.experimental.MetricConfigResponse)
- })
-_sym_db.RegisterMessage(MetricConfigResponse)
-_sym_db.RegisterMessage(MetricConfigResponse.Schedule)
-_sym_db.RegisterMessage(MetricConfigResponse.Schedule.Pattern)
-
-
-DESCRIPTOR._options = None
-
-_METRICCONFIG = _descriptor.ServiceDescriptor(
- name='MetricConfig',
- full_name='opentelemetry.proto.metrics.experimental.MetricConfig',
- file=DESCRIPTOR,
- index=0,
- serialized_options=None,
- serialized_start=754,
- serialized_end=915,
- methods=[
- _descriptor.MethodDescriptor(
- name='GetMetricConfig',
- full_name='opentelemetry.proto.metrics.experimental.MetricConfig.GetMetricConfig',
- index=0,
- containing_service=None,
- input_type=_METRICCONFIGREQUEST,
- output_type=_METRICCONFIGRESPONSE,
- serialized_options=None,
- ),
-])
-_sym_db.RegisterServiceDescriptor(_METRICCONFIG)
-
-DESCRIPTOR.services_by_name['MetricConfig'] = _METRICCONFIG
-
-# @@protoc_insertion_point(module_scope)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2.pyi
deleted file mode 100644
index 7218e03264..0000000000
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2.pyi
+++ /dev/null
@@ -1,88 +0,0 @@
-"""
-@generated by mypy-protobuf. Do not edit manually!
-isort:skip_file
-"""
-import builtins
-import google.protobuf.descriptor
-import google.protobuf.internal.containers
-import google.protobuf.message
-import opentelemetry.proto.resource.v1.resource_pb2
-import typing
-import typing_extensions
-
-DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
-
-class MetricConfigRequest(google.protobuf.message.Message):
- DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- RESOURCE_FIELD_NUMBER: builtins.int
- LAST_KNOWN_FINGERPRINT_FIELD_NUMBER: builtins.int
- last_known_fingerprint: builtins.bytes = ...
-
- @property
- def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: ...
-
- def __init__(self,
- *,
- resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ...,
- last_known_fingerprint : builtins.bytes = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"resource",b"resource"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"last_known_fingerprint",b"last_known_fingerprint",u"resource",b"resource"]) -> None: ...
-global___MetricConfigRequest = MetricConfigRequest
-
-class MetricConfigResponse(google.protobuf.message.Message):
- DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- class Schedule(google.protobuf.message.Message):
- DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- class Pattern(google.protobuf.message.Message):
- DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- EQUALS_FIELD_NUMBER: builtins.int
- STARTS_WITH_FIELD_NUMBER: builtins.int
- equals: typing.Text = ...
- starts_with: typing.Text = ...
-
- def __init__(self,
- *,
- equals : typing.Text = ...,
- starts_with : typing.Text = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"equals",b"equals",u"match",b"match",u"starts_with",b"starts_with"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"equals",b"equals",u"match",b"match",u"starts_with",b"starts_with"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"match",b"match"]) -> typing_extensions.Literal["equals","starts_with"]: ...
-
- EXCLUSION_PATTERNS_FIELD_NUMBER: builtins.int
- INCLUSION_PATTERNS_FIELD_NUMBER: builtins.int
- PERIOD_SEC_FIELD_NUMBER: builtins.int
- period_sec: builtins.int = ...
-
- @property
- def exclusion_patterns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule.Pattern]: ...
-
- @property
- def inclusion_patterns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule.Pattern]: ...
-
- def __init__(self,
- *,
- exclusion_patterns : typing.Optional[typing.Iterable[global___MetricConfigResponse.Schedule.Pattern]] = ...,
- inclusion_patterns : typing.Optional[typing.Iterable[global___MetricConfigResponse.Schedule.Pattern]] = ...,
- period_sec : builtins.int = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"exclusion_patterns",b"exclusion_patterns",u"inclusion_patterns",b"inclusion_patterns",u"period_sec",b"period_sec"]) -> None: ...
-
- FINGERPRINT_FIELD_NUMBER: builtins.int
- SCHEDULES_FIELD_NUMBER: builtins.int
- SUGGESTED_WAIT_TIME_SEC_FIELD_NUMBER: builtins.int
- fingerprint: builtins.bytes = ...
- suggested_wait_time_sec: builtins.int = ...
-
- @property
- def schedules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule]: ...
-
- def __init__(self,
- *,
- fingerprint : builtins.bytes = ...,
- schedules : typing.Optional[typing.Iterable[global___MetricConfigResponse.Schedule]] = ...,
- suggested_wait_time_sec : builtins.int = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"fingerprint",b"fingerprint",u"schedules",b"schedules",u"suggested_wait_time_sec",b"suggested_wait_time_sec"]) -> None: ...
-global___MetricConfigResponse = MetricConfigResponse
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2_grpc.py
deleted file mode 100644
index a2b2408446..0000000000
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/configservice_pb2_grpc.py
+++ /dev/null
@@ -1,82 +0,0 @@
-# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
-import grpc
-
-from opentelemetry.proto.metrics.experimental import configservice_pb2 as opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2
-
-
-class MetricConfigStub(object):
- """MetricConfig is a service that enables updating metric schedules, trace
- parameters, and other configurations on the SDK without having to restart the
- instrumented application. The collector can also serve as the configuration
- service, acting as a bridge between third-party configuration services and
- the SDK, piping updated configs from a third-party source to an instrumented
- application.
- """
-
- def __init__(self, channel):
- """Constructor.
-
- Args:
- channel: A grpc.Channel.
- """
- self.GetMetricConfig = channel.unary_unary(
- '/opentelemetry.proto.metrics.experimental.MetricConfig/GetMetricConfig',
- request_serializer=opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2.MetricConfigRequest.SerializeToString,
- response_deserializer=opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2.MetricConfigResponse.FromString,
- )
-
-
-class MetricConfigServicer(object):
- """MetricConfig is a service that enables updating metric schedules, trace
- parameters, and other configurations on the SDK without having to restart the
- instrumented application. The collector can also serve as the configuration
- service, acting as a bridge between third-party configuration services and
- the SDK, piping updated configs from a third-party source to an instrumented
- application.
- """
-
- def GetMetricConfig(self, request, context):
- """Missing associated documentation comment in .proto file"""
- context.set_code(grpc.StatusCode.UNIMPLEMENTED)
- context.set_details('Method not implemented!')
- raise NotImplementedError('Method not implemented!')
-
-
-def add_MetricConfigServicer_to_server(servicer, server):
- rpc_method_handlers = {
- 'GetMetricConfig': grpc.unary_unary_rpc_method_handler(
- servicer.GetMetricConfig,
- request_deserializer=opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2.MetricConfigRequest.FromString,
- response_serializer=opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2.MetricConfigResponse.SerializeToString,
- ),
- }
- generic_handler = grpc.method_handlers_generic_handler(
- 'opentelemetry.proto.metrics.experimental.MetricConfig', rpc_method_handlers)
- server.add_generic_rpc_handlers((generic_handler,))
-
-
- # This class is part of an EXPERIMENTAL API.
-class MetricConfig(object):
- """MetricConfig is a service that enables updating metric schedules, trace
- parameters, and other configurations on the SDK without having to restart the
- instrumented application. The collector can also serve as the configuration
- service, acting as a bridge between third-party configuration services and
- the SDK, piping updated configs from a third-party source to an instrumented
- application.
- """
-
- @staticmethod
- def GetMetricConfig(request,
- target,
- options=(),
- channel_credentials=None,
- call_credentials=None,
- compression=None,
- wait_for_ready=None,
- timeout=None,
- metadata=None):
- return grpc.experimental.unary_unary(request, target, '/opentelemetry.proto.metrics.experimental.MetricConfig/GetMetricConfig',
- opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2.MetricConfigRequest.SerializeToString,
- opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_configservice__pb2.MetricConfigResponse.FromString,
- options, channel_credentials,
- call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.py
index 8f1b43c3d2..212840ca03 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/metrics/experimental/metrics_config_service.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -19,6 +19,7 @@
package='opentelemetry.proto.metrics.experimental',
syntax='proto3',
serialized_options=b'\n+io.opentelemetry.proto.metrics.experimentalB\030MetricConfigServiceProtoP\001ZIgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/metrics/experimental',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\nEopentelemetry/proto/metrics/experimental/metrics_config_service.proto\x12(opentelemetry.proto.metrics.experimental\x1a.opentelemetry/proto/resource/v1/resource.proto\"r\n\x13MetricConfigRequest\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x1e\n\x16last_known_fingerprint\x18\x02 \x01(\x0c\"\xe0\x03\n\x14MetricConfigResponse\x12\x13\n\x0b\x66ingerprint\x18\x01 \x01(\x0c\x12Z\n\tschedules\x18\x02 \x03(\x0b\x32G.opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule\x12\x1f\n\x17suggested_wait_time_sec\x18\x03 \x01(\x05\x1a\xb5\x02\n\x08Schedule\x12k\n\x12\x65xclusion_patterns\x18\x01 \x03(\x0b\x32O.opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern\x12k\n\x12inclusion_patterns\x18\x02 \x03(\x0b\x32O.opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern\x12\x12\n\nperiod_sec\x18\x03 \x01(\x05\x1a;\n\x07Pattern\x12\x10\n\x06\x65quals\x18\x01 \x01(\tH\x00\x12\x15\n\x0bstarts_with\x18\x02 \x01(\tH\x00\x42\x07\n\x05match2\xa1\x01\n\x0cMetricConfig\x12\x90\x01\n\x0fGetMetricConfig\x12=.opentelemetry.proto.metrics.experimental.MetricConfigRequest\x1a>.opentelemetry.proto.metrics.experimental.MetricConfigResponseB\x94\x01\n+io.opentelemetry.proto.metrics.experimentalB\x18MetricConfigServiceProtoP\x01ZIgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/metrics/experimentalb\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2.DESCRIPTOR,])
@@ -32,6 +33,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource', full_name='opentelemetry.proto.metrics.experimental.MetricConfigRequest.resource', index=0,
@@ -39,14 +41,14 @@
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_known_fingerprint', full_name='opentelemetry.proto.metrics.experimental.MetricConfigRequest.last_known_fingerprint', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -70,6 +72,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='equals', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern.equals', index=0,
@@ -77,14 +80,14 @@
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='starts_with', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern.starts_with', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -98,7 +101,9 @@
oneofs=[
_descriptor.OneofDescriptor(
name='match', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.Pattern.match',
- index=0, containing_type=None, fields=[]),
+ index=0, containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[]),
],
serialized_start=701,
serialized_end=760,
@@ -110,6 +115,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='exclusion_patterns', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.exclusion_patterns', index=0,
@@ -117,21 +123,21 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='inclusion_patterns', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.inclusion_patterns', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='period_sec', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.Schedule.period_sec', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -154,6 +160,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='fingerprint', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.fingerprint', index=0,
@@ -161,21 +168,21 @@
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schedules', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.schedules', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='suggested_wait_time_sec', full_name='opentelemetry.proto.metrics.experimental.MetricConfigResponse.suggested_wait_time_sec', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -247,6 +254,7 @@
file=DESCRIPTOR,
index=0,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
serialized_start=763,
serialized_end=924,
methods=[
@@ -258,6 +266,7 @@
input_type=_METRICCONFIGREQUEST,
output_type=_METRICCONFIGRESPONSE,
serialized_options=None,
+ create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_METRICCONFIG)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.pyi
index 7218e03264..ee8050802b 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2.pyi
@@ -16,50 +16,72 @@ class MetricConfigRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
RESOURCE_FIELD_NUMBER: builtins.int
LAST_KNOWN_FINGERPRINT_FIELD_NUMBER: builtins.int
- last_known_fingerprint: builtins.bytes = ...
-
@property
- def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: ...
+ def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
+ """Required. The resource for which configuration should be returned."""
+ pass
+ last_known_fingerprint: builtins.bytes = ...
+ """Optional. The value of MetricConfigResponse.fingerprint for the last
+ configuration that the caller received and successfully applied.
+ """
def __init__(self,
*,
resource : typing.Optional[opentelemetry.proto.resource.v1.resource_pb2.Resource] = ...,
last_known_fingerprint : builtins.bytes = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"resource",b"resource"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"last_known_fingerprint",b"last_known_fingerprint",u"resource",b"resource"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["last_known_fingerprint",b"last_known_fingerprint","resource",b"resource"]) -> None: ...
global___MetricConfigRequest = MetricConfigRequest
class MetricConfigResponse(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
class Schedule(google.protobuf.message.Message):
+ """A Schedule is used to apply a particular scheduling configuration to
+ a metric. If a metric name matches a schedule's patterns, then the metric
+ adopts the configuration specified by the schedule.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
class Pattern(google.protobuf.message.Message):
+ """A light-weight pattern that can match 1 or more
+ metrics, for which this schedule will apply. The string is used to
+ match against metric names. It should not exceed 100k characters.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
EQUALS_FIELD_NUMBER: builtins.int
STARTS_WITH_FIELD_NUMBER: builtins.int
equals: typing.Text = ...
+ """matches the metric name exactly"""
+
starts_with: typing.Text = ...
+ """prefix-matches the metric name"""
def __init__(self,
*,
equals : typing.Text = ...,
starts_with : typing.Text = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"equals",b"equals",u"match",b"match",u"starts_with",b"starts_with"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"equals",b"equals",u"match",b"match",u"starts_with",b"starts_with"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"match",b"match"]) -> typing_extensions.Literal["equals","starts_with"]: ...
+ def HasField(self, field_name: typing_extensions.Literal["equals",b"equals","match",b"match","starts_with",b"starts_with"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["equals",b"equals","match",b"match","starts_with",b"starts_with"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["match",b"match"]) -> typing.Optional[typing_extensions.Literal["equals","starts_with"]]: ...
EXCLUSION_PATTERNS_FIELD_NUMBER: builtins.int
INCLUSION_PATTERNS_FIELD_NUMBER: builtins.int
PERIOD_SEC_FIELD_NUMBER: builtins.int
- period_sec: builtins.int = ...
-
@property
- def exclusion_patterns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule.Pattern]: ...
-
+ def exclusion_patterns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule.Pattern]:
+ """Metrics with names that match a rule in the inclusion_patterns are
+ targeted by this schedule. Metrics that match the exclusion_patterns
+ are not targeted for this schedule, even if they match an inclusion
+ pattern.
+ """
+ pass
@property
def inclusion_patterns(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule.Pattern]: ...
+ period_sec: builtins.int = ...
+ """Describes the collection period for each metric in seconds.
+ A period of 0 means to not export.
+ """
def __init__(self,
*,
@@ -67,16 +89,42 @@ class MetricConfigResponse(google.protobuf.message.Message):
inclusion_patterns : typing.Optional[typing.Iterable[global___MetricConfigResponse.Schedule.Pattern]] = ...,
period_sec : builtins.int = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"exclusion_patterns",b"exclusion_patterns",u"inclusion_patterns",b"inclusion_patterns",u"period_sec",b"period_sec"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["exclusion_patterns",b"exclusion_patterns","inclusion_patterns",b"inclusion_patterns","period_sec",b"period_sec"]) -> None: ...
FINGERPRINT_FIELD_NUMBER: builtins.int
SCHEDULES_FIELD_NUMBER: builtins.int
SUGGESTED_WAIT_TIME_SEC_FIELD_NUMBER: builtins.int
fingerprint: builtins.bytes = ...
- suggested_wait_time_sec: builtins.int = ...
+ """Optional. The fingerprint associated with this MetricConfigResponse. Each
+ change in configs yields a different fingerprint. The resource SHOULD copy
+ this value to MetricConfigRequest.last_known_fingerprint for the next
+ configuration request. If there are no changes between fingerprint and
+ MetricConfigRequest.last_known_fingerprint, then all other fields besides
+ fingerprint in the response are optional, or the same as the last update if
+ present.
+
+ The exact mechanics of generating the fingerprint is up to the
+ implementation. However, a fingerprint must be deterministically determined
+ by the configurations -- the same configuration will generate the same
+ fingerprint on any instance of an implementation. Hence using a timestamp is
+ unacceptable, but a deterministic hash is fine.
+ """
@property
- def schedules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule]: ...
+ def schedules(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___MetricConfigResponse.Schedule]:
+ """A single metric may match multiple schedules. In such cases, the schedule
+ that specifies the smallest period is applied.
+
+ Note, for optimization purposes, it is recommended to use as few schedules
+ as possible to capture all required metric updates. Where you can be
+ conservative, do take full advantage of the inclusion/exclusion patterns to
+ capture as much of your targeted metrics.
+ """
+ pass
+ suggested_wait_time_sec: builtins.int = ...
+ """Optional. The client is suggested to wait this long (in seconds) before
+ pinging the configuration service again.
+ """
def __init__(self,
*,
@@ -84,5 +132,5 @@ class MetricConfigResponse(google.protobuf.message.Message):
schedules : typing.Optional[typing.Iterable[global___MetricConfigResponse.Schedule]] = ...,
suggested_wait_time_sec : builtins.int = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"fingerprint",b"fingerprint",u"schedules",b"schedules",u"suggested_wait_time_sec",b"suggested_wait_time_sec"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["fingerprint",b"fingerprint","schedules",b"schedules","suggested_wait_time_sec",b"suggested_wait_time_sec"]) -> None: ...
global___MetricConfigResponse = MetricConfigResponse
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2_grpc.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2_grpc.py
index 829bf58712..409ddfa261 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2_grpc.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/experimental/metrics_config_service_pb2_grpc.py
@@ -1,4 +1,5 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
+"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from opentelemetry.proto.metrics.experimental import metrics_config_service_pb2 as opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_metrics__config__service__pb2
@@ -36,7 +37,7 @@ class MetricConfigServicer(object):
"""
def GetMetricConfig(self, request, context):
- """Missing associated documentation comment in .proto file"""
+ """Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
@@ -71,6 +72,7 @@ def GetMetricConfig(request,
options=(),
channel_credentials=None,
call_credentials=None,
+ insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
@@ -79,4 +81,4 @@ def GetMetricConfig(request,
opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_metrics__config__service__pb2.MetricConfigRequest.SerializeToString,
opentelemetry_dot_proto_dot_metrics_dot_experimental_dot_metrics__config__service__pb2.MetricConfigResponse.FromString,
options, channel_credentials,
- call_credentials, compression, wait_for_ready, timeout, metadata)
+ insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py
index 77ad3c5c91..c28faca33d 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/metrics/v1/metrics.proto
-
+"""Generated protocol buffer code."""
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
@@ -21,6 +21,7 @@
package='opentelemetry.proto.metrics.v1',
syntax='proto3',
serialized_options=b'\n!io.opentelemetry.proto.metrics.v1B\014MetricsProtoP\001Z?github.com/open-telemetry/opentelemetry-proto/gen/go/metrics/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n,opentelemetry/proto/metrics/v1/metrics.proto\x12\x1eopentelemetry.proto.metrics.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"\xca\x01\n\x0fResourceMetrics\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12\x66\n\x1finstrumentation_library_metrics\x18\x02 \x03(\x0b\x32=.opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xc4\x01\n\x1dInstrumentationLibraryMetrics\x12V\n\x17instrumentation_library\x18\x01 \x01(\x0b\x32\x35.opentelemetry.proto.common.v1.InstrumentationLibrary\x12\x37\n\x07metrics\x18\x02 \x03(\x0b\x32&.opentelemetry.proto.metrics.v1.Metric\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xf6\x03\n\x06Metric\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x02 \x01(\t\x12\x0c\n\x04unit\x18\x03 \x01(\t\x12\x41\n\tint_gauge\x18\x04 \x01(\x0b\x32(.opentelemetry.proto.metrics.v1.IntGaugeB\x02\x18\x01H\x00\x12\x36\n\x05gauge\x18\x05 \x01(\x0b\x32%.opentelemetry.proto.metrics.v1.GaugeH\x00\x12=\n\x07int_sum\x18\x06 \x01(\x0b\x32&.opentelemetry.proto.metrics.v1.IntSumB\x02\x18\x01H\x00\x12\x32\n\x03sum\x18\x07 \x01(\x0b\x32#.opentelemetry.proto.metrics.v1.SumH\x00\x12I\n\rint_histogram\x18\x08 \x01(\x0b\x32,.opentelemetry.proto.metrics.v1.IntHistogramB\x02\x18\x01H\x00\x12>\n\thistogram\x18\t \x01(\x0b\x32).opentelemetry.proto.metrics.v1.HistogramH\x00\x12:\n\x07summary\x18\x0b \x01(\x0b\x32\'.opentelemetry.proto.metrics.v1.SummaryH\x00\x42\x06\n\x04\x64\x61ta\"Q\n\x08IntGauge\x12\x41\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.IntDataPoint:\x02\x18\x01\"M\n\x05Gauge\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\"\xbe\x01\n\x06IntSum\x12\x41\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32,.opentelemetry.proto.metrics.v1.IntDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 
\x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08:\x02\x18\x01\"\xba\x01\n\x03Sum\x12\x44\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32/.opentelemetry.proto.metrics.v1.NumberDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\x12\x14\n\x0cis_monotonic\x18\x03 \x01(\x08\"\xb7\x01\n\x0cIntHistogram\x12J\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x35.opentelemetry.proto.metrics.v1.IntHistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality:\x02\x18\x01\"\xad\x01\n\tHistogram\x12G\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x32.opentelemetry.proto.metrics.v1.HistogramDataPoint\x12W\n\x17\x61ggregation_temporality\x18\x02 \x01(\x0e\x32\x36.opentelemetry.proto.metrics.v1.AggregationTemporality\"P\n\x07Summary\x12\x45\n\x0b\x64\x61ta_points\x18\x01 \x03(\x0b\x32\x30.opentelemetry.proto.metrics.v1.SummaryDataPoint\"\xd6\x01\n\x0cIntDataPoint\x12=\n\x06labels\x18\x01 \x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05value\x18\x04 \x01(\x10\x12>\n\texemplars\x18\x05 \x03(\x0b\x32+.opentelemetry.proto.metrics.v1.IntExemplar:\x02\x18\x01\"\xb4\x02\n\x0fNumberDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x41\n\x06labels\x18\x01 \x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValueB\x02\x18\x01\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\x13\n\tas_double\x18\x04 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12;\n\texemplars\x18\x05 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.ExemplarB\x07\n\x05value\"\x9c\x02\n\x15IntHistogramDataPoint\x12=\n\x06labels\x18\x01 
\x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValue\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x10\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12>\n\texemplars\x18\x08 \x03(\x0b\x32+.opentelemetry.proto.metrics.v1.IntExemplar:\x02\x18\x01\"\xd3\x02\n\x12HistogramDataPoint\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x41\n\x06labels\x18\x01 \x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValueB\x02\x18\x01\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12\x15\n\rbucket_counts\x18\x06 \x03(\x06\x12\x17\n\x0f\x65xplicit_bounds\x18\x07 \x03(\x01\x12;\n\texemplars\x18\x08 \x03(\x0b\x32(.opentelemetry.proto.metrics.v1.Exemplar\"\xf3\x02\n\x10SummaryDataPoint\x12;\n\nattributes\x18\x07 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12\x41\n\x06labels\x18\x01 \x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValueB\x02\x18\x01\x12\x1c\n\x14start_time_unix_nano\x18\x02 \x01(\x06\x12\x16\n\x0etime_unix_nano\x18\x03 \x01(\x06\x12\r\n\x05\x63ount\x18\x04 \x01(\x06\x12\x0b\n\x03sum\x18\x05 \x01(\x01\x12Y\n\x0fquantile_values\x18\x06 \x03(\x0b\x32@.opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile\x1a\x32\n\x0fValueAtQuantile\x12\x10\n\x08quantile\x18\x01 \x01(\x01\x12\r\n\x05value\x18\x02 \x01(\x01\"\xa3\x01\n\x0bIntExemplar\x12\x46\n\x0f\x66iltered_labels\x18\x01 \x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValue\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\r\n\x05value\x18\x03 \x01(\x10\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c:\x02\x18\x01\"\x87\x02\n\x08\x45xemplar\x12\x44\n\x13\x66iltered_attributes\x18\x07 
\x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12J\n\x0f\x66iltered_labels\x18\x01 \x03(\x0b\x32-.opentelemetry.proto.common.v1.StringKeyValueB\x02\x18\x01\x12\x16\n\x0etime_unix_nano\x18\x02 \x01(\x06\x12\x13\n\tas_double\x18\x03 \x01(\x01H\x00\x12\x10\n\x06\x61s_int\x18\x06 \x01(\x10H\x00\x12\x0f\n\x07span_id\x18\x04 \x01(\x0c\x12\x10\n\x08trace_id\x18\x05 \x01(\x0c\x42\x07\n\x05value*\x8c\x01\n\x16\x41ggregationTemporality\x12\'\n#AGGREGATION_TEMPORALITY_UNSPECIFIED\x10\x00\x12!\n\x1d\x41GGREGATION_TEMPORALITY_DELTA\x10\x01\x12&\n\"AGGREGATION_TEMPORALITY_CUMULATIVE\x10\x02\x42t\n!io.opentelemetry.proto.metrics.v1B\x0cMetricsProtoP\x01Z?github.com/open-telemetry/opentelemetry-proto/gen/go/metrics/v1b\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2.DESCRIPTOR,opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2.DESCRIPTOR,])
@@ -30,19 +31,23 @@
full_name='opentelemetry.proto.metrics.v1.AggregationTemporality',
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='AGGREGATION_TEMPORALITY_UNSPECIFIED', index=0, number=0,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AGGREGATION_TEMPORALITY_DELTA', index=1, number=1,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='AGGREGATION_TEMPORALITY_CUMULATIVE', index=2, number=2,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
@@ -64,6 +69,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource', full_name='opentelemetry.proto.metrics.v1.ResourceMetrics.resource', index=0,
@@ -71,21 +77,21 @@
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='instrumentation_library_metrics', full_name='opentelemetry.proto.metrics.v1.ResourceMetrics.instrumentation_library_metrics', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schema_url', full_name='opentelemetry.proto.metrics.v1.ResourceMetrics.schema_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -109,6 +115,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instrumentation_library', full_name='opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics.instrumentation_library', index=0,
@@ -116,21 +123,21 @@
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='metrics', full_name='opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics.metrics', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schema_url', full_name='opentelemetry.proto.metrics.v1.InstrumentationLibraryMetrics.schema_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -154,6 +161,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='opentelemetry.proto.metrics.v1.Metric.name', index=0,
@@ -161,70 +169,70 @@
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='opentelemetry.proto.metrics.v1.Metric.description', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='unit', full_name='opentelemetry.proto.metrics.v1.Metric.unit', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='int_gauge', full_name='opentelemetry.proto.metrics.v1.Metric.int_gauge', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='gauge', full_name='opentelemetry.proto.metrics.v1.Metric.gauge', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='int_sum', full_name='opentelemetry.proto.metrics.v1.Metric.int_sum', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sum', full_name='opentelemetry.proto.metrics.v1.Metric.sum', index=6,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='int_histogram', full_name='opentelemetry.proto.metrics.v1.Metric.int_histogram', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='histogram', full_name='opentelemetry.proto.metrics.v1.Metric.histogram', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='summary', full_name='opentelemetry.proto.metrics.v1.Metric.summary', index=9,
number=11, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -238,7 +246,9 @@
oneofs=[
_descriptor.OneofDescriptor(
name='data', full_name='opentelemetry.proto.metrics.v1.Metric.data',
- index=0, containing_type=None, fields=[]),
+ index=0, containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[]),
],
serialized_start=577,
serialized_end=1079,
@@ -251,6 +261,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.IntGauge.data_points', index=0,
@@ -258,7 +269,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -282,6 +293,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.Gauge.data_points', index=0,
@@ -289,7 +301,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -313,6 +325,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.IntSum.data_points', index=0,
@@ -320,21 +333,21 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='aggregation_temporality', full_name='opentelemetry.proto.metrics.v1.IntSum.aggregation_temporality', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_monotonic', full_name='opentelemetry.proto.metrics.v1.IntSum.is_monotonic', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -358,6 +371,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.Sum.data_points', index=0,
@@ -365,21 +379,21 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='aggregation_temporality', full_name='opentelemetry.proto.metrics.v1.Sum.aggregation_temporality', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_monotonic', full_name='opentelemetry.proto.metrics.v1.Sum.is_monotonic', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -403,6 +417,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.IntHistogram.data_points', index=0,
@@ -410,14 +425,14 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='aggregation_temporality', full_name='opentelemetry.proto.metrics.v1.IntHistogram.aggregation_temporality', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -441,6 +456,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.Histogram.data_points', index=0,
@@ -448,14 +464,14 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='aggregation_temporality', full_name='opentelemetry.proto.metrics.v1.Histogram.aggregation_temporality', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -479,6 +495,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='data_points', full_name='opentelemetry.proto.metrics.v1.Summary.data_points', index=0,
@@ -486,7 +503,7 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -510,6 +527,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='labels', full_name='opentelemetry.proto.metrics.v1.IntDataPoint.labels', index=0,
@@ -517,35 +535,35 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_time_unix_nano', full_name='opentelemetry.proto.metrics.v1.IntDataPoint.start_time_unix_nano', index=1,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.IntDataPoint.time_unix_nano', index=2,
number=3, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='opentelemetry.proto.metrics.v1.IntDataPoint.value', index=3,
number=4, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exemplars', full_name='opentelemetry.proto.metrics.v1.IntDataPoint.exemplars', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -569,6 +587,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.attributes', index=0,
@@ -576,49 +595,49 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.labels', index=1,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_time_unix_nano', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.start_time_unix_nano', index=2,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.time_unix_nano', index=3,
number=3, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='as_double', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.as_double', index=4,
number=4, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='as_int', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.as_int', index=5,
number=6, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exemplars', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.exemplars', index=6,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -632,7 +651,9 @@
oneofs=[
_descriptor.OneofDescriptor(
name='value', full_name='opentelemetry.proto.metrics.v1.NumberDataPoint.value',
- index=0, containing_type=None, fields=[]),
+ index=0, containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[]),
],
serialized_start=2287,
serialized_end=2595,
@@ -645,6 +666,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='labels', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.labels', index=0,
@@ -652,56 +674,56 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_time_unix_nano', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.start_time_unix_nano', index=1,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.time_unix_nano', index=2,
number=3, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='count', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.count', index=3,
number=4, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sum', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.sum', index=4,
number=5, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket_counts', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.bucket_counts', index=5,
number=6, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='explicit_bounds', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.explicit_bounds', index=6,
number=7, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exemplars', full_name='opentelemetry.proto.metrics.v1.IntHistogramDataPoint.exemplars', index=7,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -725,6 +747,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.attributes', index=0,
@@ -732,63 +755,63 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.labels', index=1,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_time_unix_nano', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.start_time_unix_nano', index=2,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.time_unix_nano', index=3,
number=3, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='count', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.count', index=4,
number=4, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sum', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.sum', index=5,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bucket_counts', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.bucket_counts', index=6,
number=6, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='explicit_bounds', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.explicit_bounds', index=7,
number=7, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='exemplars', full_name='opentelemetry.proto.metrics.v1.HistogramDataPoint.exemplars', index=8,
number=8, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -812,6 +835,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='quantile', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile.quantile', index=0,
@@ -819,14 +843,14 @@
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.ValueAtQuantile.value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -849,6 +873,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.attributes', index=0,
@@ -856,49 +881,49 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='labels', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.labels', index=1,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_time_unix_nano', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.start_time_unix_nano', index=2,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.time_unix_nano', index=3,
number=3, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='count', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.count', index=4,
number=4, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sum', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.sum', index=5,
number=5, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='quantile_values', full_name='opentelemetry.proto.metrics.v1.SummaryDataPoint.quantile_values', index=6,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -922,6 +947,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='filtered_labels', full_name='opentelemetry.proto.metrics.v1.IntExemplar.filtered_labels', index=0,
@@ -929,35 +955,35 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.IntExemplar.time_unix_nano', index=1,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='opentelemetry.proto.metrics.v1.IntExemplar.value', index=2,
number=3, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='span_id', full_name='opentelemetry.proto.metrics.v1.IntExemplar.span_id', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trace_id', full_name='opentelemetry.proto.metrics.v1.IntExemplar.trace_id', index=4,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -981,6 +1007,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='filtered_attributes', full_name='opentelemetry.proto.metrics.v1.Exemplar.filtered_attributes', index=0,
@@ -988,49 +1015,49 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filtered_labels', full_name='opentelemetry.proto.metrics.v1.Exemplar.filtered_labels', index=1,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.metrics.v1.Exemplar.time_unix_nano', index=2,
number=2, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='as_double', full_name='opentelemetry.proto.metrics.v1.Exemplar.as_double', index=3,
number=3, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='as_int', full_name='opentelemetry.proto.metrics.v1.Exemplar.as_int', index=4,
number=6, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='span_id', full_name='opentelemetry.proto.metrics.v1.Exemplar.span_id', index=5,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trace_id', full_name='opentelemetry.proto.metrics.v1.Exemplar.trace_id', index=6,
number=5, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -1044,7 +1071,9 @@
oneofs=[
_descriptor.OneofDescriptor(
name='value', full_name='opentelemetry.proto.metrics.v1.Exemplar.value',
- index=0, containing_type=None, fields=[]),
+ index=0, containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[]),
],
serialized_start=3767,
serialized_end=4030,
diff --git a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi
index fb181488cd..4fb9c9ff93 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/metrics/v1/metrics_pb2.pyi
@@ -14,30 +14,177 @@ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
-global___AggregationTemporality = AggregationTemporality
-class _AggregationTemporality(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[AggregationTemporality.V], builtins.type):
+class AggregationTemporality(_AggregationTemporality, metaclass=_AggregationTemporalityEnumTypeWrapper):
+ """AggregationTemporality defines how a metric aggregator reports aggregated
+ values. It describes how those values relate to the time interval over
+ which they are aggregated.
+ """
+ pass
+class _AggregationTemporality:
+ V = typing.NewType('V', builtins.int)
+class _AggregationTemporalityEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_AggregationTemporality.V], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
AGGREGATION_TEMPORALITY_UNSPECIFIED = AggregationTemporality.V(0)
+ """UNSPECIFIED is the default AggregationTemporality, it MUST not be used."""
+
AGGREGATION_TEMPORALITY_DELTA = AggregationTemporality.V(1)
+ """DELTA is an AggregationTemporality for a metric aggregator which reports
+ changes since last report time. Successive metrics contain aggregation of
+ values from continuous and non-overlapping intervals.
+
+ The values for a DELTA metric are based only on the time interval
+ associated with one measurement cycle. There is no dependency on
+ previous measurements like is the case for CUMULATIVE metrics.
+
+ For example, consider a system measuring the number of requests that
+ it receives and reports the sum of these requests every second as a
+ DELTA metric:
+
+ 1. The system starts receiving at time=t_0.
+ 2. A request is received, the system measures 1 request.
+ 3. A request is received, the system measures 1 request.
+ 4. A request is received, the system measures 1 request.
+ 5. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0 to
+ t_0+1 with a value of 3.
+ 6. A request is received, the system measures 1 request.
+ 7. A request is received, the system measures 1 request.
+ 8. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0+1 to
+ t_0+2 with a value of 2.
+ """
+
AGGREGATION_TEMPORALITY_CUMULATIVE = AggregationTemporality.V(2)
-class AggregationTemporality(metaclass=_AggregationTemporality):
- V = typing.NewType('V', builtins.int)
+ """CUMULATIVE is an AggregationTemporality for a metric aggregator which
+ reports changes since a fixed start time. This means that current values
+ of a CUMULATIVE metric depend on all previous measurements since the
+ start time. Because of this, the sender is required to retain this state
+ in some form. If this state is lost or invalidated, the CUMULATIVE metric
+ values MUST be reset and a new fixed start time following the last
+ reported measurement time sent MUST be used.
+
+ For example, consider a system measuring the number of requests that
+ it receives and reports the sum of these requests every second as a
+ CUMULATIVE metric:
+
+ 1. The system starts receiving at time=t_0.
+ 2. A request is received, the system measures 1 request.
+ 3. A request is received, the system measures 1 request.
+ 4. A request is received, the system measures 1 request.
+ 5. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0 to
+ t_0+1 with a value of 3.
+ 6. A request is received, the system measures 1 request.
+ 7. A request is received, the system measures 1 request.
+ 8. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0 to
+ t_0+2 with a value of 5.
+ 9. The system experiences a fault and loses state.
+ 10. The system recovers and resumes receiving at time=t_1.
+ 11. A request is received, the system measures 1 request.
+ 12. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_1 to
+ t_0+1 with a value of 1.
+
+ Note: Even though, when reporting changes since last report time, using
+ CUMULATIVE is valid, it is not recommended. This may cause problems for
+ systems that do not use start_time to determine when the aggregation
+ value was reset (e.g. Prometheus).
+ """
+
+
AGGREGATION_TEMPORALITY_UNSPECIFIED = AggregationTemporality.V(0)
+"""UNSPECIFIED is the default AggregationTemporality, it MUST not be used."""
+
AGGREGATION_TEMPORALITY_DELTA = AggregationTemporality.V(1)
+"""DELTA is an AggregationTemporality for a metric aggregator which reports
+changes since last report time. Successive metrics contain aggregation of
+values from continuous and non-overlapping intervals.
+
+The values for a DELTA metric are based only on the time interval
+associated with one measurement cycle. There is no dependency on
+previous measurements like is the case for CUMULATIVE metrics.
+
+For example, consider a system measuring the number of requests that
+it receives and reports the sum of these requests every second as a
+DELTA metric:
+
+ 1. The system starts receiving at time=t_0.
+ 2. A request is received, the system measures 1 request.
+ 3. A request is received, the system measures 1 request.
+ 4. A request is received, the system measures 1 request.
+ 5. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0 to
+ t_0+1 with a value of 3.
+ 6. A request is received, the system measures 1 request.
+ 7. A request is received, the system measures 1 request.
+ 8. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0+1 to
+ t_0+2 with a value of 2.
+"""
+
AGGREGATION_TEMPORALITY_CUMULATIVE = AggregationTemporality.V(2)
+"""CUMULATIVE is an AggregationTemporality for a metric aggregator which
+reports changes since a fixed start time. This means that current values
+of a CUMULATIVE metric depend on all previous measurements since the
+start time. Because of this, the sender is required to retain this state
+in some form. If this state is lost or invalidated, the CUMULATIVE metric
+values MUST be reset and a new fixed start time following the last
+reported measurement time sent MUST be used.
+
+For example, consider a system measuring the number of requests that
+it receives and reports the sum of these requests every second as a
+CUMULATIVE metric:
+
+ 1. The system starts receiving at time=t_0.
+ 2. A request is received, the system measures 1 request.
+ 3. A request is received, the system measures 1 request.
+ 4. A request is received, the system measures 1 request.
+ 5. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0 to
+ t_0+1 with a value of 3.
+ 6. A request is received, the system measures 1 request.
+ 7. A request is received, the system measures 1 request.
+ 8. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_0 to
+ t_0+2 with a value of 5.
+ 9. The system experiences a fault and loses state.
+ 10. The system recovers and resumes receiving at time=t_1.
+ 11. A request is received, the system measures 1 request.
+ 12. The 1 second collection cycle ends. A metric is exported for the
+ number of requests received over the interval of time t_1 to
+ t_0+1 with a value of 1.
+
+Note: Even though, when reporting changes since last report time, using
+CUMULATIVE is valid, it is not recommended. This may cause problems for
+systems that do not use start_time to determine when the aggregation
+value was reset (e.g. Prometheus).
+"""
+
+global___AggregationTemporality = AggregationTemporality
+
class ResourceMetrics(google.protobuf.message.Message):
+ """A collection of InstrumentationLibraryMetrics from a Resource."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
RESOURCE_FIELD_NUMBER: builtins.int
INSTRUMENTATION_LIBRARY_METRICS_FIELD_NUMBER: builtins.int
SCHEMA_URL_FIELD_NUMBER: builtins.int
- schema_url: typing.Text = ...
-
@property
- def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: ...
-
+ def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
+ """The resource for the metrics in this message.
+ If this field is not set then no resource info is known.
+ """
+ pass
@property
- def instrumentation_library_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibraryMetrics]: ...
+ def instrumentation_library_metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibraryMetrics]:
+ """A list of metrics that originate from a resource."""
+ pass
+ schema_url: typing.Text = ...
+ """This schema_url applies to the data in the "resource" field. It does not apply
+ to the data in the "instrumentation_library_metrics" field which have their own
+ schema_url field.
+ """
def __init__(self,
*,
@@ -45,22 +192,29 @@ class ResourceMetrics(google.protobuf.message.Message):
instrumentation_library_metrics : typing.Optional[typing.Iterable[global___InstrumentationLibraryMetrics]] = ...,
schema_url : typing.Text = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"resource",b"resource"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"instrumentation_library_metrics",b"instrumentation_library_metrics",u"resource",b"resource",u"schema_url",b"schema_url"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library_metrics",b"instrumentation_library_metrics","resource",b"resource","schema_url",b"schema_url"]) -> None: ...
global___ResourceMetrics = ResourceMetrics
class InstrumentationLibraryMetrics(google.protobuf.message.Message):
+ """A collection of Metrics produced by an InstrumentationLibrary."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
INSTRUMENTATION_LIBRARY_FIELD_NUMBER: builtins.int
METRICS_FIELD_NUMBER: builtins.int
SCHEMA_URL_FIELD_NUMBER: builtins.int
- schema_url: typing.Text = ...
-
@property
- def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary: ...
-
+ def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary:
+ """The instrumentation library information for the metrics in this message.
+ Semantically when InstrumentationLibrary isn't set, it is equivalent with
+ an empty instrumentation library name (unknown).
+ """
+ pass
@property
- def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Metric]: ...
+ def metrics(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Metric]:
+ """A list of metrics that originate from an instrumentation library."""
+ pass
+ schema_url: typing.Text = ...
+ """This schema_url applies to all metrics in the "metrics" field."""
def __init__(self,
*,
@@ -68,11 +222,97 @@ class InstrumentationLibraryMetrics(google.protobuf.message.Message):
metrics : typing.Optional[typing.Iterable[global___Metric]] = ...,
schema_url : typing.Text = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"instrumentation_library",b"instrumentation_library",u"metrics",b"metrics",u"schema_url",b"schema_url"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library","metrics",b"metrics","schema_url",b"schema_url"]) -> None: ...
global___InstrumentationLibraryMetrics = InstrumentationLibraryMetrics
class Metric(google.protobuf.message.Message):
+ """Defines a Metric which has one or more timeseries. The following is a
+ brief summary of the Metric data model. For more details, see:
+
+ https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/metrics/datamodel.md
+
+
+ The data model and relation between entities is shown in the
+ diagram below. Here, "DataPoint" is the term used to refer to any
+ one of the specific data point value types, and "points" is the term used
+ to refer to any one of the lists of points contained in the Metric.
+
+ - Metric is composed of a metadata and data.
+ - Metadata part contains a name, description, unit.
+ - Data is one of the possible types (Sum, Gauge, Histogram, Summary).
+ - DataPoint contains timestamps, attributes, and one of the possible value type
+ fields.
+
+ Metric
+ +------------+
+ |name |
+ |description |
+ |unit | +------------------------------------+
+ |data |---> |Gauge, Sum, Histogram, Summary, ... |
+ +------------+ +------------------------------------+
+
+ Data [One of Gauge, Sum, Histogram, Summary, ...]
+ +-----------+
+ |... | // Metadata about the Data.
+ |points |--+
+ +-----------+ |
+ | +---------------------------+
+ | |DataPoint 1 |
+ v |+------+------+ +------+ |
+ +-----+ ||label |label |...|label | |
+ | 1 |-->||value1|value2|...|valueN| |
+ +-----+ |+------+------+ +------+ |
+ | . | |+-----+ |
+ | . | ||value| |
+ | . | |+-----+ |
+ | . | +---------------------------+
+ | . | .
+ | . | .
+ | . | .
+ | . | +---------------------------+
+ | . | |DataPoint M |
+ +-----+ |+------+------+ +------+ |
+ | M |-->||label |label |...|label | |
+ +-----+ ||value1|value2|...|valueN| |
+ |+------+------+ +------+ |
+ |+-----+ |
+ ||value| |
+ |+-----+ |
+ +---------------------------+
+
+ Each distinct type of DataPoint represents the output of a specific
+ aggregation function, the result of applying the DataPoint's
+ associated function to one or more measurements.
+
+ All DataPoint types have three common fields:
+ - Attributes includes key-value pairs associated with the data point
+ - TimeUnixNano is required, set to the end time of the aggregation
+ - StartTimeUnixNano is optional, but strongly encouraged for DataPoints
+ having an AggregationTemporality field, as discussed below.
+
+ Both TimeUnixNano and StartTimeUnixNano values are expressed as
+ UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
+
+ # TimeUnixNano
+
+ This field is required, having consistent interpretation across
+ DataPoint types. TimeUnixNano is the moment corresponding to when
+ the data point's aggregate value was captured.
+
+ Data points with the 0 value for TimeUnixNano SHOULD be rejected
+ by consumers.
+
+ # StartTimeUnixNano
+
+ StartTimeUnixNano in general allows detecting when a sequence of
+ observations is unbroken. This field indicates to consumers the
+ start time for points with cumulative and delta
+ AggregationTemporality, and it should be included whenever possible
+ to support correct rate calculation. Although it may be omitted
+ when the start time is truly unknown, setting StartTimeUnixNano is
+ strongly encouraged.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
NAME_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
@@ -85,30 +325,51 @@ class Metric(google.protobuf.message.Message):
HISTOGRAM_FIELD_NUMBER: builtins.int
SUMMARY_FIELD_NUMBER: builtins.int
name: typing.Text = ...
+ """name of the metric, including its DNS name prefix. It must be unique."""
+
description: typing.Text = ...
+ """description of the metric, which can be used in documentation."""
+
unit: typing.Text = ...
+ """unit in which the metric value is reported. Follows the format
+ described by http://unitsofmeasure.org/ucum.html.
+ """
@property
- def int_gauge(self) -> global___IntGauge: ...
-
+ def int_gauge(self) -> global___IntGauge:
+ """IntGauge and IntSum are deprecated and will be removed soon.
+ 1. Old senders and receivers that are not aware of this change will
+ continue using the `int_gauge` and `int_sum` fields.
+ 2. New senders, which are aware of this change MUST send only `gauge`
+ and `sum` fields.
+ 3. New receivers, which are aware of this change MUST convert these into
+ `gauge` and `sum` by using the provided as_int field in the oneof values.
+ This field will be removed in ~3 months, on July 1, 2021.
+ """
+ pass
@property
def gauge(self) -> global___Gauge: ...
-
@property
- def int_sum(self) -> global___IntSum: ...
-
+ def int_sum(self) -> global___IntSum:
+ """This field will be removed in ~3 months, on July 1, 2021."""
+ pass
@property
def sum(self) -> global___Sum: ...
-
@property
- def int_histogram(self) -> global___IntHistogram: ...
-
+ def int_histogram(self) -> global___IntHistogram:
+ """IntHistogram is deprecated and will be removed soon.
+ 1. Old senders and receivers that are not aware of this change will
+ continue using the `int_histogram` field.
+ 2. New senders, which are aware of this change MUST send only `histogram`.
+ 3. New receivers, which are aware of this change MUST convert this into
+ `histogram` by simply converting all int64 values into float.
+ This field will be removed in ~3 months, on July 1, 2021.
+ """
+ pass
@property
def histogram(self) -> global___Histogram: ...
-
@property
def summary(self) -> global___Summary: ...
-
def __init__(self,
*,
name : typing.Text = ...,
@@ -122,49 +383,76 @@ class Metric(google.protobuf.message.Message):
histogram : typing.Optional[global___Histogram] = ...,
summary : typing.Optional[global___Summary] = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"data",b"data",u"gauge",b"gauge",u"histogram",b"histogram",u"int_gauge",b"int_gauge",u"int_histogram",b"int_histogram",u"int_sum",b"int_sum",u"sum",b"sum",u"summary",b"summary"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"data",b"data",u"description",b"description",u"gauge",b"gauge",u"histogram",b"histogram",u"int_gauge",b"int_gauge",u"int_histogram",b"int_histogram",u"int_sum",b"int_sum",u"name",b"name",u"sum",b"sum",u"summary",b"summary",u"unit",b"unit"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"data",b"data"]) -> typing_extensions.Literal["int_gauge","gauge","int_sum","sum","int_histogram","histogram","summary"]: ...
+ def HasField(self, field_name: typing_extensions.Literal["data",b"data","gauge",b"gauge","histogram",b"histogram","int_gauge",b"int_gauge","int_histogram",b"int_histogram","int_sum",b"int_sum","sum",b"sum","summary",b"summary"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["data",b"data","description",b"description","gauge",b"gauge","histogram",b"histogram","int_gauge",b"int_gauge","int_histogram",b"int_histogram","int_sum",b"int_sum","name",b"name","sum",b"sum","summary",b"summary","unit",b"unit"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["data",b"data"]) -> typing.Optional[typing_extensions.Literal["int_gauge","gauge","int_sum","sum","int_histogram","histogram","summary"]]: ...
global___Metric = Metric
class IntGauge(google.protobuf.message.Message):
+ """IntGauge is deprecated. Use Gauge with an integer value in NumberDataPoint.
+
+ IntGauge represents the type of a int scalar metric that always exports the
+ "current value" for every data point. It should be used for an "unknown"
+ aggregation.
+
+ A Gauge does not support different aggregation temporalities. Given the
+ aggregation is unknown, points cannot be combined using the same
+ aggregation, regardless of aggregation temporalities. Therefore,
+ AggregationTemporality is not included. Consequently, this also means
+ "StartTimeUnixNano" is ignored for all data points.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntDataPoint]: ...
-
def __init__(self,
*,
data_points : typing.Optional[typing.Iterable[global___IntDataPoint]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"data_points",b"data_points"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["data_points",b"data_points"]) -> None: ...
global___IntGauge = IntGauge
class Gauge(google.protobuf.message.Message):
+ """Gauge represents the type of a double scalar metric that always exports the
+ "current value" for every data point. It should be used for an "unknown"
+ aggregation.
+
+ A Gauge does not support different aggregation temporalities. Given the
+ aggregation is unknown, points cannot be combined using the same
+ aggregation, regardless of aggregation temporalities. Therefore,
+ AggregationTemporality is not included. Consequently, this also means
+ "StartTimeUnixNano" is ignored for all data points.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NumberDataPoint]: ...
-
def __init__(self,
*,
data_points : typing.Optional[typing.Iterable[global___NumberDataPoint]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"data_points",b"data_points"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["data_points",b"data_points"]) -> None: ...
global___Gauge = Gauge
class IntSum(google.protobuf.message.Message):
+ """IntSum is deprecated. Use Sum with an integer value in NumberDataPoint.
+
+ IntSum represents the type of a numeric int scalar metric that is calculated as
+ a sum of all reported measurements over a time interval.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int
IS_MONOTONIC_FIELD_NUMBER: builtins.int
- aggregation_temporality: global___AggregationTemporality.V = ...
- is_monotonic: builtins.bool = ...
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntDataPoint]: ...
+ aggregation_temporality: global___AggregationTemporality.V = ...
+ """aggregation_temporality describes if the aggregator reports delta changes
+ since last report time, or cumulative changes since a fixed start time.
+ """
+
+ is_monotonic: builtins.bool = ...
+ """If "true" means that the sum is monotonic."""
def __init__(self,
*,
@@ -172,19 +460,26 @@ class IntSum(google.protobuf.message.Message):
aggregation_temporality : global___AggregationTemporality.V = ...,
is_monotonic : builtins.bool = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"aggregation_temporality",b"aggregation_temporality",u"data_points",b"data_points",u"is_monotonic",b"is_monotonic"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points","is_monotonic",b"is_monotonic"]) -> None: ...
global___IntSum = IntSum
class Sum(google.protobuf.message.Message):
+ """Sum represents the type of a numeric double scalar metric that is calculated
+ as a sum of all reported measurements over a time interval.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int
IS_MONOTONIC_FIELD_NUMBER: builtins.int
- aggregation_temporality: global___AggregationTemporality.V = ...
- is_monotonic: builtins.bool = ...
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___NumberDataPoint]: ...
+ aggregation_temporality: global___AggregationTemporality.V = ...
+ """aggregation_temporality describes if the aggregator reports delta changes
+ since last report time, or cumulative changes since a fixed start time.
+ """
+
+ is_monotonic: builtins.bool = ...
+ """If "true" means that the sum is monotonic."""
def __init__(self,
*,
@@ -192,74 +487,113 @@ class Sum(google.protobuf.message.Message):
aggregation_temporality : global___AggregationTemporality.V = ...,
is_monotonic : builtins.bool = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"aggregation_temporality",b"aggregation_temporality",u"data_points",b"data_points",u"is_monotonic",b"is_monotonic"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points","is_monotonic",b"is_monotonic"]) -> None: ...
global___Sum = Sum
class IntHistogram(google.protobuf.message.Message):
+ """IntHistogram is deprecated, replaced by Histogram points using double-
+ valued exemplars.
+
+ This represents the type of a metric that is calculated by aggregating as a
+ Histogram of all reported int measurements over a time interval.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int
- aggregation_temporality: global___AggregationTemporality.V = ...
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntHistogramDataPoint]: ...
+ aggregation_temporality: global___AggregationTemporality.V = ...
+ """aggregation_temporality describes if the aggregator reports delta changes
+ since last report time, or cumulative changes since a fixed start time.
+ """
def __init__(self,
*,
data_points : typing.Optional[typing.Iterable[global___IntHistogramDataPoint]] = ...,
aggregation_temporality : global___AggregationTemporality.V = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"aggregation_temporality",b"aggregation_temporality",u"data_points",b"data_points"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points"]) -> None: ...
global___IntHistogram = IntHistogram
class Histogram(google.protobuf.message.Message):
+ """Histogram represents the type of a metric that is calculated by aggregating
+ as a Histogram of all reported double measurements over a time interval.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
AGGREGATION_TEMPORALITY_FIELD_NUMBER: builtins.int
- aggregation_temporality: global___AggregationTemporality.V = ...
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___HistogramDataPoint]: ...
+ aggregation_temporality: global___AggregationTemporality.V = ...
+ """aggregation_temporality describes if the aggregator reports delta changes
+ since last report time, or cumulative changes since a fixed start time.
+ """
def __init__(self,
*,
data_points : typing.Optional[typing.Iterable[global___HistogramDataPoint]] = ...,
aggregation_temporality : global___AggregationTemporality.V = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"aggregation_temporality",b"aggregation_temporality",u"data_points",b"data_points"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["aggregation_temporality",b"aggregation_temporality","data_points",b"data_points"]) -> None: ...
global___Histogram = Histogram
class Summary(google.protobuf.message.Message):
+ """Summary metric data are used to convey quantile summaries,
+ a Prometheus (see: https://prometheus.io/docs/concepts/metric_types/#summary)
+ and OpenMetrics (see: https://github.com/OpenObservability/OpenMetrics/blob/4dbf6075567ab43296eed941037c12951faafb92/protos/prometheus.proto#L45)
+ data type. These data points cannot always be merged in a meaningful way.
+ While they can be useful in some applications, histogram data points are
+ recommended for new applications.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
DATA_POINTS_FIELD_NUMBER: builtins.int
-
@property
def data_points(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint]: ...
-
def __init__(self,
*,
data_points : typing.Optional[typing.Iterable[global___SummaryDataPoint]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"data_points",b"data_points"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["data_points",b"data_points"]) -> None: ...
global___Summary = Summary
class IntDataPoint(google.protobuf.message.Message):
+ """IntDataPoint is a single data point in a timeseries that describes the
+ time-varying values of a int64 metric.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
LABELS_FIELD_NUMBER: builtins.int
START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
EXEMPLARS_FIELD_NUMBER: builtins.int
+ @property
+ def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """The set of labels that uniquely identify this timeseries."""
+ pass
start_time_unix_nano: builtins.int = ...
+ """StartTimeUnixNano is optional but strongly encouraged, see the
+ detailed comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
time_unix_nano: builtins.int = ...
- value: builtins.int = ...
+ """TimeUnixNano is required, see the detailed comments above Metric.
- @property
- def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
- @property
- def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntExemplar]: ...
+ value: builtins.int = ...
+ """value itself."""
+ @property
+ def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntExemplar]:
+ """(Optional) List of exemplars collected from
+ measurements that were used to form the data point
+ """
+ pass
def __init__(self,
*,
labels : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]] = ...,
@@ -268,10 +602,13 @@ class IntDataPoint(google.protobuf.message.Message):
value : builtins.int = ...,
exemplars : typing.Optional[typing.Iterable[global___IntExemplar]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"exemplars",b"exemplars",u"labels",b"labels",u"start_time_unix_nano",b"start_time_unix_nano",u"time_unix_nano",b"time_unix_nano",u"value",b"value"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["exemplars",b"exemplars","labels",b"labels","start_time_unix_nano",b"start_time_unix_nano","time_unix_nano",b"time_unix_nano","value",b"value"]) -> None: ...
global___IntDataPoint = IntDataPoint
class NumberDataPoint(google.protobuf.message.Message):
+ """NumberDataPoint is a single data point in a timeseries that describes the
+ time-varying value of a double metric.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
ATTRIBUTES_FIELD_NUMBER: builtins.int
LABELS_FIELD_NUMBER: builtins.int
@@ -280,20 +617,47 @@ class NumberDataPoint(google.protobuf.message.Message):
AS_DOUBLE_FIELD_NUMBER: builtins.int
AS_INT_FIELD_NUMBER: builtins.int
EXEMPLARS_FIELD_NUMBER: builtins.int
+ @property
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """The set of key/value pairs that uniquely identify the timeseries from
+ where this point belongs. The list may be empty (may contain 0 elements).
+ """
+ pass
+ @property
+ def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """Labels is deprecated and will be removed soon.
+ 1. Old senders and receivers that are not aware of this change will
+ continue using the `labels` field.
+ 2. New senders, which are aware of this change MUST send only `attributes`.
+ 3. New receivers, which are aware of this change MUST convert this into
+ `labels` by simply converting all int64 values into float.
+
+ This field will be removed in ~3 months, on July 1, 2021.
+ """
+ pass
start_time_unix_nano: builtins.int = ...
- time_unix_nano: builtins.int = ...
- as_double: builtins.float = ...
- as_int: builtins.int = ...
+ """StartTimeUnixNano is optional but strongly encouraged, see the
+ detailed comments above Metric.
- @property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
- @property
- def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ time_unix_nano: builtins.int = ...
+ """TimeUnixNano is required, see the detailed comments above Metric.
- @property
- def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: ...
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+ as_double: builtins.float = ...
+ as_int: builtins.int = ...
+ @property
+ def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]:
+ """(Optional) List of exemplars collected from
+ measurements that were used to form the data point
+ """
+ pass
def __init__(self,
*,
attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
@@ -304,12 +668,25 @@ class NumberDataPoint(google.protobuf.message.Message):
as_int : builtins.int = ...,
exemplars : typing.Optional[typing.Iterable[global___Exemplar]] = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"as_double",b"as_double",u"as_int",b"as_int",u"value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"as_double",b"as_double",u"as_int",b"as_int",u"attributes",b"attributes",u"exemplars",b"exemplars",u"labels",b"labels",u"start_time_unix_nano",b"start_time_unix_nano",u"time_unix_nano",b"time_unix_nano",u"value",b"value"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"value",b"value"]) -> typing_extensions.Literal["as_double","as_int"]: ...
+ def HasField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","value",b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","attributes",b"attributes","exemplars",b"exemplars","labels",b"labels","start_time_unix_nano",b"start_time_unix_nano","time_unix_nano",b"time_unix_nano","value",b"value"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["as_double","as_int"]]: ...
global___NumberDataPoint = NumberDataPoint
class IntHistogramDataPoint(google.protobuf.message.Message):
+ """IntHistogramDataPoint is deprecated; use HistogramDataPoint.
+
+ This is a single data point in a timeseries that describes
+ the time-varying values of a Histogram of int values. A Histogram contains
+ summary statistics for a population of values, it may optionally contain
+ the distribution of those values across a set of buckets.
+
+ If the histogram contains the distribution of values, then both
+ "explicit_bounds" and "bucket_counts" fields must be defined.
+ If the histogram does not contain the distribution of values, then both
+ "explicit_bounds" and "bucket_counts" must be omitted and only "count" and
+ "sum" are known.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
LABELS_FIELD_NUMBER: builtins.int
START_TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
@@ -319,19 +696,72 @@ class IntHistogramDataPoint(google.protobuf.message.Message):
BUCKET_COUNTS_FIELD_NUMBER: builtins.int
EXPLICIT_BOUNDS_FIELD_NUMBER: builtins.int
EXEMPLARS_FIELD_NUMBER: builtins.int
+ @property
+ def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """The set of labels that uniquely identify this timeseries."""
+ pass
start_time_unix_nano: builtins.int = ...
+ """StartTimeUnixNano is optional but strongly encouraged, see the
+ detailed comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
time_unix_nano: builtins.int = ...
+ """TimeUnixNano is required, see the detailed comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
count: builtins.int = ...
+ """count is the number of values in the population. Must be non-negative. This
+ value must be equal to the sum of the "count" fields in buckets if a
+ histogram is provided.
+ """
+
sum: builtins.int = ...
- bucket_counts: google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int] = ...
- explicit_bounds: google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float] = ...
+ """sum of the values in the population. If count is zero then this field
+ must be zero. This value must be equal to the sum of the "sum" fields in
+ buckets if a histogram is provided.
+ """
@property
- def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
+ """bucket_counts is an optional field contains the count values of histogram
+ for each bucket.
+ The sum of the bucket_counts must equal the value in the count field.
+
+ The number of elements in bucket_counts array must be by one greater than
+ the number of elements in explicit_bounds array.
+ """
+ pass
@property
- def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntExemplar]: ...
+ def explicit_bounds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
+ """explicit_bounds specifies buckets with explicitly defined bounds for values.
+
+ This defines size(explicit_bounds) + 1 (= N) buckets. The boundaries for
+ bucket at index i are:
+
+ (-infinity, explicit_bounds[i]] for i == 0
+ (explicit_bounds[i-1], explicit_bounds[i]] for 0 < i < N-1
+ (explicit_bounds[i], +infinity) for i == N-1
+
+ The values in the explicit_bounds array must be strictly increasing.
+ Histogram buckets are inclusive of their upper boundary, except the last
+ bucket where the boundary is at infinity. This format is intentionally
+ compatible with the OpenMetrics histogram definition.
+ """
+ pass
+ @property
+ def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___IntExemplar]:
+ """(Optional) List of exemplars collected from
+ measurements that were used to form the data point
+ """
+ pass
def __init__(self,
*,
labels : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]] = ...,
@@ -343,10 +773,21 @@ class IntHistogramDataPoint(google.protobuf.message.Message):
explicit_bounds : typing.Optional[typing.Iterable[builtins.float]] = ...,
exemplars : typing.Optional[typing.Iterable[global___IntExemplar]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"bucket_counts",b"bucket_counts",u"count",b"count",u"exemplars",b"exemplars",u"explicit_bounds",b"explicit_bounds",u"labels",b"labels",u"start_time_unix_nano",b"start_time_unix_nano",u"sum",b"sum",u"time_unix_nano",b"time_unix_nano"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["bucket_counts",b"bucket_counts","count",b"count","exemplars",b"exemplars","explicit_bounds",b"explicit_bounds","labels",b"labels","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano"]) -> None: ...
global___IntHistogramDataPoint = IntHistogramDataPoint
class HistogramDataPoint(google.protobuf.message.Message):
+ """HistogramDataPoint is a single data point in a timeseries that describes the
+ time-varying values of a Histogram of double values. A Histogram contains
+ summary statistics for a population of values, it may optionally contain the
+ distribution of those values across a set of buckets.
+
+ If the histogram contains the distribution of values, then both
+ "explicit_bounds" and "bucket_counts" fields must be defined.
+ If the histogram does not contain the distribution of values, then both
+ "explicit_bounds" and "bucket_counts" must be omitted and only "count" and
+ "sum" are known.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
ATTRIBUTES_FIELD_NUMBER: builtins.int
LABELS_FIELD_NUMBER: builtins.int
@@ -357,22 +798,92 @@ class HistogramDataPoint(google.protobuf.message.Message):
BUCKET_COUNTS_FIELD_NUMBER: builtins.int
EXPLICIT_BOUNDS_FIELD_NUMBER: builtins.int
EXEMPLARS_FIELD_NUMBER: builtins.int
+ @property
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """The set of key/value pairs that uniquely identify the timeseries from
+ where this point belongs. The list may be empty (may contain 0 elements).
+ """
+ pass
+ @property
+ def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """Labels is deprecated and will be removed soon.
+ 1. Old senders and receivers that are not aware of this change will
+ continue using the `labels` field.
+ 2. New senders, which are aware of this change MUST send only `attributes`.
+ 3. New receivers, which are aware of this change MUST convert this into
+ `labels` by simply converting all int64 values into float.
+
+ This field will be removed in ~3 months, on July 1, 2021.
+ """
+ pass
start_time_unix_nano: builtins.int = ...
+ """StartTimeUnixNano is optional but strongly encouraged, see the
+ the detiled comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
time_unix_nano: builtins.int = ...
+ """TimeUnixNano is required, see the detailed comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
count: builtins.int = ...
+ """count is the number of values in the population. Must be non-negative. This
+ value must be equal to the sum of the "count" fields in buckets if a
+ histogram is provided.
+ """
+
sum: builtins.float = ...
- bucket_counts: google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int] = ...
- explicit_bounds: google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float] = ...
+ """sum of the values in the population. If count is zero then this field
+ must be zero. This value must be equal to the sum of the "sum" fields in
+ buckets if a histogram is provided.
- @property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ Note: Sum should only be filled out when measuring non-negative discrete
+ events, and is assumed to be monotonic over the values of these events.
+ Negative events *can* be recorded, but sum should not be filled out when
+ doing so. This is specifically to enforce compatibility w/ OpenMetrics,
+ see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#histogram
+ """
@property
- def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ def bucket_counts(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]:
+ """bucket_counts is an optional field contains the count values of histogram
+ for each bucket.
+
+ The sum of the bucket_counts must equal the value in the count field.
+ The number of elements in bucket_counts array must be by one greater than
+ the number of elements in explicit_bounds array.
+ """
+ pass
@property
- def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]: ...
+ def explicit_bounds(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]:
+ """explicit_bounds specifies buckets with explicitly defined bounds for values.
+
+ This defines size(explicit_bounds) + 1 (= N) buckets. The boundaries for
+ bucket at index i are:
+
+ (-infinity, explicit_bounds[i]] for i == 0
+ (explicit_bounds[i-1], explicit_bounds[i]] for 0 < i < N-1
+ (explicit_bounds[i], +infinity) for i == N-1
+ The values in the explicit_bounds array must be strictly increasing.
+
+ Histogram buckets are inclusive of their upper boundary, except the last
+ bucket where the boundary is at infinity. This format is intentionally
+ compatible with the OpenMetrics histogram definition.
+ """
+ pass
+ @property
+ def exemplars(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Exemplar]:
+ """(Optional) List of exemplars collected from
+ measurements that were used to form the data point
+ """
+ pass
def __init__(self,
*,
attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
@@ -385,24 +896,44 @@ class HistogramDataPoint(google.protobuf.message.Message):
explicit_bounds : typing.Optional[typing.Iterable[builtins.float]] = ...,
exemplars : typing.Optional[typing.Iterable[global___Exemplar]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"attributes",b"attributes",u"bucket_counts",b"bucket_counts",u"count",b"count",u"exemplars",b"exemplars",u"explicit_bounds",b"explicit_bounds",u"labels",b"labels",u"start_time_unix_nano",b"start_time_unix_nano",u"sum",b"sum",u"time_unix_nano",b"time_unix_nano"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","bucket_counts",b"bucket_counts","count",b"count","exemplars",b"exemplars","explicit_bounds",b"explicit_bounds","labels",b"labels","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano"]) -> None: ...
global___HistogramDataPoint = HistogramDataPoint
class SummaryDataPoint(google.protobuf.message.Message):
+ """SummaryDataPoint is a single data point in a timeseries that describes the
+ time-varying values of a Summary metric.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
class ValueAtQuantile(google.protobuf.message.Message):
+ """Represents the value at a given quantile of a distribution.
+
+ To record Min and Max values following conventions are used:
+ - The 1.0 quantile is equivalent to the maximum value observed.
+ - The 0.0 quantile is equivalent to the minimum value observed.
+
+ See the following issue for more context:
+ https://github.com/open-telemetry/opentelemetry-proto/issues/125
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
QUANTILE_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
quantile: builtins.float = ...
+ """The quantile of a distribution. Must be in the interval
+ [0.0, 1.0].
+ """
+
value: builtins.float = ...
+ """The value at the given quantile of a distribution.
+
+ Quantile values must NOT be negative.
+ """
def __init__(self,
*,
quantile : builtins.float = ...,
value : builtins.float = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"quantile",b"quantile",u"value",b"value"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["quantile",b"quantile","value",b"value"]) -> None: ...
ATTRIBUTES_FIELD_NUMBER: builtins.int
LABELS_FIELD_NUMBER: builtins.int
@@ -411,20 +942,59 @@ class SummaryDataPoint(google.protobuf.message.Message):
COUNT_FIELD_NUMBER: builtins.int
SUM_FIELD_NUMBER: builtins.int
QUANTILE_VALUES_FIELD_NUMBER: builtins.int
+ @property
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """The set of key/value pairs that uniquely identify the timeseries from
+ where this point belongs. The list may be empty (may contain 0 elements).
+ """
+ pass
+ @property
+ def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """Labels is deprecated and will be removed soon.
+ 1. Old senders and receivers that are not aware of this change will
+ continue using the `labels` field.
+ 2. New senders, which are aware of this change MUST send only `attributes`.
+ 3. New receivers, which are aware of this change MUST convert this into
+ `labels` by simply converting all int64 values into float.
+
+ This field will be removed in ~3 months, on July 1, 2021.
+ """
+ pass
start_time_unix_nano: builtins.int = ...
+ """StartTimeUnixNano is optional but strongly encouraged, see the
+ the detiled comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
time_unix_nano: builtins.int = ...
+ """TimeUnixNano is required, see the detailed comments above Metric.
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
count: builtins.int = ...
- sum: builtins.float = ...
+ """count is the number of values in the population. Must be non-negative."""
- @property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ sum: builtins.float = ...
+ """sum of the values in the population. If count is zero then this field
+ must be zero.
- @property
- def labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ Note: Sum should only be filled out when measuring non-negative discrete
+ events, and is assumed to be monotonic over the values of these events.
+ Negative events *can* be recorded, but sum should not be filled out when
+ doing so. This is specifically to enforce compatibility w/ OpenMetrics,
+ see: https://github.com/OpenObservability/OpenMetrics/blob/main/specification/OpenMetrics.md#summary
+ """
@property
- def quantile_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint.ValueAtQuantile]: ...
-
+ def quantile_values(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___SummaryDataPoint.ValueAtQuantile]:
+ """(Optional) list of values at different quantiles of the distribution calculated
+ from the current snapshot. The quantiles must be strictly increasing.
+ """
+ pass
def __init__(self,
*,
attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
@@ -435,23 +1005,49 @@ class SummaryDataPoint(google.protobuf.message.Message):
sum : builtins.float = ...,
quantile_values : typing.Optional[typing.Iterable[global___SummaryDataPoint.ValueAtQuantile]] = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"attributes",b"attributes",u"count",b"count",u"labels",b"labels",u"quantile_values",b"quantile_values",u"start_time_unix_nano",b"start_time_unix_nano",u"sum",b"sum",u"time_unix_nano",b"time_unix_nano"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","count",b"count","labels",b"labels","quantile_values",b"quantile_values","start_time_unix_nano",b"start_time_unix_nano","sum",b"sum","time_unix_nano",b"time_unix_nano"]) -> None: ...
global___SummaryDataPoint = SummaryDataPoint
class IntExemplar(google.protobuf.message.Message):
+ """A representation of an exemplar, which is a sample input int measurement.
+ Exemplars also hold information about the environment when the measurement
+ was recorded, for example the span and trace ID of the active span when the
+ exemplar was recorded.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
FILTERED_LABELS_FIELD_NUMBER: builtins.int
TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
SPAN_ID_FIELD_NUMBER: builtins.int
TRACE_ID_FIELD_NUMBER: builtins.int
+ @property
+ def filtered_labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """The set of labels that were filtered out by the aggregator, but recorded
+ alongside the original measurement. Only labels that were filtered out
+ by the aggregator should be included
+ """
+ pass
time_unix_nano: builtins.int = ...
+ """time_unix_nano is the exact time when this exemplar was recorded
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
value: builtins.int = ...
+ """Numerical int value of the measurement that was recorded."""
+
span_id: builtins.bytes = ...
- trace_id: builtins.bytes = ...
+ """(Optional) Span ID of the exemplar trace.
+ span_id may be missing if the measurement is not recorded inside a trace
+ or if the trace is not sampled.
+ """
- @property
- def filtered_labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ trace_id: builtins.bytes = ...
+ """(Optional) Trace ID of the exemplar trace.
+ trace_id may be missing if the measurement is not recorded inside a trace
+ or if the trace is not sampled.
+ """
def __init__(self,
*,
@@ -461,10 +1057,15 @@ class IntExemplar(google.protobuf.message.Message):
span_id : builtins.bytes = ...,
trace_id : builtins.bytes = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"filtered_labels",b"filtered_labels",u"span_id",b"span_id",u"time_unix_nano",b"time_unix_nano",u"trace_id",b"trace_id",u"value",b"value"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["filtered_labels",b"filtered_labels","span_id",b"span_id","time_unix_nano",b"time_unix_nano","trace_id",b"trace_id","value",b"value"]) -> None: ...
global___IntExemplar = IntExemplar
class Exemplar(google.protobuf.message.Message):
+ """A representation of an exemplar, which is a sample input measurement.
+ Exemplars also hold information about the environment when the measurement
+ was recorded, for example the span and trace ID of the active span when the
+ exemplar was recorded.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
FILTERED_ATTRIBUTES_FIELD_NUMBER: builtins.int
FILTERED_LABELS_FIELD_NUMBER: builtins.int
@@ -473,17 +1074,46 @@ class Exemplar(google.protobuf.message.Message):
AS_INT_FIELD_NUMBER: builtins.int
SPAN_ID_FIELD_NUMBER: builtins.int
TRACE_ID_FIELD_NUMBER: builtins.int
+ @property
+ def filtered_attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """The set of key/value pairs that were filtered out by the aggregator, but
+ recorded alongside the original measurement. Only key/value pairs that were
+ filtered out by the aggregator should be included
+ """
+ pass
+ @property
+ def filtered_labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]:
+ """Labels is deprecated and will be removed soon.
+ 1. Old senders and receivers that are not aware of this change will
+ continue using the `filtered_labels` field.
+ 2. New senders, which are aware of this change MUST send only
+ `filtered_attributes`.
+ 3. New receivers, which are aware of this change MUST convert this into
+ `filtered_labels` by simply converting all int64 values into float.
+
+ This field will be removed in ~3 months, on July 1, 2021.
+ """
+ pass
time_unix_nano: builtins.int = ...
+ """time_unix_nano is the exact time when this exemplar was recorded
+
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January
+ 1970.
+ """
+
as_double: builtins.float = ...
as_int: builtins.int = ...
span_id: builtins.bytes = ...
- trace_id: builtins.bytes = ...
+ """(Optional) Span ID of the exemplar trace.
+ span_id may be missing if the measurement is not recorded inside a trace
+ or if the trace is not sampled.
+ """
- @property
- def filtered_attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
-
- @property
- def filtered_labels(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.StringKeyValue]: ...
+ trace_id: builtins.bytes = ...
+ """(Optional) Trace ID of the exemplar trace.
+ trace_id may be missing if the measurement is not recorded inside a trace
+ or if the trace is not sampled.
+ """
def __init__(self,
*,
@@ -495,7 +1125,7 @@ class Exemplar(google.protobuf.message.Message):
span_id : builtins.bytes = ...,
trace_id : builtins.bytes = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"as_double",b"as_double",u"as_int",b"as_int",u"value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"as_double",b"as_double",u"as_int",b"as_int",u"filtered_attributes",b"filtered_attributes",u"filtered_labels",b"filtered_labels",u"span_id",b"span_id",u"time_unix_nano",b"time_unix_nano",u"trace_id",b"trace_id",u"value",b"value"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"value",b"value"]) -> typing_extensions.Literal["as_double","as_int"]: ...
+ def HasField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","value",b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["as_double",b"as_double","as_int",b"as_int","filtered_attributes",b"filtered_attributes","filtered_labels",b"filtered_labels","span_id",b"span_id","time_unix_nano",b"time_unix_nano","trace_id",b"trace_id","value",b"value"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["as_double","as_int"]]: ...
global___Exemplar = Exemplar
diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py
index cf7af9fbb8..f64160f602 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/resource/v1/resource.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -19,6 +19,7 @@
package='opentelemetry.proto.resource.v1',
syntax='proto3',
serialized_options=b'\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\001Z@github.com/open-telemetry/opentelemetry-proto/gen/go/resource/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n.opentelemetry/proto/resource/v1/resource.proto\x12\x1fopentelemetry.proto.resource.v1\x1a*opentelemetry/proto/common/v1/common.proto\"i\n\x08Resource\x12;\n\nattributes\x18\x01 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x02 \x01(\rBw\n\"io.opentelemetry.proto.resource.v1B\rResourceProtoP\x01Z@github.com/open-telemetry/opentelemetry-proto/gen/go/resource/v1b\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2.DESCRIPTOR,])
@@ -32,6 +33,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.resource.v1.Resource.attributes', index=0,
@@ -39,14 +41,14 @@
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dropped_attributes_count', full_name='opentelemetry.proto.resource.v1.Resource.dropped_attributes_count', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
diff --git a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi
index c2c5881753..957a7b6d1b 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/resource/v1/resource_pb2.pyi
@@ -13,18 +13,23 @@ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class Resource(google.protobuf.message.Message):
+ """Resource information."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
ATTRIBUTES_FIELD_NUMBER: builtins.int
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
- dropped_attributes_count: builtins.int = ...
-
@property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """Set of labels that describe the resource."""
+ pass
+ dropped_attributes_count: builtins.int = ...
+ """dropped_attributes_count is the number of dropped attributes. If the value is 0, then
+ no attributes were dropped.
+ """
def __init__(self,
*,
attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
dropped_attributes_count : builtins.int = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"attributes",b"attributes",u"dropped_attributes_count",b"dropped_attributes_count"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count"]) -> None: ...
global___Resource = Resource
diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py
index a30df48e2c..99428b3c89 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/trace/v1/trace_config.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -18,6 +18,7 @@
package='opentelemetry.proto.trace.v1',
syntax='proto3',
serialized_options=b'\n\037io.opentelemetry.proto.trace.v1B\020TraceConfigProtoP\001ZGgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/trace/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n/opentelemetry/proto/trace/v1/trace_config.proto\x12\x1copentelemetry.proto.trace.v1\"\xc8\x03\n\x0bTraceConfig\x12I\n\x10\x63onstant_sampler\x18\x01 \x01(\x0b\x32-.opentelemetry.proto.trace.v1.ConstantSamplerH\x00\x12O\n\x14trace_id_ratio_based\x18\x02 \x01(\x0b\x32/.opentelemetry.proto.trace.v1.TraceIdRatioBasedH\x00\x12R\n\x15rate_limiting_sampler\x18\x03 \x01(\x0b\x32\x31.opentelemetry.proto.trace.v1.RateLimitingSamplerH\x00\x12 \n\x18max_number_of_attributes\x18\x04 \x01(\x03\x12\"\n\x1amax_number_of_timed_events\x18\x05 \x01(\x03\x12\x30\n(max_number_of_attributes_per_timed_event\x18\x06 \x01(\x03\x12\x1b\n\x13max_number_of_links\x18\x07 \x01(\x03\x12)\n!max_number_of_attributes_per_link\x18\x08 \x01(\x03\x42\t\n\x07sampler\"\xa9\x01\n\x0f\x43onstantSampler\x12P\n\x08\x64\x65\x63ision\x18\x01 \x01(\x0e\x32>.opentelemetry.proto.trace.v1.ConstantSampler.ConstantDecision\"D\n\x10\x43onstantDecision\x12\x0e\n\nALWAYS_OFF\x10\x00\x12\r\n\tALWAYS_ON\x10\x01\x12\x11\n\rALWAYS_PARENT\x10\x02\"*\n\x11TraceIdRatioBased\x12\x15\n\rsamplingRatio\x18\x01 \x01(\x01\"\"\n\x13RateLimitingSampler\x12\x0b\n\x03qps\x18\x01 \x01(\x03\x42~\n\x1fio.opentelemetry.proto.trace.v1B\x10TraceConfigProtoP\x01ZGgitpro.ttaallkk.top/open-telemetry/opentelemetry-proto/gen/go/collector/trace/v1b\x06proto3'
)
@@ -28,19 +29,23 @@
full_name='opentelemetry.proto.trace.v1.ConstantSampler.ConstantDecision',
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ALWAYS_OFF', index=0, number=0,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ALWAYS_ON', index=1, number=1,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ALWAYS_PARENT', index=2, number=2,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
@@ -56,6 +61,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='constant_sampler', full_name='opentelemetry.proto.trace.v1.TraceConfig.constant_sampler', index=0,
@@ -63,56 +69,56 @@
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trace_id_ratio_based', full_name='opentelemetry.proto.trace.v1.TraceConfig.trace_id_ratio_based', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='rate_limiting_sampler', full_name='opentelemetry.proto.trace.v1.TraceConfig.rate_limiting_sampler', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_number_of_attributes', full_name='opentelemetry.proto.trace.v1.TraceConfig.max_number_of_attributes', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_number_of_timed_events', full_name='opentelemetry.proto.trace.v1.TraceConfig.max_number_of_timed_events', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_number_of_attributes_per_timed_event', full_name='opentelemetry.proto.trace.v1.TraceConfig.max_number_of_attributes_per_timed_event', index=5,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_number_of_links', full_name='opentelemetry.proto.trace.v1.TraceConfig.max_number_of_links', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='max_number_of_attributes_per_link', full_name='opentelemetry.proto.trace.v1.TraceConfig.max_number_of_attributes_per_link', index=7,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -126,7 +132,9 @@
oneofs=[
_descriptor.OneofDescriptor(
name='sampler', full_name='opentelemetry.proto.trace.v1.TraceConfig.sampler',
- index=0, containing_type=None, fields=[]),
+ index=0, containing_type=None,
+ create_key=_descriptor._internal_create_key,
+ fields=[]),
],
serialized_start=82,
serialized_end=538,
@@ -139,6 +147,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='decision', full_name='opentelemetry.proto.trace.v1.ConstantSampler.decision', index=0,
@@ -146,7 +155,7 @@
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -171,6 +180,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='samplingRatio', full_name='opentelemetry.proto.trace.v1.TraceIdRatioBased.samplingRatio', index=0,
@@ -178,7 +188,7 @@
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -202,6 +212,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='qps', full_name='opentelemetry.proto.trace.v1.RateLimitingSampler.qps', index=0,
@@ -209,7 +220,7 @@
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi
index e02bf3d5ba..8290baf58e 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_config_pb2.pyi
@@ -12,6 +12,9 @@ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class TraceConfig(google.protobuf.message.Message):
+ """Global configuration of the trace service. All fields must be specified, or
+ the default (zero) values will be used for each type.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
CONSTANT_SAMPLER_FIELD_NUMBER: builtins.int
TRACE_ID_RATIO_BASED_FIELD_NUMBER: builtins.int
@@ -21,20 +24,26 @@ class TraceConfig(google.protobuf.message.Message):
MAX_NUMBER_OF_ATTRIBUTES_PER_TIMED_EVENT_FIELD_NUMBER: builtins.int
MAX_NUMBER_OF_LINKS_FIELD_NUMBER: builtins.int
MAX_NUMBER_OF_ATTRIBUTES_PER_LINK_FIELD_NUMBER: builtins.int
- max_number_of_attributes: builtins.int = ...
- max_number_of_timed_events: builtins.int = ...
- max_number_of_attributes_per_timed_event: builtins.int = ...
- max_number_of_links: builtins.int = ...
- max_number_of_attributes_per_link: builtins.int = ...
-
@property
def constant_sampler(self) -> global___ConstantSampler: ...
-
@property
def trace_id_ratio_based(self) -> global___TraceIdRatioBased: ...
-
@property
def rate_limiting_sampler(self) -> global___RateLimitingSampler: ...
+ max_number_of_attributes: builtins.int = ...
+ """The global default max number of attributes per span."""
+
+ max_number_of_timed_events: builtins.int = ...
+ """The global default max number of annotation events per span."""
+
+ max_number_of_attributes_per_timed_event: builtins.int = ...
+ """The global default max number of attributes per timed event."""
+
+ max_number_of_links: builtins.int = ...
+ """The global default max number of link entries per span."""
+
+ max_number_of_attributes_per_link: builtins.int = ...
+ """The global default max number of attributes per span."""
def __init__(self,
*,
@@ -47,54 +56,68 @@ class TraceConfig(google.protobuf.message.Message):
max_number_of_links : builtins.int = ...,
max_number_of_attributes_per_link : builtins.int = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"constant_sampler",b"constant_sampler",u"rate_limiting_sampler",b"rate_limiting_sampler",u"sampler",b"sampler",u"trace_id_ratio_based",b"trace_id_ratio_based"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"constant_sampler",b"constant_sampler",u"max_number_of_attributes",b"max_number_of_attributes",u"max_number_of_attributes_per_link",b"max_number_of_attributes_per_link",u"max_number_of_attributes_per_timed_event",b"max_number_of_attributes_per_timed_event",u"max_number_of_links",b"max_number_of_links",u"max_number_of_timed_events",b"max_number_of_timed_events",u"rate_limiting_sampler",b"rate_limiting_sampler",u"sampler",b"sampler",u"trace_id_ratio_based",b"trace_id_ratio_based"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal[u"sampler",b"sampler"]) -> typing_extensions.Literal["constant_sampler","trace_id_ratio_based","rate_limiting_sampler"]: ...
+ def HasField(self, field_name: typing_extensions.Literal["constant_sampler",b"constant_sampler","rate_limiting_sampler",b"rate_limiting_sampler","sampler",b"sampler","trace_id_ratio_based",b"trace_id_ratio_based"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["constant_sampler",b"constant_sampler","max_number_of_attributes",b"max_number_of_attributes","max_number_of_attributes_per_link",b"max_number_of_attributes_per_link","max_number_of_attributes_per_timed_event",b"max_number_of_attributes_per_timed_event","max_number_of_links",b"max_number_of_links","max_number_of_timed_events",b"max_number_of_timed_events","rate_limiting_sampler",b"rate_limiting_sampler","sampler",b"sampler","trace_id_ratio_based",b"trace_id_ratio_based"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["sampler",b"sampler"]) -> typing.Optional[typing_extensions.Literal["constant_sampler","trace_id_ratio_based","rate_limiting_sampler"]]: ...
global___TraceConfig = TraceConfig
class ConstantSampler(google.protobuf.message.Message):
+ """Sampler that always makes a constant decision on span sampling."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- class _ConstantDecision(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[ConstantDecision.V], builtins.type):
+ class ConstantDecision(_ConstantDecision, metaclass=_ConstantDecisionEnumTypeWrapper):
+ """How spans should be sampled:
+ - Always off
+ - Always on
+ - Always follow the parent Span's decision (off if no parent).
+ """
+ pass
+ class _ConstantDecision:
+ V = typing.NewType('V', builtins.int)
+ class _ConstantDecisionEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ConstantDecision.V], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
ALWAYS_OFF = ConstantSampler.ConstantDecision.V(0)
ALWAYS_ON = ConstantSampler.ConstantDecision.V(1)
ALWAYS_PARENT = ConstantSampler.ConstantDecision.V(2)
- class ConstantDecision(metaclass=_ConstantDecision):
- V = typing.NewType('V', builtins.int)
+
ALWAYS_OFF = ConstantSampler.ConstantDecision.V(0)
ALWAYS_ON = ConstantSampler.ConstantDecision.V(1)
ALWAYS_PARENT = ConstantSampler.ConstantDecision.V(2)
DECISION_FIELD_NUMBER: builtins.int
decision: global___ConstantSampler.ConstantDecision.V = ...
-
def __init__(self,
*,
decision : global___ConstantSampler.ConstantDecision.V = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"decision",b"decision"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["decision",b"decision"]) -> None: ...
global___ConstantSampler = ConstantSampler
class TraceIdRatioBased(google.protobuf.message.Message):
+ """Sampler that tries to uniformly sample traces with a given ratio.
+ The ratio of sampling a trace is equal to that of the specified ratio.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
SAMPLINGRATIO_FIELD_NUMBER: builtins.int
samplingRatio: builtins.float = ...
+ """The desired ratio of sampling. Must be within [0.0, 1.0]."""
def __init__(self,
*,
samplingRatio : builtins.float = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"samplingRatio",b"samplingRatio"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["samplingRatio",b"samplingRatio"]) -> None: ...
global___TraceIdRatioBased = TraceIdRatioBased
class RateLimitingSampler(google.protobuf.message.Message):
+ """Sampler that tries to sample with a rate per time window."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
QPS_FIELD_NUMBER: builtins.int
qps: builtins.int = ...
+ """Rate per second."""
def __init__(self,
*,
qps : builtins.int = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"qps",b"qps"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["qps",b"qps"]) -> None: ...
global___RateLimitingSampler = RateLimitingSampler
diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py
index b5ae44795c..d46196bcf5 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py
+++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: opentelemetry/proto/trace/v1/trace.proto
-
+"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
@@ -20,6 +20,7 @@
package='opentelemetry.proto.trace.v1',
syntax='proto3',
serialized_options=b'\n\037io.opentelemetry.proto.trace.v1B\nTraceProtoP\001Z=github.com/open-telemetry/opentelemetry-proto/gen/go/trace/v1',
+ create_key=_descriptor._internal_create_key,
serialized_pb=b'\n(opentelemetry/proto/trace/v1/trace.proto\x12\x1copentelemetry.proto.trace.v1\x1a*opentelemetry/proto/common/v1/common.proto\x1a.opentelemetry/proto/resource/v1/resource.proto\"\xc2\x01\n\rResourceSpans\x12;\n\x08resource\x18\x01 \x01(\x0b\x32).opentelemetry.proto.resource.v1.Resource\x12`\n\x1dinstrumentation_library_spans\x18\x02 \x03(\x0b\x32\x39.opentelemetry.proto.trace.v1.InstrumentationLibrarySpans\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xbc\x01\n\x1bInstrumentationLibrarySpans\x12V\n\x17instrumentation_library\x18\x01 \x01(\x0b\x32\x35.opentelemetry.proto.common.v1.InstrumentationLibrary\x12\x31\n\x05spans\x18\x02 \x03(\x0b\x32\".opentelemetry.proto.trace.v1.Span\x12\x12\n\nschema_url\x18\x03 \x01(\t\"\xe6\x07\n\x04Span\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 \x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12\x16\n\x0eparent_span_id\x18\x04 \x01(\x0c\x12\x0c\n\x04name\x18\x05 \x01(\t\x12\x39\n\x04kind\x18\x06 \x01(\x0e\x32+.opentelemetry.proto.trace.v1.Span.SpanKind\x12\x1c\n\x14start_time_unix_nano\x18\x07 \x01(\x06\x12\x1a\n\x12\x65nd_time_unix_nano\x18\x08 \x01(\x06\x12;\n\nattributes\x18\t \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\n \x01(\r\x12\x38\n\x06\x65vents\x18\x0b \x03(\x0b\x32(.opentelemetry.proto.trace.v1.Span.Event\x12\x1c\n\x14\x64ropped_events_count\x18\x0c \x01(\r\x12\x36\n\x05links\x18\r \x03(\x0b\x32\'.opentelemetry.proto.trace.v1.Span.Link\x12\x1b\n\x13\x64ropped_links_count\x18\x0e \x01(\r\x12\x34\n\x06status\x18\x0f \x01(\x0b\x32$.opentelemetry.proto.trace.v1.Status\x1a\x8c\x01\n\x05\x45vent\x12\x16\n\x0etime_unix_nano\x18\x01 \x01(\x06\x12\x0c\n\x04name\x18\x02 \x01(\t\x12;\n\nattributes\x18\x03 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x04 \x01(\r\x1a\x9d\x01\n\x04Link\x12\x10\n\x08trace_id\x18\x01 \x01(\x0c\x12\x0f\n\x07span_id\x18\x02 
\x01(\x0c\x12\x13\n\x0btrace_state\x18\x03 \x01(\t\x12;\n\nattributes\x18\x04 \x03(\x0b\x32\'.opentelemetry.proto.common.v1.KeyValue\x12 \n\x18\x64ropped_attributes_count\x18\x05 \x01(\r\"\x99\x01\n\x08SpanKind\x12\x19\n\x15SPAN_KIND_UNSPECIFIED\x10\x00\x12\x16\n\x12SPAN_KIND_INTERNAL\x10\x01\x12\x14\n\x10SPAN_KIND_SERVER\x10\x02\x12\x14\n\x10SPAN_KIND_CLIENT\x10\x03\x12\x16\n\x12SPAN_KIND_PRODUCER\x10\x04\x12\x16\n\x12SPAN_KIND_CONSUMER\x10\x05\"\xdd\x07\n\x06Status\x12V\n\x0f\x64\x65precated_code\x18\x01 \x01(\x0e\x32\x39.opentelemetry.proto.trace.v1.Status.DeprecatedStatusCodeB\x02\x18\x01\x12\x0f\n\x07message\x18\x02 \x01(\t\x12=\n\x04\x63ode\x18\x03 \x01(\x0e\x32/.opentelemetry.proto.trace.v1.Status.StatusCode\"\xda\x05\n\x14\x44\x65precatedStatusCode\x12\x1d\n\x19\x44\x45PRECATED_STATUS_CODE_OK\x10\x00\x12$\n DEPRECATED_STATUS_CODE_CANCELLED\x10\x01\x12(\n$DEPRECATED_STATUS_CODE_UNKNOWN_ERROR\x10\x02\x12+\n\'DEPRECATED_STATUS_CODE_INVALID_ARGUMENT\x10\x03\x12,\n(DEPRECATED_STATUS_CODE_DEADLINE_EXCEEDED\x10\x04\x12$\n DEPRECATED_STATUS_CODE_NOT_FOUND\x10\x05\x12)\n%DEPRECATED_STATUS_CODE_ALREADY_EXISTS\x10\x06\x12,\n(DEPRECATED_STATUS_CODE_PERMISSION_DENIED\x10\x07\x12-\n)DEPRECATED_STATUS_CODE_RESOURCE_EXHAUSTED\x10\x08\x12.\n*DEPRECATED_STATUS_CODE_FAILED_PRECONDITION\x10\t\x12\"\n\x1e\x44\x45PRECATED_STATUS_CODE_ABORTED\x10\n\x12\'\n#DEPRECATED_STATUS_CODE_OUT_OF_RANGE\x10\x0b\x12(\n$DEPRECATED_STATUS_CODE_UNIMPLEMENTED\x10\x0c\x12)\n%DEPRECATED_STATUS_CODE_INTERNAL_ERROR\x10\r\x12&\n\"DEPRECATED_STATUS_CODE_UNAVAILABLE\x10\x0e\x12$\n DEPRECATED_STATUS_CODE_DATA_LOSS\x10\x0f\x12*\n&DEPRECATED_STATUS_CODE_UNAUTHENTICATED\x10\x10\"N\n\nStatusCode\x12\x15\n\x11STATUS_CODE_UNSET\x10\x00\x12\x12\n\x0eSTATUS_CODE_OK\x10\x01\x12\x15\n\x11STATUS_CODE_ERROR\x10\x02\x42n\n\x1fio.opentelemetry.proto.trace.v1B\nTraceProtoP\x01Z=github.com/open-telemetry/opentelemetry-proto/gen/go/trace/v1b\x06proto3'
,
dependencies=[opentelemetry_dot_proto_dot_common_dot_v1_dot_common__pb2.DESCRIPTOR,opentelemetry_dot_proto_dot_resource_dot_v1_dot_resource__pb2.DESCRIPTOR,])
@@ -31,31 +32,38 @@
full_name='opentelemetry.proto.trace.v1.Span.SpanKind',
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='SPAN_KIND_UNSPECIFIED', index=0, number=0,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPAN_KIND_INTERNAL', index=1, number=1,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPAN_KIND_SERVER', index=2, number=2,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPAN_KIND_CLIENT', index=3, number=3,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPAN_KIND_PRODUCER', index=4, number=4,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SPAN_KIND_CONSUMER', index=5, number=5,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
@@ -69,75 +77,93 @@
full_name='opentelemetry.proto.trace.v1.Status.DeprecatedStatusCode',
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_OK', index=0, number=0,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_CANCELLED', index=1, number=1,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_UNKNOWN_ERROR', index=2, number=2,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_INVALID_ARGUMENT', index=3, number=3,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_DEADLINE_EXCEEDED', index=4, number=4,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_NOT_FOUND', index=5, number=5,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_ALREADY_EXISTS', index=6, number=6,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_PERMISSION_DENIED', index=7, number=7,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_RESOURCE_EXHAUSTED', index=8, number=8,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_FAILED_PRECONDITION', index=9, number=9,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_ABORTED', index=10, number=10,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_OUT_OF_RANGE', index=11, number=11,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_UNIMPLEMENTED', index=12, number=12,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_INTERNAL_ERROR', index=13, number=13,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_UNAVAILABLE', index=14, number=14,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_DATA_LOSS', index=15, number=15,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DEPRECATED_STATUS_CODE_UNAUTHENTICATED', index=16, number=16,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
@@ -151,19 +177,23 @@
full_name='opentelemetry.proto.trace.v1.Status.StatusCode',
filename=None,
file=DESCRIPTOR,
+ create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='STATUS_CODE_UNSET', index=0, number=0,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STATUS_CODE_OK', index=1, number=1,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='STATUS_CODE_ERROR', index=2, number=2,
serialized_options=None,
- type=None),
+ type=None,
+ create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
@@ -179,6 +209,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='resource', full_name='opentelemetry.proto.trace.v1.ResourceSpans.resource', index=0,
@@ -186,21 +217,21 @@
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='instrumentation_library_spans', full_name='opentelemetry.proto.trace.v1.ResourceSpans.instrumentation_library_spans', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schema_url', full_name='opentelemetry.proto.trace.v1.ResourceSpans.schema_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -224,6 +255,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='instrumentation_library', full_name='opentelemetry.proto.trace.v1.InstrumentationLibrarySpans.instrumentation_library', index=0,
@@ -231,21 +263,21 @@
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='spans', full_name='opentelemetry.proto.trace.v1.InstrumentationLibrarySpans.spans', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='schema_url', full_name='opentelemetry.proto.trace.v1.InstrumentationLibrarySpans.schema_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -269,6 +301,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='time_unix_nano', full_name='opentelemetry.proto.trace.v1.Span.Event.time_unix_nano', index=0,
@@ -276,28 +309,28 @@
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='opentelemetry.proto.trace.v1.Span.Event.name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.trace.v1.Span.Event.attributes', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dropped_attributes_count', full_name='opentelemetry.proto.trace.v1.Span.Event.dropped_attributes_count', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -320,6 +353,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='trace_id', full_name='opentelemetry.proto.trace.v1.Span.Link.trace_id', index=0,
@@ -327,35 +361,35 @@
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='span_id', full_name='opentelemetry.proto.trace.v1.Span.Link.span_id', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trace_state', full_name='opentelemetry.proto.trace.v1.Span.Link.trace_state', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.trace.v1.Span.Link.attributes', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dropped_attributes_count', full_name='opentelemetry.proto.trace.v1.Span.Link.dropped_attributes_count', index=4,
number=5, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -378,6 +412,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='trace_id', full_name='opentelemetry.proto.trace.v1.Span.trace_id', index=0,
@@ -385,105 +420,105 @@
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='span_id', full_name='opentelemetry.proto.trace.v1.Span.span_id', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trace_state', full_name='opentelemetry.proto.trace.v1.Span.trace_state', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parent_span_id', full_name='opentelemetry.proto.trace.v1.Span.parent_span_id', index=3,
number=4, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='name', full_name='opentelemetry.proto.trace.v1.Span.name', index=4,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='kind', full_name='opentelemetry.proto.trace.v1.Span.kind', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_time_unix_nano', full_name='opentelemetry.proto.trace.v1.Span.start_time_unix_nano', index=6,
number=7, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end_time_unix_nano', full_name='opentelemetry.proto.trace.v1.Span.end_time_unix_nano', index=7,
number=8, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='attributes', full_name='opentelemetry.proto.trace.v1.Span.attributes', index=8,
number=9, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dropped_attributes_count', full_name='opentelemetry.proto.trace.v1.Span.dropped_attributes_count', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='opentelemetry.proto.trace.v1.Span.events', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dropped_events_count', full_name='opentelemetry.proto.trace.v1.Span.dropped_events_count', index=11,
number=12, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='links', full_name='opentelemetry.proto.trace.v1.Span.links', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='dropped_links_count', full_name='opentelemetry.proto.trace.v1.Span.dropped_links_count', index=13,
number=14, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='status', full_name='opentelemetry.proto.trace.v1.Span.status', index=14,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
@@ -508,6 +543,7 @@
filename=None,
file=DESCRIPTOR,
containing_type=None,
+ create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='deprecated_code', full_name='opentelemetry.proto.trace.v1.Status.deprecated_code', index=0,
@@ -515,21 +551,21 @@
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=b'\030\001', file=DESCRIPTOR),
+ serialized_options=b'\030\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='opentelemetry.proto.trace.v1.Status.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='code', full_name='opentelemetry.proto.trace.v1.Status.code', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
- serialized_options=None, file=DESCRIPTOR),
+ serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
diff --git a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi
index 46b37dc5d7..e187f03d5e 100644
--- a/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi
+++ b/opentelemetry-proto/src/opentelemetry/proto/trace/v1/trace_pb2.pyi
@@ -15,17 +15,26 @@ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor = ...
class ResourceSpans(google.protobuf.message.Message):
+ """A collection of InstrumentationLibrarySpans from a Resource."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
RESOURCE_FIELD_NUMBER: builtins.int
INSTRUMENTATION_LIBRARY_SPANS_FIELD_NUMBER: builtins.int
SCHEMA_URL_FIELD_NUMBER: builtins.int
- schema_url: typing.Text = ...
-
@property
- def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource: ...
-
+ def resource(self) -> opentelemetry.proto.resource.v1.resource_pb2.Resource:
+ """The resource for the spans in this message.
+ If this field is not set then no resource info is known.
+ """
+ pass
@property
- def instrumentation_library_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibrarySpans]: ...
+ def instrumentation_library_spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___InstrumentationLibrarySpans]:
+ """A list of InstrumentationLibrarySpans that originate from a resource."""
+ pass
+ schema_url: typing.Text = ...
+ """This schema_url applies to the data in the "resource" field. It does not apply
+ to the data in the "instrumentation_library_spans" field which have their own
+ schema_url field.
+ """
def __init__(self,
*,
@@ -33,22 +42,29 @@ class ResourceSpans(google.protobuf.message.Message):
instrumentation_library_spans : typing.Optional[typing.Iterable[global___InstrumentationLibrarySpans]] = ...,
schema_url : typing.Text = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"resource",b"resource"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"instrumentation_library_spans",b"instrumentation_library_spans",u"resource",b"resource",u"schema_url",b"schema_url"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["resource",b"resource"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library_spans",b"instrumentation_library_spans","resource",b"resource","schema_url",b"schema_url"]) -> None: ...
global___ResourceSpans = ResourceSpans
class InstrumentationLibrarySpans(google.protobuf.message.Message):
+ """A collection of Spans produced by an InstrumentationLibrary."""
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
INSTRUMENTATION_LIBRARY_FIELD_NUMBER: builtins.int
SPANS_FIELD_NUMBER: builtins.int
SCHEMA_URL_FIELD_NUMBER: builtins.int
- schema_url: typing.Text = ...
-
@property
- def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary: ...
-
+ def instrumentation_library(self) -> opentelemetry.proto.common.v1.common_pb2.InstrumentationLibrary:
+ """The instrumentation library information for the spans in this message.
+ Semantically when InstrumentationLibrary isn't set, it is equivalent with
+ an empty instrumentation library name (unknown).
+ """
+ pass
@property
- def spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span]: ...
+ def spans(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span]:
+ """A list of Spans that originate from an instrumentation library."""
+ pass
+ schema_url: typing.Text = ...
+ """This schema_url applies to all spans and span events in the "spans" field."""
def __init__(self,
*,
@@ -56,41 +72,120 @@ class InstrumentationLibrarySpans(google.protobuf.message.Message):
spans : typing.Optional[typing.Iterable[global___Span]] = ...,
schema_url : typing.Text = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"instrumentation_library",b"instrumentation_library",u"schema_url",b"schema_url",u"spans",b"spans"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["instrumentation_library",b"instrumentation_library","schema_url",b"schema_url","spans",b"spans"]) -> None: ...
global___InstrumentationLibrarySpans = InstrumentationLibrarySpans
class Span(google.protobuf.message.Message):
+ """Span represents a single operation within a trace. Spans can be
+ nested to form a trace tree. Spans may also be linked to other spans
+ from the same or different trace and form graphs. Often, a trace
+ contains a root span that describes the end-to-end latency, and one
+ or more subspans for its sub-operations. A trace can also contain
+ multiple root spans, or none at all. Spans do not need to be
+ contiguous - there may be gaps or overlaps between spans in a trace.
+
+ The next available field id is 17.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- class _SpanKind(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[SpanKind.V], builtins.type):
+ class SpanKind(_SpanKind, metaclass=_SpanKindEnumTypeWrapper):
+ """SpanKind is the type of span. Can be used to specify additional relationships between spans
+ in addition to a parent/child relationship.
+ """
+ pass
+ class _SpanKind:
+ V = typing.NewType('V', builtins.int)
+ class _SpanKindEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_SpanKind.V], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
SPAN_KIND_UNSPECIFIED = Span.SpanKind.V(0)
+ """Unspecified. Do NOT use as default.
+ Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED.
+ """
+
SPAN_KIND_INTERNAL = Span.SpanKind.V(1)
+ """Indicates that the span represents an internal operation within an application,
+ as opposed to an operation happening at the boundaries. Default value.
+ """
+
SPAN_KIND_SERVER = Span.SpanKind.V(2)
+ """Indicates that the span covers server-side handling of an RPC or other
+ remote network request.
+ """
+
SPAN_KIND_CLIENT = Span.SpanKind.V(3)
+ """Indicates that the span describes a request to some remote service."""
+
SPAN_KIND_PRODUCER = Span.SpanKind.V(4)
+ """Indicates that the span describes a producer sending a message to a broker.
+ Unlike CLIENT and SERVER, there is often no direct critical path latency relationship
+ between producer and consumer spans. A PRODUCER span ends when the message was accepted
+ by the broker while the logical processing of the message might span a much longer time.
+ """
+
SPAN_KIND_CONSUMER = Span.SpanKind.V(5)
- class SpanKind(metaclass=_SpanKind):
- V = typing.NewType('V', builtins.int)
+ """Indicates that the span describes consumer receiving a message from a broker.
+ Like the PRODUCER kind, there is often no direct critical path latency relationship
+ between producer and consumer spans.
+ """
+
+
SPAN_KIND_UNSPECIFIED = Span.SpanKind.V(0)
+ """Unspecified. Do NOT use as default.
+ Implementations MAY assume SpanKind to be INTERNAL when receiving UNSPECIFIED.
+ """
+
SPAN_KIND_INTERNAL = Span.SpanKind.V(1)
+ """Indicates that the span represents an internal operation within an application,
+ as opposed to an operation happening at the boundaries. Default value.
+ """
+
SPAN_KIND_SERVER = Span.SpanKind.V(2)
+ """Indicates that the span covers server-side handling of an RPC or other
+ remote network request.
+ """
+
SPAN_KIND_CLIENT = Span.SpanKind.V(3)
+ """Indicates that the span describes a request to some remote service."""
+
SPAN_KIND_PRODUCER = Span.SpanKind.V(4)
+ """Indicates that the span describes a producer sending a message to a broker.
+ Unlike CLIENT and SERVER, there is often no direct critical path latency relationship
+ between producer and consumer spans. A PRODUCER span ends when the message was accepted
+ by the broker while the logical processing of the message might span a much longer time.
+ """
+
SPAN_KIND_CONSUMER = Span.SpanKind.V(5)
+ """Indicates that the span describes consumer receiving a message from a broker.
+ Like the PRODUCER kind, there is often no direct critical path latency relationship
+ between producer and consumer spans.
+ """
+
class Event(google.protobuf.message.Message):
+ """Event is a time-stamped annotation of the span, consisting of user-supplied
+ text description and key-value pairs.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
TIME_UNIX_NANO_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
ATTRIBUTES_FIELD_NUMBER: builtins.int
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
time_unix_nano: builtins.int = ...
+ """time_unix_nano is the time the event occurred."""
+
name: typing.Text = ...
- dropped_attributes_count: builtins.int = ...
+ """name of the event.
+ This field is semantically required to be set to non-empty string.
+ """
@property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """attributes is a collection of attribute key/value pairs on the event."""
+ pass
+ dropped_attributes_count: builtins.int = ...
+ """dropped_attributes_count is the number of dropped attributes. If the value is 0,
+ then no attributes were dropped.
+ """
def __init__(self,
*,
@@ -99,9 +194,14 @@ class Span(google.protobuf.message.Message):
attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
dropped_attributes_count : builtins.int = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"attributes",b"attributes",u"dropped_attributes_count",b"dropped_attributes_count",u"name",b"name",u"time_unix_nano",b"time_unix_nano"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","name",b"name","time_unix_nano",b"time_unix_nano"]) -> None: ...
class Link(google.protobuf.message.Message):
+ """A pointer from the current span to another span in the same trace or in a
+ different trace. For example, this can be used in batching operations,
+ where a single batch handler processes multiple requests from different
+ traces or when the handler receives a request from a different project.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
TRACE_ID_FIELD_NUMBER: builtins.int
SPAN_ID_FIELD_NUMBER: builtins.int
@@ -109,12 +209,24 @@ class Span(google.protobuf.message.Message):
ATTRIBUTES_FIELD_NUMBER: builtins.int
DROPPED_ATTRIBUTES_COUNT_FIELD_NUMBER: builtins.int
trace_id: builtins.bytes = ...
+ """A unique identifier of a trace that this linked span is part of. The ID is a
+ 16-byte array.
+ """
+
span_id: builtins.bytes = ...
+ """A unique identifier for the linked span. The ID is an 8-byte array."""
+
trace_state: typing.Text = ...
- dropped_attributes_count: builtins.int = ...
+ """The trace_state associated with the link."""
@property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """attributes is a collection of attribute key/value pairs on the link."""
+ pass
+ dropped_attributes_count: builtins.int = ...
+ """dropped_attributes_count is the number of dropped attributes. If the value is 0,
+ then no attributes were dropped.
+ """
def __init__(self,
*,
@@ -124,7 +236,7 @@ class Span(google.protobuf.message.Message):
attributes : typing.Optional[typing.Iterable[opentelemetry.proto.common.v1.common_pb2.KeyValue]] = ...,
dropped_attributes_count : builtins.int = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"attributes",b"attributes",u"dropped_attributes_count",b"dropped_attributes_count",u"span_id",b"span_id",u"trace_id",b"trace_id",u"trace_state",b"trace_state"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","span_id",b"span_id","trace_id",b"trace_id","trace_state",b"trace_state"]) -> None: ...
TRACE_ID_FIELD_NUMBER: builtins.int
SPAN_ID_FIELD_NUMBER: builtins.int
@@ -142,29 +254,122 @@ class Span(google.protobuf.message.Message):
DROPPED_LINKS_COUNT_FIELD_NUMBER: builtins.int
STATUS_FIELD_NUMBER: builtins.int
trace_id: builtins.bytes = ...
+ """A unique identifier for a trace. All spans from the same trace share
+ the same `trace_id`. The ID is a 16-byte array. An ID with all zeroes
+ is considered invalid.
+
+ This field is semantically required. Receiver should generate new
+ random trace_id if empty or invalid trace_id was received.
+
+ This field is required.
+ """
+
span_id: builtins.bytes = ...
+ """A unique identifier for a span within a trace, assigned when the span
+ is created. The ID is an 8-byte array. An ID with all zeroes is considered
+ invalid.
+
+ This field is semantically required. Receiver should generate new
+ random span_id if empty or invalid span_id was received.
+
+ This field is required.
+ """
+
trace_state: typing.Text = ...
+ """trace_state conveys information about request position in multiple distributed tracing graphs.
+ It is a trace_state in w3c-trace-context format: https://www.w3.org/TR/trace-context/#tracestate-header
+ See also https://github.com/w3c/distributed-tracing for more details about this field.
+ """
+
parent_span_id: builtins.bytes = ...
+ """The `span_id` of this span's parent span. If this is a root span, then this
+ field must be empty. The ID is an 8-byte array.
+ """
+
name: typing.Text = ...
+ """A description of the span's operation.
+
+ For example, the name can be a qualified method name or a file name
+ and a line number where the operation is called. A best practice is to use
+ the same display name at the same call point in an application.
+ This makes it easier to correlate spans in different traces.
+
+ This field is semantically required to be set to non-empty string.
+ When null or empty string received - receiver may use string "name"
+ as a replacement. There might be smarter algorithms implemented by
+ receiver to fix the empty span name.
+
+ This field is required.
+ """
+
kind: global___Span.SpanKind.V = ...
+ """Distinguishes between spans generated in a particular context. For example,
+ two spans with the same name may be distinguished using `CLIENT` (caller)
+ and `SERVER` (callee) to identify queueing latency associated with the span.
+ """
+
start_time_unix_nano: builtins.int = ...
+ """start_time_unix_nano is the start time of the span. On the client side, this is the time
+ kept by the local machine where the span execution starts. On the server side, this
+ is the time when the server's application handler starts running.
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
+
+ This field is semantically required and it is expected that end_time >= start_time.
+ """
+
end_time_unix_nano: builtins.int = ...
- dropped_attributes_count: builtins.int = ...
- dropped_events_count: builtins.int = ...
- dropped_links_count: builtins.int = ...
+ """end_time_unix_nano is the end time of the span. On the client side, this is the time
+ kept by the local machine where the span execution ends. On the server side, this
+ is the time when the server application handler stops running.
+ Value is UNIX Epoch time in nanoseconds since 00:00:00 UTC on 1 January 1970.
- @property
- def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]: ...
+ This field is semantically required and it is expected that end_time >= start_time.
+ """
@property
- def events(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Event]: ...
+ def attributes(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[opentelemetry.proto.common.v1.common_pb2.KeyValue]:
+ """attributes is a collection of key/value pairs. The value can be a string,
+ an integer, a double or the Boolean values `true` or `false`. Note, global attributes
+ like server name can be set using the resource API. Examples of attributes:
+
+ "/http/user_agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
+ "/http/server_latency": 300
+ "abc.com/myattribute": true
+ "abc.com/score": 10.239
+ """
+ pass
+ dropped_attributes_count: builtins.int = ...
+ """dropped_attributes_count is the number of attributes that were discarded. Attributes
+ can be discarded because their keys are too long or because there are too many
+ attributes. If this value is 0, then no attributes were dropped.
+ """
@property
- def links(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Link]: ...
+ def events(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Event]:
+ """events is a collection of Event items."""
+ pass
+ dropped_events_count: builtins.int = ...
+ """dropped_events_count is the number of dropped events. If the value is 0, then no
+ events were dropped.
+ """
@property
- def status(self) -> global___Status: ...
+ def links(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___Span.Link]:
+ """links is a collection of Links, which are references from this span to a span
+ in the same or different trace.
+ """
+ pass
+ dropped_links_count: builtins.int = ...
+ """dropped_links_count is the number of dropped links after the maximum size was
+ enforced. If this value is 0, then no links were dropped.
+ """
+ @property
+ def status(self) -> global___Status:
+ """An optional final status for this span. Semantically when Status isn't set, it means
+ span's status code is unset, i.e. assume STATUS_CODE_UNSET (code = 0).
+ """
+ pass
def __init__(self,
*,
trace_id : builtins.bytes = ...,
@@ -183,13 +388,58 @@ class Span(google.protobuf.message.Message):
dropped_links_count : builtins.int = ...,
status : typing.Optional[global___Status] = ...,
) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal[u"status",b"status"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"attributes",b"attributes",u"dropped_attributes_count",b"dropped_attributes_count",u"dropped_events_count",b"dropped_events_count",u"dropped_links_count",b"dropped_links_count",u"end_time_unix_nano",b"end_time_unix_nano",u"events",b"events",u"kind",b"kind",u"links",b"links",u"name",b"name",u"parent_span_id",b"parent_span_id",u"span_id",b"span_id",u"start_time_unix_nano",b"start_time_unix_nano",u"status",b"status",u"trace_id",b"trace_id",u"trace_state",b"trace_state"]) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["status",b"status"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["attributes",b"attributes","dropped_attributes_count",b"dropped_attributes_count","dropped_events_count",b"dropped_events_count","dropped_links_count",b"dropped_links_count","end_time_unix_nano",b"end_time_unix_nano","events",b"events","kind",b"kind","links",b"links","name",b"name","parent_span_id",b"parent_span_id","span_id",b"span_id","start_time_unix_nano",b"start_time_unix_nano","status",b"status","trace_id",b"trace_id","trace_state",b"trace_state"]) -> None: ...
global___Span = Span
class Status(google.protobuf.message.Message):
+ """The Status type defines a logical error model that is suitable for different
+ programming environments, including REST APIs and RPC APIs.
+ IMPORTANT: Backward compatibility notes:
+
+ To ensure any pair of senders and receivers continues to correctly signal and
+ interpret erroneous situations, the senders and receivers MUST follow these rules:
+
+ 1. Old senders and receivers that are not aware of `code` field will continue using
+ the `deprecated_code` field to signal and interpret erroneous situation.
+
+ 2. New senders, which are aware of the `code` field MUST set both the
+ `deprecated_code` and `code` fields according to the following rules:
+
+ if code==STATUS_CODE_UNSET then `deprecated_code` MUST be
+ set to DEPRECATED_STATUS_CODE_OK.
+
+ if code==STATUS_CODE_OK then `deprecated_code` MUST be
+ set to DEPRECATED_STATUS_CODE_OK.
+
+ if code==STATUS_CODE_ERROR then `deprecated_code` MUST be
+ set to DEPRECATED_STATUS_CODE_UNKNOWN_ERROR.
+
+ These rules allow old receivers to correctly interpret data received from new senders.
+
+ 3. New receivers MUST look at both the `code` and `deprecated_code` fields in order
+ to interpret the overall status:
+
+ If code==STATUS_CODE_UNSET then the value of `deprecated_code` is the
+ carrier of the overall status according to these rules:
+
+ if deprecated_code==DEPRECATED_STATUS_CODE_OK then the receiver MUST interpret
+ the overall status to be STATUS_CODE_UNSET.
+
+ if deprecated_code!=DEPRECATED_STATUS_CODE_OK then the receiver MUST interpret
+ the overall status to be STATUS_CODE_ERROR.
+
+ If code!=STATUS_CODE_UNSET then the value of `deprecated_code` MUST be
+ ignored, the `code` field is the sole carrier of the status.
+
+ These rules allow new receivers to correctly interpret data received from old senders.
+ """
DESCRIPTOR: google.protobuf.descriptor.Descriptor = ...
- class _DeprecatedStatusCode(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[DeprecatedStatusCode.V], builtins.type):
+ class DeprecatedStatusCode(_DeprecatedStatusCode, metaclass=_DeprecatedStatusCodeEnumTypeWrapper):
+ pass
+ class _DeprecatedStatusCode:
+ V = typing.NewType('V', builtins.int)
+ class _DeprecatedStatusCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DeprecatedStatusCode.V], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
DEPRECATED_STATUS_CODE_OK = Status.DeprecatedStatusCode.V(0)
DEPRECATED_STATUS_CODE_CANCELLED = Status.DeprecatedStatusCode.V(1)
@@ -208,8 +458,7 @@ class Status(google.protobuf.message.Message):
DEPRECATED_STATUS_CODE_UNAVAILABLE = Status.DeprecatedStatusCode.V(14)
DEPRECATED_STATUS_CODE_DATA_LOSS = Status.DeprecatedStatusCode.V(15)
DEPRECATED_STATUS_CODE_UNAUTHENTICATED = Status.DeprecatedStatusCode.V(16)
- class DeprecatedStatusCode(metaclass=_DeprecatedStatusCode):
- V = typing.NewType('V', builtins.int)
+
DEPRECATED_STATUS_CODE_OK = Status.DeprecatedStatusCode.V(0)
DEPRECATED_STATUS_CODE_CANCELLED = Status.DeprecatedStatusCode.V(1)
DEPRECATED_STATUS_CODE_UNKNOWN_ERROR = Status.DeprecatedStatusCode.V(2)
@@ -228,23 +477,56 @@ class Status(google.protobuf.message.Message):
DEPRECATED_STATUS_CODE_DATA_LOSS = Status.DeprecatedStatusCode.V(15)
DEPRECATED_STATUS_CODE_UNAUTHENTICATED = Status.DeprecatedStatusCode.V(16)
- class _StatusCode(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[StatusCode.V], builtins.type):
+ class StatusCode(_StatusCode, metaclass=_StatusCodeEnumTypeWrapper):
+ """For the semantics of status codes see
+ https://github.com/open-telemetry/opentelemetry-specification/blob/main/specification/trace/api.md#set-status
+ """
+ pass
+ class _StatusCode:
+ V = typing.NewType('V', builtins.int)
+ class _StatusCodeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_StatusCode.V], builtins.type):
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor = ...
STATUS_CODE_UNSET = Status.StatusCode.V(0)
+ """The default status."""
+
STATUS_CODE_OK = Status.StatusCode.V(1)
+ """The Span has been validated by an Application developers or Operator to have
+ completed successfully.
+ """
+
STATUS_CODE_ERROR = Status.StatusCode.V(2)
- class StatusCode(metaclass=_StatusCode):
- V = typing.NewType('V', builtins.int)
+ """The Span contains an error."""
+
+
STATUS_CODE_UNSET = Status.StatusCode.V(0)
+ """The default status."""
+
STATUS_CODE_OK = Status.StatusCode.V(1)
+ """The Span has been validated by an Application developers or Operator to have
+ completed successfully.
+ """
+
STATUS_CODE_ERROR = Status.StatusCode.V(2)
+ """The Span contains an error."""
+
DEPRECATED_CODE_FIELD_NUMBER: builtins.int
MESSAGE_FIELD_NUMBER: builtins.int
CODE_FIELD_NUMBER: builtins.int
deprecated_code: global___Status.DeprecatedStatusCode.V = ...
+ """The deprecated status code. This is an optional field.
+
+ This field is deprecated and is replaced by the `code` field below. See backward
+ compatibility notes below. According to our stability guarantees this field
+ will be removed in 12 months, on Oct 22, 2021. All usage of old senders and
+ receivers that do not understand the `code` field MUST be phased out by then.
+ """
+
message: typing.Text = ...
+ """A developer-facing human readable error message."""
+
code: global___Status.StatusCode.V = ...
+ """The status code."""
def __init__(self,
*,
@@ -252,5 +534,5 @@ class Status(google.protobuf.message.Message):
message : typing.Text = ...,
code : global___Status.StatusCode.V = ...,
) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal[u"code",b"code",u"deprecated_code",b"deprecated_code",u"message",b"message"]) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["code",b"code","deprecated_code",b"deprecated_code","message",b"message"]) -> None: ...
global___Status = Status
diff --git a/pyproject.toml b/pyproject.toml
index eec7dacdcf..15e2fed2e5 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -8,11 +8,7 @@ exclude = '''
exporter/opentelemetry-exporter-jaeger-proto-grpc/src/opentelemetry/exporter/jaeger/proto/grpc/gen|
exporter/opentelemetry-exporter-jaeger-thrift/src/opentelemetry/exporter/jaeger/thrift/gen|
exporter/opentelemetry-exporter-zipkin-proto-http/src/opentelemetry/exporter/zipkin/proto/http/v2/gen|
- opentelemetry-proto/src/opentelemetry/proto/collector|
- opentelemetry-proto/src/opentelemetry/proto/common|
- opentelemetry-proto/src/opentelemetry/proto/metrics|
- opentelemetry-proto/src/opentelemetry/proto/resource|
- opentelemetry-proto/src/opentelemetry/proto/trace
+ opentelemetry-proto/src/opentelemetry/proto/.*/.*
)/
)
'''
diff --git a/scripts/build.sh b/scripts/build.sh
index 2f40f1a003..63faa001bc 100755
--- a/scripts/build.sh
+++ b/scripts/build.sh
@@ -16,7 +16,7 @@ DISTDIR=dist
mkdir -p $DISTDIR
rm -rf $DISTDIR/*
- for d in opentelemetry-api/ opentelemetry-sdk/ opentelemetry-instrumentation/ opentelemetry-proto/ opentelemetry-distro/ opentelemetry-semantic-conventions/ exporter/*/ shim/*/ propagator/*/; do
+ for d in opentelemetry-api/ opentelemetry-sdk/ opentelemetry-proto/ opentelemetry-semantic-conventions/ exporter/*/ shim/*/ propagator/*/; do
(
echo "building $d"
cd "$d"
diff --git a/tox.ini b/tox.ini
index 92a6db3569..bd48d073d2 100644
--- a/tox.ini
+++ b/tox.ini
@@ -13,9 +13,6 @@ envlist =
py3{6,7,8,9}-opentelemetry-sdk
pypy3-opentelemetry-sdk
- py3{6,7,8,9}-opentelemetry-instrumentation
- pypy3-opentelemetry-instrumentation
-
py3{6,7,8,9}-opentelemetry-semantic-conventions
pypy3-opentelemetry-semantic-conventions
@@ -23,9 +20,6 @@ envlist =
py3{6,7,8,9}-opentelemetry-getting-started
pypy3-opentelemetry-getting-started
- py3{6,7,8,9}-opentelemetry-distro
- pypy3-opentelemetry-distro
-
py3{6,7,8,9}-opentelemetry-opentracing-shim
pypy3-opentelemetry-opentracing-shim
@@ -92,10 +86,8 @@ changedir =
sdk: opentelemetry-sdk/tests
protobuf: opentelemetry-proto/tests
semantic-conventions: opentelemetry-semantic-conventions/tests
- instrumentation: opentelemetry-instrumentation/tests
getting-started: docs/getting_started/tests
opentracing-shim: shim/opentelemetry-opentracing-shim/tests
- distro: opentelemetry-distro/tests
exporter-jaeger-combined: exporter/opentelemetry-exporter-jaeger/tests
exporter-jaeger-proto-grpc: exporter/opentelemetry-exporter-jaeger-proto-grpc/tests
@@ -116,17 +108,13 @@ commands_pre =
py3{6,7,8,9}: python -m pip install -U pip setuptools wheel
; Install common packages for all the tests. These are not needed in all the
; cases but it saves a lot of boilerplate in this file.
- opentelemetry: pip install {toxinidir}/opentelemetry-api {toxinidir}/opentelemetry-semantic-conventions {toxinidir}/opentelemetry-instrumentation {toxinidir}/opentelemetry-sdk {toxinidir}/tests/util
-
- sdk: pip install {toxinidir}/opentelemetry-instrumentation
- opentracing-shim: pip install {toxinidir}/opentelemetry-instrumentation
+ opentelemetry: pip install {toxinidir}/opentelemetry-api {toxinidir}/opentelemetry-semantic-conventions {toxinidir}/opentelemetry-sdk {toxinidir}/tests/util
protobuf: pip install {toxinidir}/opentelemetry-proto
- distro: pip install {toxinidir}/opentelemetry-distro
- instrumentation: pip install {toxinidir}/opentelemetry-instrumentation
- getting-started: pip install requests==2.26.0 flask==2.0.1 -e {toxinidir}/opentelemetry-instrumentation
+ getting-started: pip install requests==2.26.0 flask==2.0.1
getting-started: pip install -e "{env:CONTRIB_REPO}#egg=opentelemetry-util-http&subdirectory=util/opentelemetry-util-http"
+ getting-started: pip install -e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation&subdirectory=opentelemetry-instrumentation"
getting-started: pip install -e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation-requests&subdirectory=instrumentation/opentelemetry-instrumentation-requests"
getting-started: pip install -e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation-wsgi&subdirectory=instrumentation/opentelemetry-instrumentation-wsgi"
getting-started: pip install -e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation-flask&subdirectory=instrumentation/opentelemetry-instrumentation-flask"
@@ -145,9 +133,7 @@ commands_pre =
exporter-otlp-proto-http: pip install {toxinidir}/exporter/opentelemetry-exporter-otlp-proto-http
exporter-jaeger-combined: pip install {toxinidir}/exporter/opentelemetry-exporter-jaeger-proto-grpc {toxinidir}/exporter/opentelemetry-exporter-jaeger-thrift {toxinidir}/exporter/opentelemetry-exporter-jaeger
- exporter-jaeger-combined: pip install {toxinidir}/opentelemetry-instrumentation
exporter-jaeger-proto-grpc: pip install {toxinidir}/exporter/opentelemetry-exporter-jaeger-proto-grpc
- exporter-jaeger-proto-grpc: pip install {toxinidir}/opentelemetry-instrumentation
exporter-jaeger-thrift: pip install {toxinidir}/exporter/opentelemetry-exporter-jaeger-thrift
opentracing-shim: pip install {toxinidir}/opentelemetry-sdk
@@ -204,7 +190,6 @@ deps =
commands_pre =
python -m pip install -e {toxinidir}/opentelemetry-api[test]
python -m pip install -e {toxinidir}/opentelemetry-semantic-conventions[test]
- python -m pip install -e {toxinidir}/opentelemetry-instrumentation[test]
python -m pip install -e {toxinidir}/opentelemetry-sdk[test]
python -m pip install -e {toxinidir}/opentelemetry-proto[test]
python -m pip install -e {toxinidir}/tests/util[test]
@@ -221,7 +206,6 @@ commands_pre =
python -m pip install -e {toxinidir}/exporter/opentelemetry-exporter-zipkin[test]
python -m pip install -e {toxinidir}/propagator/opentelemetry-propagator-b3[test]
python -m pip install -e {toxinidir}/propagator/opentelemetry-propagator-jaeger[test]
- python -m pip install -e {toxinidir}/opentelemetry-distro[test]
commands =
python scripts/eachdist.py lint --check-only
@@ -234,9 +218,6 @@ deps =
changedir = docs
-commands-pre =
- python -m pip install {toxinidir}/opentelemetry-instrumentation
-
commands =
sphinx-build -E -a -W -b html -T . _build/html
@@ -252,9 +233,9 @@
commands_pre =
pip install -e {toxinidir}/opentelemetry-api \
-e {toxinidir}/opentelemetry-semantic-conventions \
- -e {toxinidir}/opentelemetry-instrumentation \
-e {toxinidir}/opentelemetry-sdk \
-e "{env:CONTRIB_REPO}#egg=opentelemetry-util-http&subdirectory=util/opentelemetry-util-http" \
+ -e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation&subdirectory=opentelemetry-instrumentation" \
-e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation-requests&subdirectory=instrumentation/opentelemetry-instrumentation-requests" \
-e "{env:CONTRIB_REPO}#egg=opentelemetry-instrumentation-wsgi&subdirectory=instrumentation/opentelemetry-instrumentation-wsgi"
@@ -272,7 +256,6 @@ changedir =
commands_pre =
pip install -e {toxinidir}/opentelemetry-api \
-e {toxinidir}/opentelemetry-semantic-conventions \
- -e {toxinidir}/opentelemetry-instrumentation \
-e {toxinidir}/opentelemetry-sdk \
-e {toxinidir}/tests/util \
-e {toxinidir}/exporter/opentelemetry-exporter-opencensus \
diff --git a/website_docs/getting-started.md b/website_docs/getting-started.md
index def6372c62..4453c06eb8 100644
--- a/website_docs/getting-started.md
+++ b/website_docs/getting-started.md
@@ -1,13 +1,11 @@
---
-date: '2021-08-30T16:49:17.700Z'
+date: '2021-10-05T20:20:20.000Z'
docname: getting-started
images: {}
path: /getting-started
title: Getting Started
---
-# Getting Started
-
This guide walks you through instrumenting a Python application with `opentelemetry-python`.
For more elaborate examples, see [examples](https://github.com/open-telemetry/opentelemetry-python/tree/main/docs/examples/).