Skip to content

Commit

Permalink
Merge branch 'main' into andrew.glaude/dogstatsd-client-SPLIT
Browse files Browse the repository at this point in the history
  • Loading branch information
ajgajg1134 committed Sep 20, 2024
2 parents 844406a + 09d1ef5 commit ea71062
Show file tree
Hide file tree
Showing 12 changed files with 987 additions and 34 deletions.
7 changes: 5 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

4 changes: 2 additions & 2 deletions LICENSE-3rdparty.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
root_name: datadog-alloc, builder, build_common, datadog-profiling-ffi, data-pipeline-ffi, data-pipeline, datadog-ddsketch, datadog-trace-normalization, datadog-trace-protobuf, datadog-trace-utils, ddcommon, tinybytes, dogstatsd-client, ddcommon-ffi, datadog-crashtracker-ffi, datadog-crashtracker, ddtelemetry, datadog-profiling, ddtelemetry-ffi, symbolizer-ffi, tools, datadog-profiling-replayer, dogstatsd, datadog-ipc, datadog-ipc-macros, tarpc, tarpc-plugins, spawn_worker, cc_utils, datadog-sidecar, datadog-remote-config, datadog-dynamic-configuration, datadog-sidecar-macros, datadog-sidecar-ffi, sidecar_mockgen, datadog-trace-obfuscation, test_spawn_from_lib, datadog-serverless-trace-mini-agent, datadog-trace-mini-agent
root_name: datadog-alloc, builder, build_common, datadog-profiling-ffi, data-pipeline-ffi, data-pipeline, datadog-ddsketch, datadog-trace-normalization, datadog-trace-protobuf, datadog-trace-obfuscation, datadog-trace-utils, ddcommon, tinybytes, dogstatsd-client, ddcommon-ffi, datadog-crashtracker-ffi, datadog-crashtracker, ddtelemetry, datadog-profiling, ddtelemetry-ffi, symbolizer-ffi, tools, datadog-profiling-replayer, dogstatsd, datadog-ipc, datadog-ipc-macros, tarpc, tarpc-plugins, spawn_worker, cc_utils, datadog-sidecar, datadog-remote-config, datadog-dynamic-configuration, datadog-sidecar-macros, datadog-sidecar-ffi, sidecar_mockgen, test_spawn_from_lib, datadog-serverless-trace-mini-agent, datadog-trace-mini-agent
third_party_libraries:
- package_name: addr2line
package_version: 0.21.0
Expand Down Expand Up @@ -30830,7 +30830,7 @@ third_party_libraries:
IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
- package_name: uuid
package_version: 1.8.0
package_version: 1.10.0
repository: https://github.com/uuid-rs/uuid
license: Apache-2.0 OR MIT
licenses:
Expand Down
27 changes: 22 additions & 5 deletions data-pipeline-ffi/src/trace_exporter.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@ use ddcommon_ffi::{
slice::{AsBytes, ByteSlice},
CharSlice, MaybeError,
};
use std::{ffi::c_char, ptr::NonNull};
use std::{ffi::c_char, ptr::NonNull, time::Duration};

/// Create a new TraceExporter instance.
///
Expand All @@ -20,6 +20,10 @@ use std::{ffi::c_char, ptr::NonNull};
/// * `language` - The language of the client library.
/// * `language_version` - The version of the language of the client library.
/// * `language_interpreter` - The interpreter of the language of the client library.
/// * `hostname` - The hostname of the application, used for stats aggregation
/// * `env` - The environment of the application, used for stats aggregation
/// * `version` - The version of the application, used for stats aggregation
/// * `service` - The service name of the application, used for stats aggregation
/// * `input_format` - The input format of the traces. Setting this to Proxy will send the trace
/// data to the Datadog Agent as is.
/// * `output_format` - The output format of the traces to send to the Datadog Agent. If using the
Expand All @@ -35,25 +39,38 @@ pub unsafe extern "C" fn ddog_trace_exporter_new(
language: CharSlice,
language_version: CharSlice,
language_interpreter: CharSlice,
hostname: CharSlice,
env: CharSlice,
version: CharSlice,
service: CharSlice,
input_format: TraceExporterInputFormat,
output_format: TraceExporterOutputFormat,
compute_stats: bool,
agent_response_callback: extern "C" fn(*const c_char),
) -> MaybeError {
let callback_wrapper = ResponseCallbackWrapper {
response_callback: agent_response_callback,
};
// TODO - handle errors - https://datadoghq.atlassian.net/browse/APMSP-1095
let exporter = TraceExporter::builder()
let mut builder = TraceExporter::builder()
.set_url(url.to_utf8_lossy().as_ref())
.set_tracer_version(tracer_version.to_utf8_lossy().as_ref())
.set_language(language.to_utf8_lossy().as_ref())
.set_language_version(language_version.to_utf8_lossy().as_ref())
.set_language_interpreter(language_interpreter.to_utf8_lossy().as_ref())
.set_hostname(hostname.to_utf8_lossy().as_ref())
.set_env(env.to_utf8_lossy().as_ref())
.set_version(version.to_utf8_lossy().as_ref())
.set_service(service.to_utf8_lossy().as_ref())
.set_input_format(input_format)
.set_output_format(output_format)
.set_response_callback(Box::new(callback_wrapper))
.build()
.unwrap();
.set_response_callback(Box::new(callback_wrapper));
if compute_stats {
builder = builder.enable_stats(Duration::from_secs(10))
// TODO: APMSP-1317 Enable peer tags aggregation and stats by span_kind based on agent
// configuration
}
let exporter = builder.build().unwrap();
out_handle.as_ptr().write(Box::new(exporter));
MaybeError::None
}
Expand Down
7 changes: 5 additions & 2 deletions data-pipeline/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,18 @@ hyper = {version = "0.14", features = ["client"], default-features = false}
log = "0.4"
rmp-serde = "1.1.1"
bytes = "1.4"
tokio = {version = "1.23", features = ["rt"], default-features = false}
either = "1.13.0"
tokio = { version = "1.23", features = ["rt", "test-util", "time"], default-features = false }

ddcommon = { path = "../ddcommon" }
datadog-trace-protobuf = { path = "../trace-protobuf" }
datadog-trace-utils = { path = "../trace-utils" }
datadog-trace-normalization = { path = "../trace-normalization" }
datadog-ddsketch = { path = "../ddsketch"}
dogstatsd-client = { path = "../dogstatsd-client"}
datadog-trace-obfuscation = { path = "../trace-obfuscation" }
uuid = { version = "1.10.0", features = ["v4"] }
tokio-util = "0.7.11"

[lib]
bench = false
Expand All @@ -35,6 +38,6 @@ harness = false
path = "benches/main.rs"

[dev-dependencies]
httpmock = "0.7.0"
criterion = "0.5.1"
rand = "0.8.5"
httpmock = "0.7.0"
63 changes: 63 additions & 0 deletions data-pipeline/examples/send-traces-with-stats.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,63 @@
// Copyright 2024-Present Datadog, Inc. https://www.datadoghq.com/
// SPDX-License-Identifier: Apache-2.0

use data_pipeline::trace_exporter::{
TraceExporter, TraceExporterInputFormat, TraceExporterOutputFormat,
};
use datadog_trace_protobuf::pb;
use std::{
collections::HashMap,
time::{Duration, SystemTime, UNIX_EPOCH},
};

/// Build one synthetic span for the example payload.
///
/// All fields are derived deterministically from `trace_id` and `span_id`
/// so repeated runs produce the same traces; `now` is the base timestamp
/// (nanoseconds since the Unix epoch) that all span start times offset from.
fn get_span(now: i64, trace_id: u64, span_id: u64) -> pb::Span {
    let trace = trace_id as i64;
    let span = span_id as i64;

    // Stagger starts per trace/span and vary duration by trace id so the
    // aggregated stats are not all identical.
    let span_start = now + trace * 1_000_000_000 + span * 1_000_000;
    let span_duration = trace % 3 * 10_000_000 + span * 1_000_000;

    // Mark every 10th trace as errored.
    let error_flag = if trace_id % 10 == 0 { 1 } else { 0 };

    // Sampling priority + measured flag so the span is eligible for stats.
    let mut metrics = HashMap::new();
    metrics.insert("_sampling_priority_v1".to_string(), 1.0);
    metrics.insert("_dd.measured".to_string(), 1.0);

    pb::Span {
        trace_id,
        span_id,
        parent_id: span_id - 1,
        duration: span_duration,
        start: span_start,
        service: "data-pipeline-test".to_string(),
        name: format!("test-name-{}", span_id % 2),
        resource: format!("test-resource-{}", (span_id + trace_id) % 3),
        error: error_flag,
        metrics,
        ..Default::default()
    }
}

/// Example entry point: send 100 synthetic traces (1000 spans each) to a
/// local Datadog Agent with client-side stats computation enabled.
fn main() {
    // Exporter pointed at a local agent, aggregating stats in 10 s buckets.
    let exporter = TraceExporter::builder()
        .set_url("http://localhost:8126")
        .set_hostname("test")
        .set_env("testing")
        .set_version(env!("CARGO_PKG_VERSION"))
        .set_service("data-pipeline-test")
        .set_tracer_version(env!("CARGO_PKG_VERSION"))
        .set_language("rust")
        .set_language_version(env!("CARGO_PKG_RUST_VERSION"))
        .set_input_format(TraceExporterInputFormat::V04)
        .set_output_format(TraceExporterOutputFormat::V07)
        .enable_stats(Duration::from_secs(10))
        .build()
        .unwrap();

    // Common base timestamp (ns since the Unix epoch) for all spans.
    let now = SystemTime::now()
        .duration_since(UNIX_EPOCH)
        .unwrap()
        .as_nanos() as i64;

    // Generate 100 traces of 1000 spans each via iterator chains.
    let traces: Vec<Vec<_>> = (1..=100)
        .map(|trace_id| {
            (1..=1000)
                .map(|span_id| get_span(now, trace_id, span_id))
                .collect()
        })
        .collect();

    // Serialize as named msgpack (the v04 wire format) and hand off.
    let payload = rmp_serde::to_vec_named(&traces).unwrap();
    exporter.send(&payload, 100).unwrap();
    exporter.shutdown(None).unwrap();
}
2 changes: 2 additions & 0 deletions data-pipeline/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,6 @@
#[allow(missing_docs)]
pub mod span_concentrator;
#[allow(missing_docs)]
pub mod stats_exporter;
#[allow(missing_docs)]
pub mod trace_exporter;
1 change: 0 additions & 1 deletion data-pipeline/src/span_concentrator/mod.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
// Copyright 2024-Present Datadog, Inc. https://www.datadoghq.com/
// SPDX-License-Identifier: Apache-2.0
//! This module implements the SpanConcentrator used to aggregate spans into stats
#![allow(dead_code)] // TODO: Remove once the trace exporter uses the SpanConcentrator
use std::collections::HashMap;
use std::time::{self, Duration, SystemTime};

Expand Down
Loading

0 comments on commit ea71062

Please sign in to comment.