From 3be23aba30195f4a21dc955f7c6b960475caee6e Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Wed, 1 Oct 2025 14:39:11 -0400 Subject: [PATCH 01/23] Handle proxy payload --- crates/datadog-trace-agent/Cargo.toml | 2 + crates/datadog-trace-agent/src/lib.rs | 1 + crates/datadog-trace-agent/src/mini_agent.rs | 70 +++++++++++++++++-- .../src/proxy_aggregator.rs | 38 ++++++++++ 4 files changed, 107 insertions(+), 4 deletions(-) create mode 100644 crates/datadog-trace-agent/src/proxy_aggregator.rs diff --git a/crates/datadog-trace-agent/Cargo.toml b/crates/datadog-trace-agent/Cargo.toml index dd967d1..19cb164 100644 --- a/crates/datadog-trace-agent/Cargo.toml +++ b/crates/datadog-trace-agent/Cargo.toml @@ -23,6 +23,8 @@ datadog-trace-protobuf = { git = "https://github.com/DataDog/libdatadog/", rev = datadog-trace-utils = { git = "https://github.com/DataDog/libdatadog/", rev = "4eb2b8673354f974591c61bab3f7d485b4c119e0", features = ["mini_agent"] } datadog-trace-normalization = { git = "https://github.com/DataDog/libdatadog/", rev = "4eb2b8673354f974591c61bab3f7d485b4c119e0" } datadog-trace-obfuscation = { git = "https://github.com/DataDog/libdatadog/", rev = "4eb2b8673354f974591c61bab3f7d485b4c119e0" } +reqwest = { version = "0.12.23", features = ["json"] } +bytes = "1.10.1" [dev-dependencies] rmp-serde = "1.1.1" diff --git a/crates/datadog-trace-agent/src/lib.rs b/crates/datadog-trace-agent/src/lib.rs index 165c263..5629d56 100644 --- a/crates/datadog-trace-agent/src/lib.rs +++ b/crates/datadog-trace-agent/src/lib.rs @@ -12,6 +12,7 @@ pub mod config; pub mod env_verifier; pub mod http_utils; pub mod mini_agent; +pub mod proxy_aggregator; pub mod stats_flusher; pub mod stats_processor; pub mod trace_flusher; diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index 552c69d..f950c82 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -2,6 +2,7 @@ // SPDX-License-Identifier: Apache-2.0 use ddcommon::hyper_migration; +use http_body_util::BodyExt; use hyper::service::service_fn; use hyper::{http, Method, Response, StatusCode}; use serde_json::json; @@ -9,11 +10,14 @@ use std::io; use std::net::SocketAddr; use std::sync::Arc; use std::time::Instant; -use tokio::sync::mpsc::{self, Receiver, Sender}; +use tokio::sync::{ + Mutex, + mpsc::{self, Receiver, Sender}, +}; use tracing::{debug, error}; -use crate::http_utils::log_and_create_http_response; -use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor}; +use crate::http_utils::{self, log_and_create_http_response}; +use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor, proxy_aggregator::{self, ProxyRequest}}; use datadog_trace_protobuf::pb; use datadog_trace_utils::trace_utils; use datadog_trace_utils::trace_utils::SendData; @@ -22,6 +26,7 @@ const MINI_AGENT_PORT: usize = 8126; const TRACE_ENDPOINT_PATH: &str = "/v0.4/traces"; const STATS_ENDPOINT_PATH: &str = "/v0.6/stats"; const INFO_ENDPOINT_PATH: &str = "/info"; +const PROFILING_ENDPOINT_PATH: &str = "/profiling/v1/input"; const TRACER_PAYLOAD_CHANNEL_BUFFER_SIZE: usize = 10; const STATS_PAYLOAD_CHANNEL_BUFFER_SIZE: usize = 10; @@ -32,6 +37,7 @@ pub struct MiniAgent { pub stats_processor: Arc, pub stats_flusher: Arc, pub env_verifier: Arc, + pub proxy_aggregator: Arc>, } impl MiniAgent { @@ -84,10 +90,17 @@ impl MiniAgent { .await; }); + // start our proxy flusher for profiling requests + let 
proxy_aggregator_for_flusher = self.proxy_aggregator.clone(); + tokio::spawn(async move { + let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(10)); + }); + // setup our hyper http server, where the endpoint_handler handles incoming requests let trace_processor = self.trace_processor.clone(); let stats_processor = self.stats_processor.clone(); let endpoint_config = self.config.clone(); + let proxy_aggregator = self.proxy_aggregator.clone(); let service = service_fn(move |req| { let trace_processor = trace_processor.clone(); @@ -98,6 +111,7 @@ impl MiniAgent { let endpoint_config = endpoint_config.clone(); let mini_agent_metadata = Arc::clone(&mini_agent_metadata); + let proxy_aggregator = proxy_aggregator.clone(); MiniAgent::trace_endpoint_handler( endpoint_config.clone(), @@ -107,6 +121,7 @@ impl MiniAgent { stats_processor.clone(), stats_tx.clone(), Arc::clone(&mini_agent_metadata), + proxy_aggregator.clone(), ) }); @@ -170,6 +185,7 @@ impl MiniAgent { stats_processor: Arc, stats_tx: Sender, mini_agent_metadata: Arc, + proxy_aggregator: Arc>, ) -> http::Result { match (req.method(), req.uri().path()) { (&Method::PUT | &Method::POST, TRACE_ENDPOINT_PATH) => { @@ -193,6 +209,15 @@ impl MiniAgent { ), } } + (&Method::POST, PROFILING_ENDPOINT_PATH) => { + match Self::profiling_proxy_handler(config, req, proxy_aggregator).await { + Ok(res) => Ok(res), + Err(err) => log_and_create_http_response( + &format!("Error processing profiling request: {err}"), + StatusCode::INTERNAL_SERVER_ERROR, + ), + } + } (_, INFO_ENDPOINT_PATH) => match Self::info_handler(config.dd_dogstatsd_port) { Ok(res) => Ok(res), Err(err) => log_and_create_http_response( @@ -208,13 +233,49 @@ impl MiniAgent { } } + async fn profiling_proxy_handler( + config: Arc, + request: hyper_migration::HttpRequest, + proxy_aggregator: Arc>, + ) -> Result> { + debug!("Trace Agent | Proxied request for profiling"); + + // Extract headers and body + let (parts, body) = request.into_parts(); + if let Some(response) = http_utils::verify_request_content_length( + &parts.headers, + config.max_request_content_length, + "Error processing profiling request", + ) { + return response.map_err(|e| Box::new(e) as Box); + } + + let body_bytes = body.collect().await?.to_bytes(); + + // Create proxy request + let proxy_request = ProxyRequest { + headers: parts.headers, + body: body_bytes, + target_url: format!("https://intake.profile.{}/api/v2/profile", config.dd_site), + }; + + let mut proxy_aggregator = proxy_aggregator.lock().await; + proxy_aggregator.add(proxy_request); + + Response::builder() + .status(200) + .body(hyper_migration::Body::from("Acknowledged profiling request")) + .map_err(|e| Box::new(e) as Box) + } + fn info_handler(dd_dogstatsd_port: u16) -> http::Result { let response_json = json!( { "endpoints": [ TRACE_ENDPOINT_PATH, STATS_ENDPOINT_PATH, - INFO_ENDPOINT_PATH + INFO_ENDPOINT_PATH, + PROFILING_ENDPOINT_PATH ], "client_drop_p0s": true, "config": { @@ -226,4 +287,5 @@ impl MiniAgent { .status(200) .body(hyper_migration::Body::from(response_json.to_string())) } + } diff --git a/crates/datadog-trace-agent/src/proxy_aggregator.rs b/crates/datadog-trace-agent/src/proxy_aggregator.rs new file mode 100644 index 0000000..bc6865e --- /dev/null +++ b/crates/datadog-trace-agent/src/proxy_aggregator.rs @@ -0,0 +1,38 @@ +use bytes::Bytes; // TODO: Do we use bytes? +use reqwest::header::HeaderMap; // TODO: Do we use reqwest? 
+ +pub struct ProxyRequest { + pub headers: HeaderMap, + pub body: Bytes, + pub target_url: String, +} + +/// Takes in individual proxy requests and aggregates them into batches to be flushed to Datadog. +pub struct Aggregator { + queue: Vec, +} + +impl Default for Aggregator { + fn default() -> Self { + Aggregator { + queue: Vec::with_capacity(128), // arbitrary capacity for request queue + } + } +} + +impl Aggregator { + /// Takes in an individual proxy request. + pub fn add(&mut self, request: ProxyRequest) { + self.queue.push(request); + } + + /// Returns a batch of proxy requests. + pub fn get_batch(&mut self) -> Vec { + std::mem::take(&mut self.queue) + } + + /// Flush the queue. + pub fn clear(&mut self) { + self.queue.clear(); + } +} \ No newline at end of file From d9ea13bb91d93f7c280968cb004d2fdab0822015 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Wed, 15 Oct 2025 14:43:48 -0400 Subject: [PATCH 02/23] Send proxy payload through mpsc channel to then get aggregated and flushed in proxy flusher --- Cargo.lock | 427 ++++++++++++------ crates/datadog-serverless-compat/src/main.rs | 11 +- crates/datadog-trace-agent/Cargo.toml | 1 + crates/datadog-trace-agent/src/config.rs | 12 +- crates/datadog-trace-agent/src/http_utils.rs | 16 + crates/datadog-trace-agent/src/lib.rs | 3 +- crates/datadog-trace-agent/src/mini_agent.rs | 66 ++- .../src/proxy_aggregator.rs | 13 +- .../datadog-trace-agent/src/proxy_flusher.rs | 240 ++++++++++ .../{aggregator.rs => trace_aggregator.rs} | 0 .../datadog-trace-agent/src/trace_flusher.rs | 2 +- .../src/trace_processor.rs | 6 + 12 files changed, 634 insertions(+), 163 deletions(-) create mode 100644 crates/datadog-trace-agent/src/proxy_flusher.rs rename crates/datadog-trace-agent/src/{aggregator.rs => trace_aggregator.rs} (100%) diff --git a/Cargo.lock b/Cargo.lock index e6fde7e..7d4aad0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -667,6 +667,30 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "critical-section" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "790eea4361631c5e7d22598ecd5723ff611904e3344ce8720784c93e3d83d40b" + +[[package]] +name = "crossbeam-channel" +version = "0.5.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "82b8f8f868b36967f9606790d1903570de9ceaf870a7bf9fbbd3016d636a2cb2" +dependencies = [ + "crossbeam-utils", +] + +[[package]] +name = "crossbeam-epoch" +version = "0.9.18" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5b82ac4a3c2ca9c3460964f020e1402edd5753411d7737aa39c3714ad1b5420e" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.21" @@ -788,6 +812,8 @@ version = "0.1.0" dependencies = [ "anyhow", "async-trait", + "bytes", + "datadog-fips", "datadog-trace-normalization", "datadog-trace-obfuscation", "datadog-trace-protobuf", @@ -797,6 +823,7 @@ dependencies = [ "http-body-util", "hyper 1.6.0", "hyper-util", + "reqwest", "rmp-serde", "serde", "serde_json", @@ -1189,6 +1216,21 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" +[[package]] +name = "foreign-types" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +dependencies = [ + "foreign-types-shared", +] + +[[package]] +name = "foreign-types-shared" +version = "0.1.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" + [[package]] name = "form_urlencoded" version = "1.2.1" @@ -1454,9 +1496,9 @@ checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70" [[package]] name = "hickory-proto" -version = "0.24.4" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "92652067c9ce6f66ce53cc38d1169daa36e6e7eb7dd3b63b5103bd9d97117248" +checksum = "f8a6fe56c0038198998a6f217ca4e7ef3a5e51f46163bd6dd60b5c71ca6c6502" dependencies = [ "async-trait", "cfg-if", @@ -1468,8 +1510,9 @@ dependencies = [ "idna", "ipnet", "once_cell", - "rand 0.8.5", - "thiserror 1.0.69", + "rand 0.9.0", + "ring", + "thiserror 2.0.12", "tinyvec", "tokio", "tracing", @@ -1478,21 +1521,21 @@ dependencies = [ [[package]] name = "hickory-resolver" -version = "0.24.4" +version = "0.25.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cbb117a1ca520e111743ab2f6688eddee69db4e0ea242545a604dce8a66fd22e" +checksum = "dc62a9a99b0bfb44d2ab95a7208ac952d31060efc16241c87eaf36406fecf87a" dependencies = [ "cfg-if", "futures-util", "hickory-proto", "ipconfig", - "lru-cache", + "moka", "once_cell", "parking_lot", - "rand 0.8.5", + "rand 0.9.0", "resolv-conf", "smallvec", - "thiserror 1.0.69", + "thiserror 2.0.12", "tokio", "tracing", ] @@ -1629,7 +1672,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2", + "socket2 0.5.8", "tokio", "tower-service", "tracing", @@ -1674,7 +1717,7 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "webpki-roots", + "webpki-roots 0.26.8", ] [[package]] @@ -1708,26 +1751,49 @@ dependencies = [ "tokio", "tokio-rustls", "tower-service", - "webpki-roots", + "webpki-roots 0.26.8", +] + +[[package]] +name = "hyper-tls" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" +dependencies = [ + "bytes", + "http-body-util", + "hyper 1.6.0", + "hyper-util", + "native-tls", + "tokio", + "tokio-native-tls", + "tower-service", ] [[package]] name = "hyper-util" -version = "0.1.10" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" dependencies = [ + "base64 0.22.1", "bytes", "futures-channel", + "futures-core", "futures-util", "http 1.3.1", "http-body 1.0.1", "hyper 1.6.0", + "ipnet", + "libc", + "percent-encoding", "pin-project-lite", - "socket2", + "socket2 0.6.0", + "system-configuration", "tokio", "tower-service", "tracing", + "windows-registry", ] [[package]] @@ -1942,7 +2008,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2", + "socket2 0.5.8", "widestring", "windows-sys 0.48.0", "winreg", @@ -1954,6 +2020,16 @@ version = "2.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "469fb0b9cefa57e3ef31275ee7cacb78f2fdca44e4765491884a2b119d4eb130" +[[package]] +name = "iri-string" +version = "0.7.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbc5ebe9c3a1a7a5127f920a418f7585e9e758e911d0466ed004f393b0e380b2" +dependencies = [ + "memchr", + "serde", +] + [[package]] name = "itertools" 
version = "0.11.0" @@ -2066,9 +2142,9 @@ checksum = "db13adb97ab515a3691f56e4dbab09283d0b86cb45abd991d8634a9d6f501760" [[package]] name = "libc" -version = "0.2.171" +version = "0.2.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c19937216e9d3aa9956d9bb8dfc0b0c8beb6058fc4f7a4dc4d850edf86a237d6" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "libloading" @@ -2090,12 +2166,6 @@ dependencies = [ "libc", ] -[[package]] -name = "linked-hash-map" -version = "0.5.6" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" - [[package]] name = "linux-raw-sys" version = "0.4.15" @@ -2133,15 +2203,6 @@ dependencies = [ "value-bag", ] -[[package]] -name = "lru-cache" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31e24f1ad8321ca0e8a1e0ac13f23cb668e6f5466c2c57319f6a5cf1cc8e3b1c" -dependencies = [ - "linked-hash-map", -] - [[package]] name = "matchers" version = "0.1.0" @@ -2212,12 +2273,47 @@ dependencies = [ "tokio", ] +[[package]] +name = "moka" +version = "0.12.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8261cd88c312e0004c1d51baad2980c66528dfdb2bee62003e643a4d8f86b077" +dependencies = [ + "crossbeam-channel", + "crossbeam-epoch", + "crossbeam-utils", + "equivalent", + "parking_lot", + "portable-atomic", + "rustc_version", + "smallvec", + "tagptr", + "uuid", +] + [[package]] name = "multimap" version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "defc4c55412d89136f966bbb339008b474350e5e6e78d2714439c386b3137a03" +[[package]] +name = "native-tls" +version = "0.2.14" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" +dependencies = [ + "libc", + "log", + "openssl", + "openssl-probe", + "openssl-sys", + "schannel", + "security-framework 2.11.1", + "security-framework-sys", + "tempfile", +] + [[package]] name = "new_debug_unreachable" version = "1.0.6" @@ -2285,6 +2381,36 @@ name = "once_cell" version = "1.21.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc" +dependencies = [ + "critical-section", + "portable-atomic", +] + +[[package]] +name = "openssl" +version = "0.10.73" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" +dependencies = [ + "bitflags", + "cfg-if", + "foreign-types", + "libc", + "once_cell", + "openssl-macros", + "openssl-sys", +] + +[[package]] +name = "openssl-macros" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.100", +] [[package]] name = "openssl-probe" @@ -2292,6 +2418,18 @@ version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" +[[package]] +name = "openssl-sys" +version = "0.9.109" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "90096e2e47630d78b7d1c20952dc621f957103f8bc2c8359ec81290d75238571" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "option-ext" 
version = "0.2.0" @@ -2478,6 +2616,12 @@ dependencies = [ "windows-sys 0.59.0", ] +[[package]] +name = "portable-atomic" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f84267b20a16ea918e43c6a88433c2d54fa145c92a811b5b047ccbe153674483" + [[package]] name = "powerfmt" version = "0.2.0" @@ -2685,7 +2829,7 @@ dependencies = [ "quinn-udp", "rustc-hash 2.1.1", "rustls", - "socket2", + "socket2 0.5.8", "thiserror 2.0.12", "tokio", "tracing", @@ -2721,7 +2865,7 @@ dependencies = [ "cfg_aliases", "libc", "once_cell", - "socket2", + "socket2 0.5.8", "tracing", "windows-sys 0.59.0", ] @@ -2876,15 +3020,14 @@ checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" [[package]] name = "reqwest" -version = "0.12.15" +version = "0.12.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d19c46a6fdd48bc4dab94b6103fccc55d34c67cc0ad04653aad4ea2a07cd7bbb" +checksum = "d429f34c8092b2d42c7c93cec323bb4adeb7c67698f70839adec842ec10c7ceb" dependencies = [ "base64 0.22.1", "bytes", "encoding_rs", "futures-core", - "futures-util", "h2", "hickory-resolver", "http 1.3.1", @@ -2892,33 +3035,34 @@ dependencies = [ "http-body-util", "hyper 1.6.0", "hyper-rustls", + "hyper-tls", "hyper-util", - "ipnet", "js-sys", "log", "mime", + "native-tls", "once_cell", "percent-encoding", "pin-project-lite", "quinn", "rustls", "rustls-native-certs 0.8.1", - "rustls-pemfile", "rustls-pki-types", "serde", "serde_json", "serde_urlencoded", "sync_wrapper", "tokio", + "tokio-native-tls", "tokio-rustls", "tower", + "tower-http", "tower-service", "url", "wasm-bindgen", "wasm-bindgen-futures", "web-sys", - "webpki-roots", - "windows-registry", + "webpki-roots 1.0.3", ] [[package]] @@ -2994,6 +3138,15 @@ version = "2.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d" +[[package]] +name = "rustc_version" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" +dependencies = [ + "semver", +] + [[package]] name = "rustix" version = "0.38.44" @@ -3384,6 +3537,16 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "socket2" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "233504af464074f9d066d7b5416c5f9b894a5862a6506e306f7b816cdd6f1807" +dependencies = [ + "libc", + "windows-sys 0.59.0", +] + [[package]] name = "stable_deref_trait" version = "1.2.0" @@ -3485,6 +3648,33 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "system-configuration" +version = "0.6.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" +dependencies = [ + "bitflags", + "core-foundation 0.9.4", + "system-configuration-sys", +] + +[[package]] +name = "system-configuration-sys" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" +dependencies = [ + "core-foundation-sys", + "libc", +] + +[[package]] +name = "tagptr" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b2093cf4c8eb1e67749a6762251bc9cd836b6fc171623bd0a9d324d37af2417" + [[package]] name = "tempfile" version = "3.19.1" @@ -3673,7 +3863,7 @@ dependencies = [ "mio", "pin-project-lite", 
"signal-hook-registry", - "socket2", + "socket2 0.5.8", "tokio-macros", "windows-sys 0.52.0", ] @@ -3689,6 +3879,16 @@ dependencies = [ "syn 2.0.100", ] +[[package]] +name = "tokio-native-tls" +version = "0.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" +dependencies = [ + "native-tls", + "tokio", +] + [[package]] name = "tokio-rustls" version = "0.26.2" @@ -3773,6 +3973,24 @@ dependencies = [ "tower-service", ] +[[package]] +name = "tower-http" +version = "0.6.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" +dependencies = [ + "bitflags", + "bytes", + "futures-util", + "http 1.3.1", + "http-body 1.0.1", + "iri-string", + "pin-project-lite", + "tower", + "tower-layer", + "tower-service", +] + [[package]] name = "tower-layer" version = "0.3.3" @@ -3945,6 +4163,17 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b6c140620e7ffbb22c2dee59cafe6084a59b5ffc27a8859a5f0d494b5d52b6be" +[[package]] +name = "uuid" +version = "1.18.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" +dependencies = [ + "getrandom 0.3.2", + "js-sys", + "wasm-bindgen", +] + [[package]] name = "valuable" version = "0.1.1" @@ -3957,6 +4186,12 @@ version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5" +[[package]] +name = "vcpkg" +version = "0.2.15" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" + [[package]] name = "version_check" version = "0.9.5" @@ -4106,6 +4341,15 @@ dependencies = [ "rustls-pki-types", ] +[[package]] +name = "webpki-roots" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "32b130c0d2d49f8b6889abc456e795e82525204f27c42cf767cf0d7734e089b8" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "which" version = "4.4.2" @@ -4165,7 +4409,7 @@ dependencies = [ "windows-interface", "windows-link", "windows-result", - "windows-strings 0.4.0", + "windows-strings", ] [[package]] @@ -4192,44 +4436,35 @@ dependencies = [ [[package]] name = "windows-link" -version = "0.1.1" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76840935b766e1b0a05c0066835fb9ec80071d4c09a16f6bd5f7e655e3c14c38" +checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" [[package]] name = "windows-registry" -version = "0.4.0" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4286ad90ddb45071efd1a66dfa43eb02dd0dfbae1545ad6cc3c51cf34d7e8ba3" +checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" dependencies = [ + "windows-link", "windows-result", - "windows-strings 0.3.1", - "windows-targets 0.53.0", + "windows-strings", ] [[package]] name = "windows-result" -version = "0.3.2" +version = "0.3.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c64fd11a4fd95df68efcfee5f44a294fe71b8bc6a91993e2791938abcc712252" +checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" dependencies = [ "windows-link", ] [[package]] name = "windows-strings" -version = "0.3.1" 
-source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87fa48cc5d406560701792be122a10132491cff9d0aeb23583cc2dcafc847319" -dependencies = [ - "windows-link", -] - -[[package]] -name = "windows-strings" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7a2ba9642430ee452d5a7aa78d72907ebe8cfda358e8cb7918a2050581322f97" +checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" dependencies = [ "windows-link", ] @@ -4285,29 +4520,13 @@ dependencies = [ "windows_aarch64_gnullvm 0.52.6", "windows_aarch64_msvc 0.52.6", "windows_i686_gnu 0.52.6", - "windows_i686_gnullvm 0.52.6", + "windows_i686_gnullvm", "windows_i686_msvc 0.52.6", "windows_x86_64_gnu 0.52.6", "windows_x86_64_gnullvm 0.52.6", "windows_x86_64_msvc 0.52.6", ] -[[package]] -name = "windows-targets" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1e4c7e8ceaaf9cb7d7507c974735728ab453b67ef8f18febdd7c11fe59dca8b" -dependencies = [ - "windows_aarch64_gnullvm 0.53.0", - "windows_aarch64_msvc 0.53.0", - "windows_i686_gnu 0.53.0", - "windows_i686_gnullvm 0.53.0", - "windows_i686_msvc 0.53.0", - "windows_x86_64_gnu 0.53.0", - "windows_x86_64_gnullvm 0.53.0", - "windows_x86_64_msvc 0.53.0", -] - [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" @@ -4320,12 +4539,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32a4622180e7a0ec044bb555404c800bc9fd9ec262ec147edd5989ccd0c02cd3" -[[package]] -name = "windows_aarch64_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "86b8d5f90ddd19cb4a147a5fa63ca848db3df085e25fee3cc10b39b6eebae764" - [[package]] name = "windows_aarch64_msvc" version = "0.48.5" @@ -4338,12 +4551,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "09ec2a7bb152e2252b53fa7803150007879548bc709c039df7627cabbd05d469" -[[package]] -name = "windows_aarch64_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c7651a1f62a11b8cbd5e0d42526e55f2c99886c77e007179efff86c2b137e66c" - [[package]] name = "windows_i686_gnu" version = "0.48.5" @@ -4356,24 +4563,12 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8e9b5ad5ab802e97eb8e295ac6720e509ee4c243f69d781394014ebfe8bbfa0b" -[[package]] -name = "windows_i686_gnu" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1dc67659d35f387f5f6c479dc4e28f1d4bb90ddd1a5d3da2e5d97b42d6272c3" - [[package]] name = "windows_i686_gnullvm" version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0eee52d38c090b3caa76c563b86c3a4bd71ef1a819287c19d586d7334ae8ed66" -[[package]] -name = "windows_i686_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ce6ccbdedbf6d6354471319e781c0dfef054c81fbc7cf83f338a4296c0cae11" - [[package]] name = "windows_i686_msvc" version = "0.48.5" @@ -4386,12 +4581,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "240948bc05c5e7c6dabba28bf89d89ffce3e303022809e73deaefe4f6ec56c66" -[[package]] -name = "windows_i686_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "581fee95406bb13382d2f65cd4a908ca7b1e4c2f1917f143ba16efe98a589b5d" 
- [[package]] name = "windows_x86_64_gnu" version = "0.48.5" @@ -4404,12 +4593,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "147a5c80aabfbf0c7d901cb5895d1de30ef2907eb21fbbab29ca94c5b08b1a78" -[[package]] -name = "windows_x86_64_gnu" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2e55b5ac9ea33f2fc1716d1742db15574fd6fc8dadc51caab1c16a3d3b4190ba" - [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" @@ -4422,12 +4605,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "24d5b23dc417412679681396f2b49f3de8c1473deb516bd34410872eff51ed0d" -[[package]] -name = "windows_x86_64_gnullvm" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a6e035dd0599267ce1ee132e51c27dd29437f63325753051e71dd9e42406c57" - [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -4440,12 +4617,6 @@ version = "0.52.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" -[[package]] -name = "windows_x86_64_msvc" -version = "0.53.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" - [[package]] name = "winreg" version = "0.50.0" diff --git a/crates/datadog-serverless-compat/src/main.rs b/crates/datadog-serverless-compat/src/main.rs index 7f4681e..3d1f9fc 100644 --- a/crates/datadog-serverless-compat/src/main.rs +++ b/crates/datadog-serverless-compat/src/main.rs @@ -17,10 +17,12 @@ use tracing_subscriber::EnvFilter; use zstd::zstd_safe::CompressionLevel; use datadog_trace_agent::{ - aggregator::TraceAggregator, + trace_aggregator::TraceAggregator, config, env_verifier, mini_agent, stats_flusher, stats_processor, trace_flusher::{self, TraceFlusher}, trace_processor, + proxy_aggregator, + proxy_flusher, }; use datadog_trace_utils::{config_utils::read_cloud_env, trace_utils::EnvironmentType}; @@ -120,6 +122,12 @@ pub async fn main() { Arc::clone(&config), )); + let proxy_aggregator = Arc::new(TokioMutex::new(proxy_aggregator::ProxyAggregator::default())); + let proxy_flusher = Arc::new(proxy_flusher::ProxyFlusher::new( + proxy_aggregator, + Arc::clone(&config), + )); + let mini_agent = Box::new(mini_agent::MiniAgent { config: Arc::clone(&config), env_verifier, @@ -127,6 +135,7 @@ pub async fn main() { trace_flusher, stats_processor, stats_flusher, + proxy_flusher, }); tokio::spawn(async move { diff --git a/crates/datadog-trace-agent/Cargo.toml b/crates/datadog-trace-agent/Cargo.toml index 19cb164..4cb04b7 100644 --- a/crates/datadog-trace-agent/Cargo.toml +++ b/crates/datadog-trace-agent/Cargo.toml @@ -23,6 +23,7 @@ datadog-trace-protobuf = { git = "https://github.com/DataDog/libdatadog/", rev = datadog-trace-utils = { git = "https://github.com/DataDog/libdatadog/", rev = "4eb2b8673354f974591c61bab3f7d485b4c119e0", features = ["mini_agent"] } datadog-trace-normalization = { git = "https://github.com/DataDog/libdatadog/", rev = "4eb2b8673354f974591c61bab3f7d485b4c119e0" } datadog-trace-obfuscation = { git = "https://github.com/DataDog/libdatadog/", rev = "4eb2b8673354f974591c61bab3f7d485b4c119e0" } +datadog-fips = { path = "../datadog-fips", default-features = false } reqwest = { version = "0.12.23", features = ["json"] } bytes = "1.10.1" diff --git a/crates/datadog-trace-agent/src/config.rs 
b/crates/datadog-trace-agent/src/config.rs index 3145925..4bb40cd 100644 --- a/crates/datadog-trace-agent/src/config.rs +++ b/crates/datadog-trace-agent/src/config.rs @@ -11,7 +11,7 @@ use std::sync::OnceLock; use datadog_trace_obfuscation::obfuscation_config; use datadog_trace_utils::config_utils::{ read_cloud_env, trace_intake_url, trace_intake_url_prefixed, trace_stats_url, - trace_stats_url_prefixed, + trace_stats_url_prefixed }; use datadog_trace_utils::trace_utils; @@ -86,6 +86,9 @@ pub struct Config { pub trace_flush_interval: u64, pub trace_intake: Endpoint, pub trace_stats_intake: Endpoint, + /// how often to flush proxy requests, in seconds + pub proxy_flush_interval: u64, + pub proxy_intake: Endpoint, /// timeout for environment verification, in milliseconds pub verify_env_timeout: u64, pub proxy_url: Option, @@ -111,6 +114,7 @@ impl Config { // trace stats to) let mut trace_intake_url = trace_intake_url(&dd_site); let mut trace_stats_intake_url = trace_stats_url(&dd_site); + let proxy_intake_url = format!("https://intake.profile.{}/api/v2/profile", dd_site); // DD_APM_DD_URL env var will primarily be used for integration tests // overrides the entire trace/trace stats intake url prefix @@ -139,6 +143,7 @@ impl Config { max_request_content_length: 10 * 1024 * 1024, // 10MB in Bytes trace_flush_interval: 3, stats_flush_interval: 3, + proxy_flush_interval: 3, verify_env_timeout: 100, dd_dogstatsd_port, dd_site, @@ -149,6 +154,11 @@ impl Config { }, trace_stats_intake: Endpoint { url: hyper::Uri::from_str(&trace_stats_intake_url).unwrap(), + api_key: Some(api_key.clone()), + ..Default::default() + }, + proxy_intake: Endpoint { + url: hyper::Uri::from_str(&proxy_intake_url).unwrap(), api_key: Some(api_key), ..Default::default() }, diff --git a/crates/datadog-trace-agent/src/http_utils.rs b/crates/datadog-trace-agent/src/http_utils.rs index 7f6b940..c1d8cd9 100644 --- a/crates/datadog-trace-agent/src/http_utils.rs +++ b/crates/datadog-trace-agent/src/http_utils.rs @@ -9,6 +9,9 @@ use hyper::{ }; use serde_json::json; use tracing::{debug, error}; +use datadog_fips::reqwest_adapter::create_reqwest_client_builder; +use core::time::Duration; +use std::error::Error; /// Does two things: /// 1. Logs the given message. A success status code (within 200-299) will cause an info log to be @@ -111,6 +114,19 @@ pub fn verify_request_content_length( None } +/// Builds a reqwest client with optional proxy configuration and timeout. +/// Uses FIPS-compliant TLS when the fips feature is enabled. +pub fn build_client( + proxy_url: Option<&str>, + timeout: Duration, +) -> Result> { + let mut builder = create_reqwest_client_builder()?.timeout(timeout); + if let Some(proxy) = proxy_url { + builder = builder.proxy(reqwest::Proxy::all(proxy)?); + } + Ok(builder.build()?) 
+} + #[cfg(test)] mod tests { use ddcommon::hyper_migration; diff --git a/crates/datadog-trace-agent/src/lib.rs b/crates/datadog-trace-agent/src/lib.rs index 5629d56..bee80dd 100644 --- a/crates/datadog-trace-agent/src/lib.rs +++ b/crates/datadog-trace-agent/src/lib.rs @@ -7,12 +7,13 @@ #![cfg_attr(not(test), deny(clippy::todo))] #![cfg_attr(not(test), deny(clippy::unimplemented))] -pub mod aggregator; +pub mod trace_aggregator; pub mod config; pub mod env_verifier; pub mod http_utils; pub mod mini_agent; pub mod proxy_aggregator; +pub mod proxy_flusher; pub mod stats_flusher; pub mod stats_processor; pub mod trace_flusher; diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index f950c82..8469877 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -10,14 +10,11 @@ use std::io; use std::net::SocketAddr; use std::sync::Arc; use std::time::Instant; -use tokio::sync::{ - Mutex, - mpsc::{self, Receiver, Sender}, -}; +use tokio::sync::mpsc::{self, Receiver, Sender}; use tracing::{debug, error}; use crate::http_utils::{self, log_and_create_http_response}; -use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor, proxy_aggregator::{self, ProxyRequest}}; +use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor, proxy_aggregator::ProxyRequest, proxy_flusher}; use datadog_trace_protobuf::pb; use datadog_trace_utils::trace_utils; use datadog_trace_utils::trace_utils::SendData; @@ -29,6 +26,7 @@ const INFO_ENDPOINT_PATH: &str = "/info"; const PROFILING_ENDPOINT_PATH: &str = "/profiling/v1/input"; const TRACER_PAYLOAD_CHANNEL_BUFFER_SIZE: usize = 10; const STATS_PAYLOAD_CHANNEL_BUFFER_SIZE: usize = 10; +const PROXY_PAYLOAD_CHANNEL_BUFFER_SIZE: usize = 10; pub struct MiniAgent { pub config: Arc, @@ -37,7 +35,7 @@ pub struct MiniAgent { pub stats_processor: Arc, pub stats_flusher: Arc, pub env_verifier: Arc, - pub proxy_aggregator: Arc>, + pub proxy_flusher: Arc, } impl MiniAgent { @@ -89,18 +87,23 @@ impl MiniAgent { .start_stats_flusher(stats_config, stats_rx) .await; }); + // channels to send processed profiling requests to our proxy flusher. 
+ let (proxy_tx, proxy_rx): ( + Sender, + Receiver, + ) = mpsc::channel(PROXY_PAYLOAD_CHANNEL_BUFFER_SIZE); // start our proxy flusher for profiling requests - let proxy_aggregator_for_flusher = self.proxy_aggregator.clone(); + let proxy_flusher = self.proxy_flusher.clone(); tokio::spawn(async move { - let mut interval = tokio::time::interval(tokio::time::Duration::from_secs(10)); + let proxy_flusher = proxy_flusher.clone(); + proxy_flusher.start_proxy_flusher(proxy_rx).await; }); // setup our hyper http server, where the endpoint_handler handles incoming requests let trace_processor = self.trace_processor.clone(); let stats_processor = self.stats_processor.clone(); let endpoint_config = self.config.clone(); - let proxy_aggregator = self.proxy_aggregator.clone(); let service = service_fn(move |req| { let trace_processor = trace_processor.clone(); @@ -111,7 +114,8 @@ impl MiniAgent { let endpoint_config = endpoint_config.clone(); let mini_agent_metadata = Arc::clone(&mini_agent_metadata); - let proxy_aggregator = proxy_aggregator.clone(); + + let proxy_tx = proxy_tx.clone(); MiniAgent::trace_endpoint_handler( endpoint_config.clone(), @@ -121,7 +125,7 @@ impl MiniAgent { stats_processor.clone(), stats_tx.clone(), Arc::clone(&mini_agent_metadata), - proxy_aggregator.clone(), + proxy_tx.clone(), ) }); @@ -185,7 +189,7 @@ impl MiniAgent { stats_processor: Arc, stats_tx: Sender, mini_agent_metadata: Arc, - proxy_aggregator: Arc>, + proxy_tx: Sender, ) -> http::Result { match (req.method(), req.uri().path()) { (&Method::PUT | &Method::POST, TRACE_ENDPOINT_PATH) => { @@ -210,7 +214,7 @@ impl MiniAgent { } } (&Method::POST, PROFILING_ENDPOINT_PATH) => { - match Self::profiling_proxy_handler(config, req, proxy_aggregator).await { + match Self::profiling_proxy_handler(config, req, proxy_tx).await { Ok(res) => Ok(res), Err(err) => log_and_create_http_response( &format!("Error processing profiling request: {err}"), @@ -236,8 +240,8 @@ impl MiniAgent { async fn profiling_proxy_handler( config: Arc, request: hyper_migration::HttpRequest, - proxy_aggregator: Arc>, - ) -> Result> { + proxy_tx: Sender, + ) -> http::Result { debug!("Trace Agent | Proxied request for profiling"); // Extract headers and body @@ -247,25 +251,37 @@ impl MiniAgent { config.max_request_content_length, "Error processing profiling request", ) { - return response.map_err(|e| Box::new(e) as Box); + return response; } - let body_bytes = body.collect().await?.to_bytes(); + let body_bytes = match body.collect().await { + Ok(collected) => collected.to_bytes(), + Err(e) => { + return log_and_create_http_response( + &format!("Error reading profiling request body: {e}"), + StatusCode::BAD_REQUEST, + ); + } + }; // Create proxy request let proxy_request = ProxyRequest { headers: parts.headers, body: body_bytes, - target_url: format!("https://intake.profile.{}/api/v2/profile", config.dd_site), + target_url: config.proxy_intake.url.to_string(), }; - let mut proxy_aggregator = proxy_aggregator.lock().await; - proxy_aggregator.add(proxy_request); - - Response::builder() - .status(200) - .body(hyper_migration::Body::from("Acknowledged profiling request")) - .map_err(|e| Box::new(e) as Box) + // Send to channel - flusher will aggregate and send + match proxy_tx.send(proxy_request).await { + Ok(_) => log_and_create_http_response( + "Successfully buffered profiling request to be flushed", + StatusCode::OK, + ), + Err(err) => log_and_create_http_response( + &format!("Error sending profiling request to the proxy flusher: {err}"), + 
StatusCode::INTERNAL_SERVER_ERROR, + ), + } } fn info_handler(dd_dogstatsd_port: u16) -> http::Result { diff --git a/crates/datadog-trace-agent/src/proxy_aggregator.rs b/crates/datadog-trace-agent/src/proxy_aggregator.rs index bc6865e..e50ed17 100644 --- a/crates/datadog-trace-agent/src/proxy_aggregator.rs +++ b/crates/datadog-trace-agent/src/proxy_aggregator.rs @@ -1,6 +1,7 @@ -use bytes::Bytes; // TODO: Do we use bytes? -use reqwest::header::HeaderMap; // TODO: Do we use reqwest? +use bytes::Bytes; +use reqwest::header::HeaderMap; +#[derive(Clone)] pub struct ProxyRequest { pub headers: HeaderMap, pub body: Bytes, @@ -8,19 +9,19 @@ pub struct ProxyRequest { } /// Takes in individual proxy requests and aggregates them into batches to be flushed to Datadog. -pub struct Aggregator { +pub struct ProxyAggregator { queue: Vec, } -impl Default for Aggregator { +impl Default for ProxyAggregator { fn default() -> Self { - Aggregator { + ProxyAggregator { queue: Vec::with_capacity(128), // arbitrary capacity for request queue } } } -impl Aggregator { +impl ProxyAggregator { /// Takes in an individual proxy request. pub fn add(&mut self, request: ProxyRequest) { self.queue.push(request); diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs new file mode 100644 index 0000000..94c0255 --- /dev/null +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -0,0 +1,240 @@ +// Copyright 2023-Present Datadog, Inc. https://www.datadoghq.com/ +// SPDX-License-Identifier: Apache-2.0 +use std::{sync::Arc, time}; +use tokio::sync::{Mutex, OnceCell, mpsc::Receiver}; +use tracing::{debug, error}; +use reqwest::header::HeaderMap; + +use crate::config::Config; +use crate::proxy_aggregator::{ProxyAggregator, ProxyRequest}; +use crate::http_utils::build_client; +use core::time::Duration; + +pub struct ProxyFlusher { + // Starts a proxy flusher that listens for proxy payloads + pub aggregator: Arc>, + pub config: Arc, + client: reqwest::Client, + headers: OnceCell, +} + +impl ProxyFlusher { + pub fn new(aggregator: Arc>, config: Arc) -> Self { + // let client = (|| -> Result> { + // let mut builder = create_reqwest_client_builder()?.timeout(Duration::from_secs(30)); + // if let Some(proxy) = &config.proxy_url { + // builder = builder.proxy(reqwest::Proxy::all(proxy)?); + // } + // Ok(builder.build()?) 
+ // })() + // .unwrap_or_else(|e| { + // error!("Failed to create HTTP client: {}, using default", e); + // reqwest::Client::new() + // }); + let client = build_client(config.proxy_url.as_deref(), Duration::from_secs(30)) + .unwrap_or_else(|e| { + error!( + "Unable to parse proxy configuration: {}, no proxy will be used", + e + ); + reqwest::Client::new() + }); + ProxyFlusher { aggregator, config, client, headers: OnceCell::new() } + } + + async fn get_headers(&self, api_key: &str) -> &HeaderMap { + self.headers + .get_or_init(move || async move { + let mut headers = HeaderMap::new(); + headers.insert( + "DD-API-KEY", + api_key.parse().expect("Failed to parse API key header"), + ); + headers + }) + .await + } + + pub async fn start_proxy_flusher(&self, mut rx: Receiver) { + let aggregator = Arc::clone(&self.aggregator); + tokio::spawn(async move { + while let Some(proxy_payload) = rx.recv().await { + let mut guard = aggregator.lock().await; + guard.add(proxy_payload); + } + }); + + loop { + tokio::time::sleep(time::Duration::from_secs(self.config.proxy_flush_interval)).await; + self.flush(None).await; + } + } + + /// Flushes proxy requests by getting every available batch on the aggregator. + /// If `failed_requests` is provided, it will attempt to send those instead of fetching new requests. + /// Returns any requests that failed to send and should be retried. + async fn flush(&self, failed_requests: Option>) -> Option> { + let mut failed_batch: Option> = None; + + if let Some(requests) = failed_requests { + // If we have requests from a previous failed attempt, try to send those first + if !requests.is_empty() { + debug!("Proxy Flusher | Retrying {} failed requests", requests.len()); + let retry_result = self.send_requests(requests).await; + if retry_result.is_some() { + // Still failed, return to retry later + return retry_result; + } + } + } + + // Process new requests from the aggregator + let mut guard = self.aggregator.lock().await; + let mut requests = guard.get_batch(); + while !requests.is_empty() { + if let Some(failed) = self.send_requests(requests).await { + // Keep track of the failed batch + failed_batch = Some(failed); + // Stop processing more batches if we have a failure + break; + } + + requests = guard.get_batch(); + } + failed_batch + } + + // If we have requests from a previous failed attempt, try to send those first + // if failed_requests.as_ref().is_some_and(|r| !r.is_empty()) { + // let retries = failed_requests.unwrap_or_default(); + // debug!("Proxy Flusher | Retrying {} failed requests", retries.len()); + // requests = retries; + // } else { + // let mut aggregator = self.aggregator.lock().await; + // for pr in aggregator.get_batch() { + // requests.push(self.create_request(pr, self.config.proxy_intake.api_key.as_ref().unwrap()).await); + // } + // for request in requests { + // if let Some(failed) = Self::send_request(request).await { + // failed_batch = Some(failed); + // // Put requests back into the aggregator? 
+ // break; + // } + // } + // } + // failed_batch + + async fn create_request( + &self, + request: ProxyRequest, + api_key: &str, + ) -> reqwest::RequestBuilder { + let mut headers = request.headers.clone(); + + // Remove headers that are not needed for the proxy request + headers.remove("host"); + headers.remove("content-length"); + + headers.extend(self.get_headers(api_key).await.clone()); + + // TODO: Figure out what client to use / how data should be sent + self.client + .post(&request.target_url) + .headers(headers) + .timeout(std::time::Duration::from_secs(30)) + .body(request.body) + } + + async fn send_requests(&self, requests: Vec) -> Option> { + if requests.is_empty() { + return None; + } + debug!("Proxy Flusher | Attempting to send {} requests", requests.len()); + + let mut failed_requests = Vec::new(); + + for request_payload in requests { + // Clone the payload before creating the request builder (which consumes body) + let cloned_payload = request_payload.clone(); + let request = self.create_request(request_payload, self.config.proxy_intake.api_key.as_ref().unwrap()).await; + let time = std::time::Instant::now(); + match request.send().await { + Ok(r) => { + let elapsed = time.elapsed(); + let url = r.url().to_string(); + let status = r.status(); + let body = r.text().await; + if status == 202 || status == 200 { + debug!("Proxy Flusher | Successfully sent request {url} in {} ms", elapsed.as_millis()); + } else { + error!("Proxy Flusher | Request failed with status {status}: {body:?}"); + failed_requests.push(cloned_payload); + } + } + Err(e) => { + error!("Proxy Flusher | Failed to send request: {e:?}"); + failed_requests.push(cloned_payload); + } + } + } + + if failed_requests.is_empty() { + None + } else { + Some(failed_requests) + } + } + + // /// Given a `reqwest::RequestBuilder`, send the request and handle retries. 
+ // async fn send_request(request: reqwest::RequestBuilder) -> Result<(), Box> { + // debug!("Proxy Flusher | Attempting to send request"); + // let mut attempts = 0; + + // loop { + // attempts += 1; + + // let Some(cloned_request) = request.try_clone() else { + // return Err(Box::new(std::io::Error::new( + // std::io::ErrorKind::Other, + // "can't clone proxy request", + // ))); + // }; + + // let time = std::time::Instant::now(); + // let response = cloned_request.send().await; + // let elapsed = time.elapsed(); + + // match response { + // Ok(r) => { + // let url = r.url().to_string(); + // let status = r.status(); + // let body = r.text().await; + // if status == 202 || status == 200 { + // debug!( + // "Proxy Flusher | Successfully sent request in {} ms to {url}", + // elapsed.as_millis() + // ); + // } else { + // error!("Proxy Flusher | Request failed with status {status}: {body:?}"); + // } + + // return Ok(()); + // } + // Err(e) => { + // if attempts >= 3 { + // error!( + // "Proxy Flusher | Failed to send request after {} attempts: {:?}", + // attempts, e + // ); + + // return Err(Box::new(FailedProxyRequestError { + // request, + // message: e.to_string(), + // })); + // } + // } + // } + // } + // } +} + diff --git a/crates/datadog-trace-agent/src/aggregator.rs b/crates/datadog-trace-agent/src/trace_aggregator.rs similarity index 100% rename from crates/datadog-trace-agent/src/aggregator.rs rename to crates/datadog-trace-agent/src/trace_aggregator.rs diff --git a/crates/datadog-trace-agent/src/trace_flusher.rs b/crates/datadog-trace-agent/src/trace_flusher.rs index b33be20..512985e 100644 --- a/crates/datadog-trace-agent/src/trace_flusher.rs +++ b/crates/datadog-trace-agent/src/trace_flusher.rs @@ -9,7 +9,7 @@ use tracing::{debug, error}; use datadog_trace_utils::trace_utils; use datadog_trace_utils::trace_utils::SendData; -use crate::aggregator::TraceAggregator; +use crate::trace_aggregator::TraceAggregator; use crate::config::Config; #[async_trait] diff --git a/crates/datadog-trace-agent/src/trace_processor.rs b/crates/datadog-trace-agent/src/trace_processor.rs index dbdf65d..c6d8176 100644 --- a/crates/datadog-trace-agent/src/trace_processor.rs +++ b/crates/datadog-trace-agent/src/trace_processor.rs @@ -202,6 +202,12 @@ mod tests { os: "linux".to_string(), obfuscation_config: ObfuscationConfig::new().unwrap(), proxy_url: None, + proxy_flush_interval: 3, + proxy_intake: Endpoint { + url: hyper::Uri::from_static("https://proxy.agent.notdog.com/proxy"), + api_key: Some("dummy_api_key".into()), + ..Default::default() + }, tags: Tags::from_env_string("env:test,service:my-service"), } } From 601c893b26da50562a3e40c815971f12c015b901 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Thu, 16 Oct 2025 17:11:03 -0400 Subject: [PATCH 03/23] Remove aggregator logic, just send payload through channel and then handle forwarding and retries in background task --- crates/datadog-serverless-compat/src/main.rs | 3 - crates/datadog-trace-agent/src/config.rs | 2 +- crates/datadog-trace-agent/src/lib.rs | 1 - crates/datadog-trace-agent/src/mini_agent.rs | 22 +- .../src/proxy_aggregator.rs | 39 ---- .../datadog-trace-agent/src/proxy_flusher.rs | 215 ++++-------------- 6 files changed, 51 insertions(+), 231 deletions(-) delete mode 100644 crates/datadog-trace-agent/src/proxy_aggregator.rs diff --git a/crates/datadog-serverless-compat/src/main.rs b/crates/datadog-serverless-compat/src/main.rs index 3d1f9fc..735911c 100644 --- a/crates/datadog-serverless-compat/src/main.rs +++ 
b/crates/datadog-serverless-compat/src/main.rs @@ -21,7 +21,6 @@ use datadog_trace_agent::{ config, env_verifier, mini_agent, stats_flusher, stats_processor, trace_flusher::{self, TraceFlusher}, trace_processor, - proxy_aggregator, proxy_flusher, }; @@ -122,9 +121,7 @@ pub async fn main() { Arc::clone(&config), )); - let proxy_aggregator = Arc::new(TokioMutex::new(proxy_aggregator::ProxyAggregator::default())); let proxy_flusher = Arc::new(proxy_flusher::ProxyFlusher::new( - proxy_aggregator, Arc::clone(&config), )); diff --git a/crates/datadog-trace-agent/src/config.rs b/crates/datadog-trace-agent/src/config.rs index 4bb40cd..24e5700 100644 --- a/crates/datadog-trace-agent/src/config.rs +++ b/crates/datadog-trace-agent/src/config.rs @@ -11,7 +11,7 @@ use std::sync::OnceLock; use datadog_trace_obfuscation::obfuscation_config; use datadog_trace_utils::config_utils::{ read_cloud_env, trace_intake_url, trace_intake_url_prefixed, trace_stats_url, - trace_stats_url_prefixed + trace_stats_url_prefixed, }; use datadog_trace_utils::trace_utils; diff --git a/crates/datadog-trace-agent/src/lib.rs b/crates/datadog-trace-agent/src/lib.rs index bee80dd..68027c4 100644 --- a/crates/datadog-trace-agent/src/lib.rs +++ b/crates/datadog-trace-agent/src/lib.rs @@ -12,7 +12,6 @@ pub mod config; pub mod env_verifier; pub mod http_utils; pub mod mini_agent; -pub mod proxy_aggregator; pub mod proxy_flusher; pub mod stats_flusher; pub mod stats_processor; diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index 8469877..8e6fe25 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -13,8 +13,9 @@ use std::time::Instant; use tokio::sync::mpsc::{self, Receiver, Sender}; use tracing::{debug, error}; -use crate::http_utils::{self, log_and_create_http_response}; -use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor, proxy_aggregator::ProxyRequest, proxy_flusher}; +use crate::http_utils::{log_and_create_http_response}; +use crate::proxy_flusher::{ProxyRequest, ProxyFlusher}; +use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor}; use datadog_trace_protobuf::pb; use datadog_trace_utils::trace_utils; use datadog_trace_utils::trace_utils::SendData; @@ -35,14 +36,14 @@ pub struct MiniAgent { pub stats_processor: Arc, pub stats_flusher: Arc, pub env_verifier: Arc, - pub proxy_flusher: Arc, + pub proxy_flusher: Arc, } impl MiniAgent { pub async fn start_mini_agent(&self) -> Result<(), Box> { let now = Instant::now(); - // verify we are in a google cloud funtion environment. if not, shut down the mini agent. + // verify we are in a google cloud function environment. if not, shut down the mini agent. let mini_agent_metadata = Arc::new( self.env_verifier .verify_environment( @@ -87,7 +88,8 @@ impl MiniAgent { .start_stats_flusher(stats_config, stats_rx) .await; }); - // channels to send processed profiling requests to our proxy flusher. 
+ + // channels to send processed profiling requests to our proxy flusher let (proxy_tx, proxy_rx): ( Sender, Receiver, @@ -237,6 +239,7 @@ impl MiniAgent { } } + /// Handles incoming proxy requests for profiling - can be abstracted into a generic proxy handler for other proxy requests in the future async fn profiling_proxy_handler( config: Arc, request: hyper_migration::HttpRequest, @@ -246,13 +249,6 @@ impl MiniAgent { // Extract headers and body let (parts, body) = request.into_parts(); - if let Some(response) = http_utils::verify_request_content_length( - &parts.headers, - config.max_request_content_length, - "Error processing profiling request", - ) { - return response; - } let body_bytes = match body.collect().await { Ok(collected) => collected.to_bytes(), @@ -271,7 +267,7 @@ impl MiniAgent { target_url: config.proxy_intake.url.to_string(), }; - // Send to channel - flusher will aggregate and send + // Send to channel match proxy_tx.send(proxy_request).await { Ok(_) => log_and_create_http_response( "Successfully buffered profiling request to be flushed", diff --git a/crates/datadog-trace-agent/src/proxy_aggregator.rs b/crates/datadog-trace-agent/src/proxy_aggregator.rs deleted file mode 100644 index e50ed17..0000000 --- a/crates/datadog-trace-agent/src/proxy_aggregator.rs +++ /dev/null @@ -1,39 +0,0 @@ -use bytes::Bytes; -use reqwest::header::HeaderMap; - -#[derive(Clone)] -pub struct ProxyRequest { - pub headers: HeaderMap, - pub body: Bytes, - pub target_url: String, -} - -/// Takes in individual proxy requests and aggregates them into batches to be flushed to Datadog. -pub struct ProxyAggregator { - queue: Vec, -} - -impl Default for ProxyAggregator { - fn default() -> Self { - ProxyAggregator { - queue: Vec::with_capacity(128), // arbitrary capacity for request queue - } - } -} - -impl ProxyAggregator { - /// Takes in an individual proxy request. - pub fn add(&mut self, request: ProxyRequest) { - self.queue.push(request); - } - - /// Returns a batch of proxy requests. - pub fn get_batch(&mut self) -> Vec { - std::mem::take(&mut self.queue) - } - - /// Flush the queue. - pub fn clear(&mut self) { - self.queue.clear(); - } -} \ No newline at end of file diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 94c0255..fe2d92e 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -1,36 +1,31 @@ // Copyright 2023-Present Datadog, Inc. 
https://www.datadoghq.com/ // SPDX-License-Identifier: Apache-2.0 -use std::{sync::Arc, time}; -use tokio::sync::{Mutex, OnceCell, mpsc::Receiver}; + +use bytes::Bytes; + +use std::{sync::Arc}; +use tokio::sync::{mpsc::Receiver}; use tracing::{debug, error}; use reqwest::header::HeaderMap; use crate::config::Config; -use crate::proxy_aggregator::{ProxyAggregator, ProxyRequest}; use crate::http_utils::build_client; use core::time::Duration; +pub struct ProxyRequest { + pub headers: HeaderMap, + pub body: Bytes, + pub target_url: String, +} + pub struct ProxyFlusher { - // Starts a proxy flusher that listens for proxy payloads - pub aggregator: Arc>, + /// Handles forwarding proxy requests to Datadog with retry logic pub config: Arc, client: reqwest::Client, - headers: OnceCell, } impl ProxyFlusher { - pub fn new(aggregator: Arc>, config: Arc) -> Self { - // let client = (|| -> Result> { - // let mut builder = create_reqwest_client_builder()?.timeout(Duration::from_secs(30)); - // if let Some(proxy) = &config.proxy_url { - // builder = builder.proxy(reqwest::Proxy::all(proxy)?); - // } - // Ok(builder.build()?) - // })() - // .unwrap_or_else(|e| { - // error!("Failed to create HTTP client: {}, using default", e); - // reqwest::Client::new() - // }); + pub fn new(config: Arc) -> Self { let client = build_client(config.proxy_url.as_deref(), Duration::from_secs(30)) .unwrap_or_else(|e| { error!( @@ -39,94 +34,19 @@ impl ProxyFlusher { ); reqwest::Client::new() }); - ProxyFlusher { aggregator, config, client, headers: OnceCell::new() } - } - - async fn get_headers(&self, api_key: &str) -> &HeaderMap { - self.headers - .get_or_init(move || async move { - let mut headers = HeaderMap::new(); - headers.insert( - "DD-API-KEY", - api_key.parse().expect("Failed to parse API key header"), - ); - headers - }) - .await + ProxyFlusher { config, client } } + /// Starts the proxy flusher that listens for proxy payloads from the channel and forwards them to Datadog pub async fn start_proxy_flusher(&self, mut rx: Receiver) { - let aggregator = Arc::clone(&self.aggregator); - tokio::spawn(async move { - while let Some(proxy_payload) = rx.recv().await { - let mut guard = aggregator.lock().await; - guard.add(proxy_payload); - } - }); - - loop { - tokio::time::sleep(time::Duration::from_secs(self.config.proxy_flush_interval)).await; - self.flush(None).await; + while let Some(proxy_payload) = rx.recv().await { + self.send_request(proxy_payload).await; } } - /// Flushes proxy requests by getting every available batch on the aggregator. - /// If `failed_requests` is provided, it will attempt to send those instead of fetching new requests. - /// Returns any requests that failed to send and should be retried. 
- async fn flush(&self, failed_requests: Option>) -> Option> { - let mut failed_batch: Option> = None; - - if let Some(requests) = failed_requests { - // If we have requests from a previous failed attempt, try to send those first - if !requests.is_empty() { - debug!("Proxy Flusher | Retrying {} failed requests", requests.len()); - let retry_result = self.send_requests(requests).await; - if retry_result.is_some() { - // Still failed, return to retry later - return retry_result; - } - } - } - - // Process new requests from the aggregator - let mut guard = self.aggregator.lock().await; - let mut requests = guard.get_batch(); - while !requests.is_empty() { - if let Some(failed) = self.send_requests(requests).await { - // Keep track of the failed batch - failed_batch = Some(failed); - // Stop processing more batches if we have a failure - break; - } - - requests = guard.get_batch(); - } - failed_batch - } - - // If we have requests from a previous failed attempt, try to send those first - // if failed_requests.as_ref().is_some_and(|r| !r.is_empty()) { - // let retries = failed_requests.unwrap_or_default(); - // debug!("Proxy Flusher | Retrying {} failed requests", retries.len()); - // requests = retries; - // } else { - // let mut aggregator = self.aggregator.lock().await; - // for pr in aggregator.get_batch() { - // requests.push(self.create_request(pr, self.config.proxy_intake.api_key.as_ref().unwrap()).await); - // } - // for request in requests { - // if let Some(failed) = Self::send_request(request).await { - // failed_batch = Some(failed); - // // Put requests back into the aggregator? - // break; - // } - // } - // } - // failed_batch - async fn create_request( &self, - request: ProxyRequest, + request: &ProxyRequest, api_key: &str, ) -> reqwest::RequestBuilder { let mut headers = request.headers.clone(); @@ -135,106 +55,53 @@ impl ProxyFlusher { headers.remove("host"); headers.remove("content-length"); - headers.extend(self.get_headers(api_key).await.clone()); + // Add headers to the request + headers.insert("DD-API-KEY", api_key.parse().expect("Failed to parse API key header")); - // TODO: Figure out what client to use / how data should be sent self.client .post(&request.target_url) .headers(headers) .timeout(std::time::Duration::from_secs(30)) - .body(request.body) + .body(request.body.clone()) } - async fn send_requests(&self, requests: Vec) -> Option> { - if requests.is_empty() { - return None; - } - debug!("Proxy Flusher | Attempting to send {} requests", requests.len()); + async fn send_request(&self, request: ProxyRequest) { + const MAX_RETRIES: u32 = 3; + let mut attempts = 0; - let mut failed_requests = Vec::new(); + loop { + attempts += 1; - for request_payload in requests { - // Clone the payload before creating the request builder (which consumes body) - let cloned_payload = request_payload.clone(); - let request = self.create_request(request_payload, self.config.proxy_intake.api_key.as_ref().unwrap()).await; + let request_builder = self.create_request(&request, self.config.proxy_intake.api_key.as_ref().unwrap()).await; let time = std::time::Instant::now(); - match request.send().await { + let response = request_builder.send().await; + let elapsed = time.elapsed(); + + match response { Ok(r) => { - let elapsed = time.elapsed(); let url = r.url().to_string(); let status = r.status(); let body = r.text().await; if status == 202 || status == 200 { - debug!("Proxy Flusher | Successfully sent request {url} in {} ms", elapsed.as_millis()); + debug!("Proxy Flusher | Successfully 
sent request in {} ms to {url}", elapsed.as_millis()); } else { error!("Proxy Flusher | Request failed with status {status}: {body:?}"); - failed_requests.push(cloned_payload); } + return; } Err(e) => { - error!("Proxy Flusher | Failed to send request: {e:?}"); - failed_requests.push(cloned_payload); + error!("Network error (attempt {}): {:?}", attempts, e); + if attempts >= MAX_RETRIES { + error!("Proxy Flusher | Failed to send request after {} attempts: {:?}", attempts, e); + return; + }; } } + // Exponential backoff + let backoff_ms = 100 * (2_u64.pow(attempts - 1)); + tokio::time::sleep(Duration::from_millis(backoff_ms)).await; } - if failed_requests.is_empty() { - None - } else { - Some(failed_requests) - } } - - // /// Given a `reqwest::RequestBuilder`, send the request and handle retries. - // async fn send_request(request: reqwest::RequestBuilder) -> Result<(), Box> { - // debug!("Proxy Flusher | Attempting to send request"); - // let mut attempts = 0; - - // loop { - // attempts += 1; - - // let Some(cloned_request) = request.try_clone() else { - // return Err(Box::new(std::io::Error::new( - // std::io::ErrorKind::Other, - // "can't clone proxy request", - // ))); - // }; - - // let time = std::time::Instant::now(); - // let response = cloned_request.send().await; - // let elapsed = time.elapsed(); - - // match response { - // Ok(r) => { - // let url = r.url().to_string(); - // let status = r.status(); - // let body = r.text().await; - // if status == 202 || status == 200 { - // debug!( - // "Proxy Flusher | Successfully sent request in {} ms to {url}", - // elapsed.as_millis() - // ); - // } else { - // error!("Proxy Flusher | Request failed with status {status}: {body:?}"); - // } - - // return Ok(()); - // } - // Err(e) => { - // if attempts >= 3 { - // error!( - // "Proxy Flusher | Failed to send request after {} attempts: {:?}", - // attempts, e - // ); - - // return Err(Box::new(FailedProxyRequestError { - // request, - // message: e.to_string(), - // })); - // } - // } - // } - // } - // } } From 516bc94626a1163fb35ba516a7b853dfd3961a2a Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Thu, 16 Oct 2025 17:17:32 -0400 Subject: [PATCH 04/23] cleanup --- crates/datadog-serverless-compat/src/main.rs | 2 +- .../src/{trace_aggregator.rs => aggregator.rs} | 0 crates/datadog-trace-agent/src/config.rs | 4 +--- crates/datadog-trace-agent/src/lib.rs | 2 +- crates/datadog-trace-agent/src/mini_agent.rs | 1 - crates/datadog-trace-agent/src/trace_flusher.rs | 2 +- crates/datadog-trace-agent/src/trace_processor.rs | 1 - 7 files changed, 4 insertions(+), 8 deletions(-) rename crates/datadog-trace-agent/src/{trace_aggregator.rs => aggregator.rs} (100%) diff --git a/crates/datadog-serverless-compat/src/main.rs b/crates/datadog-serverless-compat/src/main.rs index 735911c..f9555af 100644 --- a/crates/datadog-serverless-compat/src/main.rs +++ b/crates/datadog-serverless-compat/src/main.rs @@ -17,7 +17,7 @@ use tracing_subscriber::EnvFilter; use zstd::zstd_safe::CompressionLevel; use datadog_trace_agent::{ - trace_aggregator::TraceAggregator, + aggregator::TraceAggregator, config, env_verifier, mini_agent, stats_flusher, stats_processor, trace_flusher::{self, TraceFlusher}, trace_processor, diff --git a/crates/datadog-trace-agent/src/trace_aggregator.rs b/crates/datadog-trace-agent/src/aggregator.rs similarity index 100% rename from crates/datadog-trace-agent/src/trace_aggregator.rs rename to crates/datadog-trace-agent/src/aggregator.rs diff --git a/crates/datadog-trace-agent/src/config.rs 
b/crates/datadog-trace-agent/src/config.rs index 24e5700..fa51c1f 100644 --- a/crates/datadog-trace-agent/src/config.rs +++ b/crates/datadog-trace-agent/src/config.rs @@ -86,8 +86,7 @@ pub struct Config { pub trace_flush_interval: u64, pub trace_intake: Endpoint, pub trace_stats_intake: Endpoint, - /// how often to flush proxy requests, in seconds - pub proxy_flush_interval: u64, + /// the endpoint to forward proxy requests to pub proxy_intake: Endpoint, /// timeout for environment verification, in milliseconds pub verify_env_timeout: u64, @@ -143,7 +142,6 @@ impl Config { max_request_content_length: 10 * 1024 * 1024, // 10MB in Bytes trace_flush_interval: 3, stats_flush_interval: 3, - proxy_flush_interval: 3, verify_env_timeout: 100, dd_dogstatsd_port, dd_site, diff --git a/crates/datadog-trace-agent/src/lib.rs b/crates/datadog-trace-agent/src/lib.rs index 68027c4..a87bf56 100644 --- a/crates/datadog-trace-agent/src/lib.rs +++ b/crates/datadog-trace-agent/src/lib.rs @@ -7,7 +7,7 @@ #![cfg_attr(not(test), deny(clippy::todo))] #![cfg_attr(not(test), deny(clippy::unimplemented))] -pub mod trace_aggregator; +pub mod aggregator; pub mod config; pub mod env_verifier; pub mod http_utils; diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index 8e6fe25..f89e05e 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -299,5 +299,4 @@ impl MiniAgent { .status(200) .body(hyper_migration::Body::from(response_json.to_string())) } - } diff --git a/crates/datadog-trace-agent/src/trace_flusher.rs b/crates/datadog-trace-agent/src/trace_flusher.rs index 512985e..b33be20 100644 --- a/crates/datadog-trace-agent/src/trace_flusher.rs +++ b/crates/datadog-trace-agent/src/trace_flusher.rs @@ -9,7 +9,7 @@ use tracing::{debug, error}; use datadog_trace_utils::trace_utils; use datadog_trace_utils::trace_utils::SendData; -use crate::trace_aggregator::TraceAggregator; +use crate::aggregator::TraceAggregator; use crate::config::Config; #[async_trait] diff --git a/crates/datadog-trace-agent/src/trace_processor.rs b/crates/datadog-trace-agent/src/trace_processor.rs index c6d8176..245632c 100644 --- a/crates/datadog-trace-agent/src/trace_processor.rs +++ b/crates/datadog-trace-agent/src/trace_processor.rs @@ -202,7 +202,6 @@ mod tests { os: "linux".to_string(), obfuscation_config: ObfuscationConfig::new().unwrap(), proxy_url: None, - proxy_flush_interval: 3, proxy_intake: Endpoint { url: hyper::Uri::from_static("https://proxy.agent.notdog.com/proxy"), api_key: Some("dummy_api_key".into()), From 9502707df212d744458803925a1f8f4a147dde0d Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 10:04:57 -0400 Subject: [PATCH 05/23] Fix error handling --- crates/datadog-serverless-compat/src/main.rs | 7 +-- crates/datadog-trace-agent/src/http_utils.rs | 6 +-- crates/datadog-trace-agent/src/mini_agent.rs | 10 ++-- .../datadog-trace-agent/src/proxy_flusher.rs | 51 +++++++++++++------ 4 files changed, 44 insertions(+), 30 deletions(-) diff --git a/crates/datadog-serverless-compat/src/main.rs b/crates/datadog-serverless-compat/src/main.rs index f9555af..32278ac 100644 --- a/crates/datadog-serverless-compat/src/main.rs +++ b/crates/datadog-serverless-compat/src/main.rs @@ -18,10 +18,9 @@ use zstd::zstd_safe::CompressionLevel; use datadog_trace_agent::{ aggregator::TraceAggregator, - config, env_verifier, mini_agent, stats_flusher, stats_processor, + config, env_verifier, mini_agent, proxy_flusher, stats_flusher, 
stats_processor, trace_flusher::{self, TraceFlusher}, trace_processor, - proxy_flusher, }; use datadog_trace_utils::{config_utils::read_cloud_env, trace_utils::EnvironmentType}; @@ -121,9 +120,7 @@ pub async fn main() { Arc::clone(&config), )); - let proxy_flusher = Arc::new(proxy_flusher::ProxyFlusher::new( - Arc::clone(&config), - )); + let proxy_flusher = Arc::new(proxy_flusher::ProxyFlusher::new(Arc::clone(&config))); let mini_agent = Box::new(mini_agent::MiniAgent { config: Arc::clone(&config), diff --git a/crates/datadog-trace-agent/src/http_utils.rs b/crates/datadog-trace-agent/src/http_utils.rs index c1d8cd9..9accc11 100644 --- a/crates/datadog-trace-agent/src/http_utils.rs +++ b/crates/datadog-trace-agent/src/http_utils.rs @@ -1,6 +1,8 @@ // Copyright 2023-Present Datadog, Inc. https://www.datadoghq.com/ // SPDX-License-Identifier: Apache-2.0 +use core::time::Duration; +use datadog_fips::reqwest_adapter::create_reqwest_client_builder; use ddcommon::hyper_migration; use hyper::{ header, @@ -8,10 +10,8 @@ use hyper::{ Response, StatusCode, }; use serde_json::json; -use tracing::{debug, error}; -use datadog_fips::reqwest_adapter::create_reqwest_client_builder; -use core::time::Duration; use std::error::Error; +use tracing::{debug, error}; /// Does two things: /// 1. Logs the given message. A success status code (within 200-299) will cause an info log to be diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index f89e05e..2004659 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -13,8 +13,8 @@ use std::time::Instant; use tokio::sync::mpsc::{self, Receiver, Sender}; use tracing::{debug, error}; -use crate::http_utils::{log_and_create_http_response}; -use crate::proxy_flusher::{ProxyRequest, ProxyFlusher}; +use crate::http_utils::log_and_create_http_response; +use crate::proxy_flusher::{ProxyFlusher, ProxyRequest}; use crate::{config, env_verifier, stats_flusher, stats_processor, trace_flusher, trace_processor}; use datadog_trace_protobuf::pb; use datadog_trace_utils::trace_utils; @@ -90,10 +90,8 @@ impl MiniAgent { }); // channels to send processed profiling requests to our proxy flusher - let (proxy_tx, proxy_rx): ( - Sender, - Receiver, - ) = mpsc::channel(PROXY_PAYLOAD_CHANNEL_BUFFER_SIZE); + let (proxy_tx, proxy_rx): (Sender, Receiver) = + mpsc::channel(PROXY_PAYLOAD_CHANNEL_BUFFER_SIZE); // start our proxy flusher for profiling requests let proxy_flusher = self.proxy_flusher.clone(); diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index fe2d92e..6c06036 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -3,10 +3,10 @@ use bytes::Bytes; -use std::{sync::Arc}; -use tokio::sync::{mpsc::Receiver}; -use tracing::{debug, error}; use reqwest::header::HeaderMap; +use std::sync::Arc; +use tokio::sync::mpsc::Receiver; +use tracing::{debug, error}; use crate::config::Config; use crate::http_utils::build_client; @@ -19,7 +19,6 @@ pub struct ProxyRequest { } pub struct ProxyFlusher { - /// Handles forwarding proxy requests to Datadog with retry logic pub config: Arc, client: reqwest::Client, } @@ -39,8 +38,13 @@ impl ProxyFlusher { /// Starts the proxy flusher that listens for proxy payloads from the channel and forwards them to Datadog pub async fn start_proxy_flusher(&self, mut rx: Receiver) { + let Some(api_key) = 
self.config.proxy_intake.api_key.as_ref() else { + error!("Proxy Flusher | No API key configured, cannot start"); + return; + }; + while let Some(proxy_payload) = rx.recv().await { - self.send_request(proxy_payload).await; + self.send_request(proxy_payload, api_key).await; } } @@ -48,7 +52,7 @@ impl ProxyFlusher { &self, request: &ProxyRequest, api_key: &str, - ) -> reqwest::RequestBuilder { + ) -> Result { let mut headers = request.headers.clone(); // Remove headers that are not needed for the proxy request @@ -56,23 +60,34 @@ impl ProxyFlusher { headers.remove("content-length"); // Add headers to the request - headers.insert("DD-API-KEY", api_key.parse().expect("Failed to parse API key header")); + match api_key.parse() { + Ok(parsed_key) => headers.insert("DD-API-KEY", parsed_key), + Err(e) => return Err(format!("Failed to parse API key: {}", e)), + }; - self.client + Ok(self + .client .post(&request.target_url) .headers(headers) .timeout(std::time::Duration::from_secs(30)) - .body(request.body.clone()) + .body(request.body.clone())) } - async fn send_request(&self, request: ProxyRequest) { + async fn send_request(&self, request: ProxyRequest, api_key: &str) { const MAX_RETRIES: u32 = 3; let mut attempts = 0; loop { attempts += 1; - let request_builder = self.create_request(&request, self.config.proxy_intake.api_key.as_ref().unwrap()).await; + let request_builder = match self.create_request(&request, api_key).await { + Ok(builder) => builder, + Err(e) => { + error!("Proxy Flusher | {}", e); + return; + } + }; + let time = std::time::Instant::now(); let response = request_builder.send().await; let elapsed = time.elapsed(); @@ -83,7 +98,10 @@ impl ProxyFlusher { let status = r.status(); let body = r.text().await; if status == 202 || status == 200 { - debug!("Proxy Flusher | Successfully sent request in {} ms to {url}", elapsed.as_millis()); + debug!( + "Proxy Flusher | Successfully sent request in {} ms to {url}", + elapsed.as_millis() + ); } else { error!("Proxy Flusher | Request failed with status {status}: {body:?}"); } @@ -92,7 +110,10 @@ impl ProxyFlusher { Err(e) => { error!("Network error (attempt {}): {:?}", attempts, e); if attempts >= MAX_RETRIES { - error!("Proxy Flusher | Failed to send request after {} attempts: {:?}", attempts, e); + error!( + "Proxy Flusher | Failed to send request after {} attempts: {:?}", + attempts, e + ); return; }; } @@ -101,7 +122,5 @@ impl ProxyFlusher { let backoff_ms = 100 * (2_u64.pow(attempts - 1)); tokio::time::sleep(Duration::from_millis(backoff_ms)).await; } - - } + } } - From a38161247d50943817e0e9e7850c213b5bee9b0a Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 10:25:14 -0400 Subject: [PATCH 06/23] Fix reqwest dependency --- Cargo.lock | 209 +------------------------- crates/datadog-trace-agent/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 204 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9a353a2..54ae1e8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -318,16 +318,6 @@ dependencies = [ "unicode-xid", ] -[[package]] -name = "core-foundation" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "core-foundation" version = "0.10.1" @@ -660,15 +650,6 @@ version = "1.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" -[[package]] 
-name = "encoding_rs" -version = "0.8.35" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" -dependencies = [ - "cfg-if", -] - [[package]] name = "equivalent" version = "1.0.2" @@ -749,21 +730,6 @@ version = "1.0.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3f9eec918d3f24069decb9af1554cad7c880e2da24a9afd88aca000531ab82c1" -[[package]] -name = "foreign-types" -version = "0.3.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" -dependencies = [ - "foreign-types-shared", -] - -[[package]] -name = "foreign-types-shared" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" - [[package]] name = "form_urlencoded" version = "1.2.2" @@ -1146,22 +1112,6 @@ dependencies = [ "webpki-roots 1.0.3", ] -[[package]] -name = "hyper-tls" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70206fc6890eaca9fde8a0bf71caa2ddfc9fe045ac9e5c70df101a7dbde866e0" -dependencies = [ - "bytes", - "http-body-util", - "hyper", - "hyper-util", - "native-tls", - "tokio", - "tokio-native-tls", - "tower-service", -] - [[package]] name = "hyper-util" version = "0.1.17" @@ -1181,11 +1131,9 @@ dependencies = [ "percent-encoding", "pin-project-lite", "socket2", - "system-configuration", "tokio", "tower-service", "tracing", - "windows-registry", ] [[package]] @@ -1384,7 +1332,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d7c4b02199fee7c5d21a5ae7d8cfa79a6ef5bb2fc834d6e9058e89c825efdc55" dependencies = [ "cfg-if", - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -1503,23 +1451,6 @@ version = "0.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d87ecb2933e8aeadb3e3a02b828fed80a7528047e68b4f424523a0981a3a084" -[[package]] -name = "native-tls" -version = "0.2.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87de3442987e9dbec73158d5c715e7ad9072fda936bb03d19d7fa10e00520f0e" -dependencies = [ - "libc", - "log", - "openssl", - "openssl-probe", - "openssl-sys", - "schannel", - "security-framework 2.11.1", - "security-framework-sys", - "tempfile", -] - [[package]] name = "nix" version = "0.29.0" @@ -1566,50 +1497,12 @@ version = "1.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "42f5e15c9953c5e4ccceeb2e7382a716482c34515315f7b03532b8b4e8393d2d" -[[package]] -name = "openssl" -version = "0.10.74" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24ad14dd45412269e1a30f52ad8f0664f0f4f4a89ee8fe28c3b3527021ebb654" -dependencies = [ - "bitflags", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.106", -] - [[package]] name = "openssl-probe" version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d05e27ee213611ffe7d6348b942e8f942b37114c00cc03cec254295a4a17852e" -[[package]] -name = "openssl-sys" -version = "0.9.110" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a9f0075ba3c21b09f8e8b2026584b1d18d49388648f2fbbf3c97ea8deced8e2" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] - [[package]] name = "ordered-float" version = "4.6.0" @@ -1645,7 +1538,7 @@ dependencies = [ "libc", "redox_syscall", "smallvec", - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -2092,7 +1985,6 @@ checksum = "9d0946410b9f7b082a427e4ef5c8ff541a88b357bc6c637c40db3a68ac70a36f" dependencies = [ "base64", "bytes", - "encoding_rs", "futures-core", "h2", "http", @@ -2100,12 +1992,9 @@ dependencies = [ "http-body-util", "hyper", "hyper-rustls", - "hyper-tls", "hyper-util", "js-sys", "log", - "mime", - "native-tls", "percent-encoding", "pin-project-lite", "quinn", @@ -2116,7 +2005,6 @@ dependencies = [ "serde_urlencoded", "sync_wrapper", "tokio", - "tokio-native-tls", "tokio-rustls", "tower", "tower-http", @@ -2230,7 +2118,7 @@ dependencies = [ "openssl-probe", "rustls-pki-types", "schannel", - "security-framework 3.5.1", + "security-framework", ] [[package]] @@ -2294,19 +2182,6 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" -[[package]] -name = "security-framework" -version = "2.11.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" -dependencies = [ - "bitflags", - "core-foundation 0.9.4", - "core-foundation-sys", - "libc", - "security-framework-sys", -] - [[package]] name = "security-framework" version = "3.5.1" @@ -2314,7 +2189,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3297343eaf830f66ede390ea39da1d462b6b0c1b000f420d0a83f898bbbe6ef" dependencies = [ "bitflags", - "core-foundation 0.10.1", + "core-foundation", "core-foundation-sys", "libc", "security-framework-sys", @@ -2574,27 +2449,6 @@ dependencies = [ "syn 2.0.106", ] -[[package]] -name = "system-configuration" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c879d448e9d986b661742763247d3693ed13609438cf3d006f51f5368a5ba6b" -dependencies = [ - "bitflags", - "core-foundation 0.9.4", - "system-configuration-sys", -] - -[[package]] -name = "system-configuration-sys" -version = "0.6.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e1d1b10ced5ca923a1fcb8d03e96b8d3268065d724548c0211415ff6ac6bac4" -dependencies = [ - "core-foundation-sys", - "libc", -] - [[package]] name = "tabwriter" version = "1.4.1" @@ -2726,16 +2580,6 @@ dependencies = [ "syn 2.0.106", ] -[[package]] -name = "tokio-native-tls" -version = "0.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbae76ab933c85776efabc971569dd6119c580d8f5d448769dec1764bf796ef2" -dependencies = [ - "native-tls", - "tokio", -] - [[package]] name = "tokio-rustls" version = "0.26.4" @@ -3017,12 +2861,6 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65" -[[package]] -name = "vcpkg" -version = "0.2.15" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" - [[package]] name = "version_check" version = "0.9.5" @@ -3184,47 +3022,12 @@ dependencies = [ "rustix 0.38.44", ] -[[package]] -name = "windows-link" -version = "0.1.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" - [[package]] name = "windows-link" version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f0805222e57f7521d6a62e36fa9163bc891acd422f971defe97d64e70d0a4fe5" -[[package]] -name = "windows-registry" -version = "0.5.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b8a9ed28765efc97bbc954883f4e6796c33a06546ebafacbabee9696967499e" -dependencies = [ - "windows-link 0.1.3", - "windows-result", - "windows-strings", -] - -[[package]] -name = "windows-result" -version = "0.3.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" -dependencies = [ - "windows-link 0.1.3", -] - -[[package]] -name = "windows-strings" -version = "0.4.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" -dependencies = [ - "windows-link 0.1.3", -] - [[package]] name = "windows-sys" version = "0.52.0" @@ -3258,7 +3061,7 @@ version = "0.61.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae137229bcbd6cdf0f7b80a31df61766145077ddf49416a728b02cb3921ff3fc" dependencies = [ - "windows-link 0.2.1", + "windows-link", ] [[package]] @@ -3283,7 +3086,7 @@ version = "0.53.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4945f9f551b88e0d65f3db0bc25c33b8acea4d9e41163edf90dcd0b19f9069f3" dependencies = [ - "windows-link 0.2.1", + "windows-link", "windows_aarch64_gnullvm 0.53.1", "windows_aarch64_msvc 0.53.1", "windows_i686_gnu 0.53.1", diff --git a/crates/datadog-trace-agent/Cargo.toml b/crates/datadog-trace-agent/Cargo.toml index e4d2d07..150efb3 100644 --- a/crates/datadog-trace-agent/Cargo.toml +++ b/crates/datadog-trace-agent/Cargo.toml @@ -19,7 +19,7 @@ tracing = { version = "0.1", default-features = false } serde = { version = "1.0.145", features = ["derive"] } serde_json = "1.0" datadog-fips = { path = "../datadog-fips", default-features = false } -reqwest = { version = "0.12.23", features = ["json"] } +reqwest = { version = "0.12.23", features = ["json", "http2"], default-features = false } bytes = "1.10.1" ddcommon = { git = "https://github.com/DataDog/libdatadog/", rev = "902b6a5296963b96c4faf355f6c53eec263b7568" } datadog-trace-protobuf = { git = "https://github.com/DataDog/libdatadog/", rev = "902b6a5296963b96c4faf355f6c53eec263b7568" } From 0e4572678845067d7d3de24bfd9c40a5d29c3398 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 13:41:26 -0400 Subject: [PATCH 07/23] Fix license --- LICENSE-3rdparty.csv | 80 +++++++++---------- crates/datadog-trace-agent/src/mini_agent.rs | 4 +- .../datadog-trace-agent/src/proxy_flusher.rs | 9 ++- 3 files changed, 47 insertions(+), 46 deletions(-) diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index aa7392d..681c6b0 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -1,5 +1,4 @@ Component,Origin,License,Copyright -addr2line,https://github.com/gimli-rs/addr2line,Apache-2.0 OR MIT,The addr2line Authors adler2,https://github.com/oyvindln/adler2,0BSD OR MIT OR Apache-2.0,"Jonas Schievink , oyvindln " ahash,https://github.com/tkaitchuck/ahash,MIT OR Apache-2.0,Tom Kaitchuck aho-corasick,https://github.com/BurntSushi/aho-corasick,Unlicense OR MIT,Andrew Gallant @@ -11,8 +10,6 @@ 
async-trait,https://github.com/dtolnay/async-trait,MIT OR Apache-2.0,David Tolna atomic-waker,https://github.com/smol-rs/atomic-waker,Apache-2.0 OR MIT,"Stjepan Glavina , Contributors to futures-rs" aws-lc-rs,https://github.com/aws/aws-lc-rs,ISC AND (Apache-2.0 OR ISC),AWS-LibCrypto aws-lc-sys,https://github.com/aws/aws-lc-rs,ISC AND (Apache-2.0 OR ISC) AND OpenSSL,AWS-LC -backtrace,https://github.com/rust-lang/backtrace-rs,MIT OR Apache-2.0,The Rust Project Developers -base64,https://github.com/marshallpierce/rust-base64,MIT OR Apache-2.0,"Alice Maz , Marshall Pierce " base64,https://github.com/marshallpierce/rust-base64,MIT OR Apache-2.0,Marshall Pierce bit-set,https://github.com/contain-rs/bit-set,Apache-2.0 OR MIT,Alexis Beingessner bit-vec,https://github.com/contain-rs/bit-vec,Apache-2.0 OR MIT,Alexis Beingessner @@ -26,7 +23,7 @@ cargo-platform,https://github.com/rust-lang/cargo,MIT OR Apache-2.0,The cargo-pl cargo_metadata,https://github.com/oli-obk/cargo_metadata,MIT,Oliver Schneider cc,https://github.com/rust-lang/cc-rs,MIT OR Apache-2.0,Alex Crichton cexpr,https://github.com/jethrogb/rust-cexpr,Apache-2.0 OR MIT,Jethro Beekman -cfg-if,https://github.com/alexcrichton/cfg-if,MIT OR Apache-2.0,Alex Crichton +cfg-if,https://github.com/rust-lang/cfg-if,MIT OR Apache-2.0,Alex Crichton clang-sys,https://github.com/KyleMayes/clang-sys,Apache-2.0,Kyle Mayes concurrent-queue,https://github.com/smol-rs/concurrent-queue,Apache-2.0 OR MIT,"Stjepan Glavina , Taiki Endo , John Nunley " const_format,https://github.com/rodrimati1992/const_format_crates,Zlib,rodrimati1992 @@ -51,10 +48,11 @@ digest,https://github.com/RustCrypto/traits,MIT OR Apache-2.0,RustCrypto Develop displaydoc,https://github.com/yaahc/displaydoc,MIT OR Apache-2.0,Jane Lusby either,https://github.com/rayon-rs/either,MIT OR Apache-2.0,bluss equivalent,https://github.com/indexmap-rs/equivalent,Apache-2.0 OR MIT,The equivalent Authors -errno,https://github.com/lambda-fairy/rust-errno,MIT OR Apache-2.0,Chris Wong +errno,https://github.com/lambda-fairy/rust-errno,MIT OR Apache-2.0,"Chris Wong , Dan Gohman " event-listener,https://github.com/smol-rs/event-listener,Apache-2.0 OR MIT,"Stjepan Glavina , John Nunley " event-listener-strategy,https://github.com/smol-rs/event-listener-strategy,Apache-2.0 OR MIT,John Nunley fastrand,https://github.com/smol-rs/fastrand,Apache-2.0 OR MIT,Stjepan Glavina +find-msvc-tools,https://github.com/rust-lang/cc-rs,MIT OR Apache-2.0,The find-msvc-tools Authors fixedbitset,https://github.com/petgraph/fixedbitset,MIT OR Apache-2.0,bluss flate2,https://github.com/rust-lang/flate2-rs,MIT OR Apache-2.0,"Alex Crichton , Josh Triplett " float-cmp,https://github.com/mikedilger/float-cmp,MIT,Mike Dilger @@ -72,7 +70,6 @@ futures-timer,https://github.com/async-rs/futures-timer,MIT OR Apache-2.0,Alex C futures-util,https://github.com/rust-lang/futures-rs,MIT OR Apache-2.0,The futures-util Authors generic-array,https://github.com/fizyk20/generic-array,MIT,"Bartłomiej Kamiński , Aaron Trent " getrandom,https://github.com/rust-random/getrandom,MIT OR Apache-2.0,The Rand Project Developers -gimli,https://github.com/gimli-rs/gimli,MIT OR Apache-2.0,The gimli Authors glob,https://github.com/rust-lang/glob,MIT OR Apache-2.0,The Rust Project Developers h2,https://github.com/hyperium/h2,MIT,"Carl Lerche , Sean McArthur " hashbrown,https://github.com/rust-lang/hashbrown,MIT OR Apache-2.0,Amanieu d'Antras @@ -93,31 +90,29 @@ hyper-http-proxy,https://github.com/metalbear-co/hyper-http-proxy,MIT,MetalBear 
hyper-rustls,https://github.com/rustls/hyper-rustls,Apache-2.0 OR ISC OR MIT,The hyper-rustls Authors hyper-util,https://github.com/hyperium/hyper-util,MIT,Sean McArthur icu_collections,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers -icu_locid,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers -icu_locid_transform,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers -icu_locid_transform_data,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers +icu_locale_core,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers icu_normalizer,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers icu_normalizer_data,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers icu_properties,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers icu_properties_data,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers icu_provider,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers -icu_provider_macros,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers idna,https://github.com/servo/rust-url,MIT OR Apache-2.0,The rust-url developers idna_adapter,https://github.com/hsivonen/idna_adapter,Apache-2.0 OR MIT,The rust-url developers indexmap,https://github.com/indexmap-rs/indexmap,Apache-2.0 OR MIT,The indexmap Authors ipnet,https://github.com/krisprice/ipnet,MIT OR Apache-2.0,Kris Price +iri-string,https://github.com/lo48576/iri-string,MIT OR Apache-2.0,YOSHIOKA Takuma itertools,https://github.com/rust-itertools/itertools,MIT OR Apache-2.0,bluss itoa,https://github.com/dtolnay/itoa,MIT OR Apache-2.0,David Tolnay jobserver,https://github.com/rust-lang/jobserver-rs,MIT OR Apache-2.0,Alex Crichton -js-sys,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/js-sys,MIT OR Apache-2.0,The wasm-bindgen Developers +js-sys,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/js-sys,MIT OR Apache-2.0,The wasm-bindgen Developers lazy_static,https://github.com/rust-lang-nursery/lazy-static.rs,MIT OR Apache-2.0,Marvin Löbel -lazycell,https://github.com/indiv0/lazycell,MIT OR Apache-2.0,"Alex Crichton , Nikita Pekin " libc,https://github.com/rust-lang/libc,MIT OR Apache-2.0,The Rust Project Developers libloading,https://github.com/nagisa/rust_libloading,ISC,Simonas Kazlauskas linux-raw-sys,https://github.com/sunfishcode/linux-raw-sys,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,Dan Gohman litemap,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers lock_api,https://github.com/Amanieu/parking_lot,MIT OR Apache-2.0,Amanieu d'Antras log,https://github.com/rust-lang/log,MIT OR Apache-2.0,The Rust Project Developers +lru-slab,https://github.com/Ralith/lru-slab,MIT OR Apache-2.0 OR Zlib,Benjamin Saunders matchers,https://github.com/hawkw/matchers,MIT,Eliza Weisman memchr,https://github.com/BurntSushi/memchr,Unlicense OR MIT,"Andrew Gallant , bluss" mime,https://github.com/hyperium/mime,MIT OR Apache-2.0,Sean McArthur @@ -129,11 +124,9 @@ nix,https://github.com/nix-rust/nix,MIT,The nix-rust Project Developers nom,https://github.com/Geal/nom,MIT,contact@geoffroycouprie.com nu-ansi-term,https://github.com/nushell/nu-ansi-term,MIT,"ogham@bsago.me, Ryan Scheel (Havvy) , Josh Triplett , The Nushell Project Developers" num-traits,https://github.com/rust-num/num-traits,MIT OR Apache-2.0,The Rust Project Developers 
-object,https://github.com/gimli-rs/object,Apache-2.0 OR MIT,The object Authors once_cell,https://github.com/matklad/once_cell,MIT OR Apache-2.0,Aleksey Kladov openssl-probe,https://github.com/alexcrichton/openssl-probe,MIT OR Apache-2.0,Alex Crichton ordered-float,https://github.com/reem/rust-ordered-float,MIT,"Jonathan Reem , Matt Brubeck " -overload,https://github.com/danaugrs/overload,MIT,Daniel Salvadori parking,https://github.com/smol-rs/parking,Apache-2.0 OR MIT,"Stjepan Glavina , The Rust Project Developers" parking_lot,https://github.com/Amanieu/parking_lot,MIT OR Apache-2.0,Amanieu d'Antras parking_lot_core,https://github.com/Amanieu/parking_lot,MIT OR Apache-2.0,Amanieu d'Antras @@ -145,6 +138,7 @@ pin-project,https://github.com/taiki-e/pin-project,Apache-2.0 OR MIT,The pin-pro pin-project-internal,https://github.com/taiki-e/pin-project,Apache-2.0 OR MIT,The pin-project-internal Authors pin-project-lite,https://github.com/taiki-e/pin-project-lite,Apache-2.0 OR MIT,The pin-project-lite Authors pin-utils,https://github.com/rust-lang-nursery/pin-utils,MIT OR Apache-2.0,Josef Brandl +potential_utf,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers ppv-lite86,https://github.com/cryptocorrosion/cryptocorrosion,MIT OR Apache-2.0,The CryptoCorrosion Contributors prettyplease,https://github.com/dtolnay/prettyplease,MIT OR Apache-2.0,David Tolnay proc-macro-error,https://gitlab.com/CreepySkeleton/proc-macro-error,MIT OR Apache-2.0,CreepySkeleton @@ -169,25 +163,19 @@ rand_core,https://github.com/rust-random/rand,MIT OR Apache-2.0,"The Rand Projec rand_xorshift,https://github.com/rust-random/rngs,MIT OR Apache-2.0,"The Rand Project Developers, The Rust Project Developers" redox_syscall,https://gitlab.redox-os.org/redox-os/syscall,MIT,Jeremy Soller regex,https://github.com/rust-lang/regex,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " -regex-automata,https://github.com/BurntSushi/regex-automata,Unlicense OR MIT,Andrew Gallant -regex-automata,https://github.com/rust-lang/regex/tree/master/regex-automata,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " -regex-syntax,https://github.com/rust-lang/regex,MIT OR Apache-2.0,The Rust Project Developers -regex-syntax,https://github.com/rust-lang/regex/tree/master/regex-syntax,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " +regex-automata,https://github.com/rust-lang/regex,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " +regex-syntax,https://github.com/rust-lang/regex,MIT OR Apache-2.0,"The Rust Project Developers, Andrew Gallant " reqwest,https://github.com/seanmonstar/reqwest,MIT OR Apache-2.0,Sean McArthur ring,https://github.com/briansmith/ring,Apache-2.0 AND ISC,The ring Authors rmp,https://github.com/3Hren/msgpack-rust,MIT,Evgeny Safronov rmp-serde,https://github.com/3Hren/msgpack-rust,MIT,Evgeny Safronov rmpv,https://github.com/3Hren/msgpack-rust,MIT,Evgeny Safronov -rustc-demangle,https://github.com/rust-lang/rustc-demangle,MIT OR Apache-2.0,Alex Crichton -rustc-hash,https://github.com/rust-lang-nursery/rustc-hash,Apache-2.0 OR MIT,The Rust Project Developers rustc-hash,https://github.com/rust-lang/rustc-hash,Apache-2.0 OR MIT,The Rust Project Developers rustix,https://github.com/bytecodealliance/rustix,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,"Dan Gohman , Jakub Konka " rustls,https://github.com/rustls/rustls,Apache-2.0 OR ISC OR MIT,The rustls Authors rustls-native-certs,https://github.com/rustls/rustls-native-certs,Apache-2.0 OR ISC OR 
MIT,The rustls-native-certs Authors -rustls-pemfile,https://github.com/rustls/pemfile,Apache-2.0 OR ISC OR MIT,The rustls-pemfile Authors rustls-pki-types,https://github.com/rustls/pki-types,MIT OR Apache-2.0,The rustls-pki-types Authors rustls-webpki,https://github.com/rustls/webpki,ISC,The rustls-webpki Authors -rustversion,https://github.com/dtolnay/rustversion,MIT OR Apache-2.0,David Tolnay rusty-fork,https://github.com/altsysrq/rusty-fork,MIT OR Apache-2.0,Jason Lingle ryu,https://github.com/dtolnay/ryu,Apache-2.0 OR BSL-1.0,David Tolnay schannel,https://github.com/steffengy/schannel-rs,MIT,"Steven Fackler , Steffen Butzer " @@ -197,6 +185,7 @@ security-framework-sys,https://github.com/kornelski/rust-security-framework,MIT semver,https://github.com/dtolnay/semver,MIT OR Apache-2.0,David Tolnay serde,https://github.com/serde-rs/serde,MIT OR Apache-2.0,"Erick Tryzelaar , David Tolnay " serde_bytes,https://github.com/serde-rs/bytes,MIT OR Apache-2.0,David Tolnay +serde_core,https://github.com/serde-rs/serde,MIT OR Apache-2.0,"Erick Tryzelaar , David Tolnay " serde_derive,https://github.com/serde-rs/serde,MIT OR Apache-2.0,"Erick Tryzelaar , David Tolnay " serde_json,https://github.com/serde-rs/json,MIT OR Apache-2.0,"Erick Tryzelaar , David Tolnay " serde_regex,https://github.com/tailhook/serde-regex,MIT OR Apache-2.0,paul@colomiets.name @@ -206,6 +195,7 @@ sha1,https://github.com/RustCrypto/hashes,MIT OR Apache-2.0,RustCrypto Developer sharded-slab,https://github.com/hawkw/sharded-slab,MIT,Eliza Weisman shlex,https://github.com/comex/rust-shlex,MIT OR Apache-2.0,"comex , Fenhl , Adrian Taylor , Alex Touchet , Daniel Parks , Garrett Berg " signal-hook-registry,https://github.com/vorner/signal-hook,Apache-2.0 OR MIT,"Michal 'vorner' Vaner , Masaki Hara " +simd-adler32,https://github.com/mcountryman/simd-adler32,MIT,Marvin Countryman similar,https://github.com/mitsuhiko/similar,Apache-2.0,"Armin Ronacher , Pierre-Étienne Meunier , Brandon Williams " slab,https://github.com/tokio-rs/slab,MIT,Carl Lerche smallvec,https://github.com/servo/rust-smallvec,MIT OR Apache-2.0,The Servo Project Developers @@ -233,6 +223,7 @@ tokio-stream,https://github.com/tokio-rs/tokio,MIT,Tokio Contributors tonic,https://github.com/hyperium/tonic,MIT,Lucio Franco tower,https://github.com/tower-rs/tower,MIT,Tower Maintainers +tower-http,https://github.com/tower-rs/tower-http,MIT,Tower Maintainers tower-layer,https://github.com/tower-rs/tower,MIT,Tower Maintainers tower-service,https://github.com/tower-rs/tower,MIT,Tower Maintainers tracing,https://github.com/tokio-rs/tracing,MIT,"Eliza Weisman , Tokio Contributors " @@ -251,52 +242,53 @@ untrusted,https://github.com/briansmith/untrusted,ISC,Brian Smith , Bertram Truong " ustr,https://github.com/anderslanglands/ustr,BSD-2-Clause-Patent,Anders Langlands -utf16_iter,https://github.com/hsivonen/utf16_iter,Apache-2.0 OR MIT,Henri Sivonen utf8_iter,https://github.com/hsivonen/utf8_iter,Apache-2.0 OR MIT,Henri Sivonen valuable,https://github.com/tokio-rs/valuable,MIT,The valuable Authors wait-timeout,https://github.com/alexcrichton/wait-timeout,MIT OR Apache-2.0,Alex Crichton want,https://github.com/seanmonstar/want,MIT,Sean McArthur wasi,https://github.com/bytecodealliance/wasi,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,The Cranelift Project Developers -wasi,https://github.com/bytecodealliance/wasi-rs,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,The Cranelift Project Developers -wasm-bindgen,https://github.com/rustwasm/wasm-bindgen,MIT OR Apache-2.0,The 
wasm-bindgen Developers -wasm-bindgen-backend,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/backend,MIT OR Apache-2.0,The wasm-bindgen Developers -wasm-bindgen-futures,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/futures,MIT OR Apache-2.0,The wasm-bindgen Developers -wasm-bindgen-macro,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/macro,MIT OR Apache-2.0,The wasm-bindgen Developers -wasm-bindgen-macro-support,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/macro-support,MIT OR Apache-2.0,The wasm-bindgen Developers -wasm-bindgen-shared,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/shared,MIT OR Apache-2.0,The wasm-bindgen Developers -web-sys,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/web-sys,MIT OR Apache-2.0,The wasm-bindgen Developers +wasip2,https://github.com/bytecodealliance/wasi-rs,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,The wasip2 Authors +wasm-bindgen,https://github.com/wasm-bindgen/wasm-bindgen,MIT OR Apache-2.0,The wasm-bindgen Developers +wasm-bindgen-backend,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/backend,MIT OR Apache-2.0,The wasm-bindgen Developers +wasm-bindgen-futures,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/futures,MIT OR Apache-2.0,The wasm-bindgen Developers +wasm-bindgen-macro,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/macro,MIT OR Apache-2.0,The wasm-bindgen Developers +wasm-bindgen-macro-support,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/macro-support,MIT OR Apache-2.0,The wasm-bindgen Developers +wasm-bindgen-shared,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/shared,MIT OR Apache-2.0,The wasm-bindgen Developers +web-sys,https://github.com/wasm-bindgen/wasm-bindgen/tree/master/crates/web-sys,MIT OR Apache-2.0,The wasm-bindgen Developers web-time,https://github.com/daxpedda/web-time,MIT OR Apache-2.0,The web-time Authors -webpki-roots,https://github.com/rustls/webpki-roots,MPL-2.0,The webpki-roots Authors +webpki-roots,https://github.com/rustls/webpki-roots,CDLA-Permissive-2.0,The webpki-roots Authors which,https://github.com/harryfei/which-rs,MIT,Harry Fei -winapi,https://github.com/retep998/winapi-rs,MIT OR Apache-2.0,Peter Atashian -winapi-i686-pc-windows-gnu,https://github.com/retep998/winapi-rs,MIT OR Apache-2.0,Peter Atashian -winapi-x86_64-pc-windows-gnu,https://github.com/retep998/winapi-rs,MIT OR Apache-2.0,Peter Atashian -windows-link,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows-registry,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows-result,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows-strings,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows-link,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows-link Authors windows-sys,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows-sys,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows-sys Authors windows-targets,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows-targets,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows-targets Authors windows_aarch64_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_aarch64_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_aarch64_gnullvm Authors 
windows_aarch64_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_aarch64_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_aarch64_msvc Authors windows_i686_gnu,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_i686_gnu,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_i686_gnu Authors windows_i686_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_i686_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_i686_gnullvm Authors windows_i686_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_i686_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_i686_msvc Authors windows_x86_64_gnu,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_x86_64_gnu,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_x86_64_gnu Authors windows_x86_64_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows_x86_64_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_x86_64_gnullvm Authors windows_x86_64_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -wit-bindgen-rt,https://github.com/bytecodealliance/wit-bindgen,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,The wit-bindgen-rt Authors -write16,https://github.com/hsivonen/write16,Apache-2.0 OR MIT,The write16 Authors +windows_x86_64_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,The windows_x86_64_msvc Authors +wit-bindgen,https://github.com/bytecodealliance/wit-bindgen,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,Alex Crichton writeable,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers yoke,https://github.com/unicode-org/icu4x,Unicode-3.0,Manish Goregaokar yoke-derive,https://github.com/unicode-org/icu4x,Unicode-3.0,Manish Goregaokar -zerocopy,https://github.com/google/zerocopy,BSD-2-Clause OR Apache-2.0 OR MIT,Joshua Liebow-Feeser zerocopy,https://github.com/google/zerocopy,BSD-2-Clause OR Apache-2.0 OR MIT,"Joshua Liebow-Feeser , Jack Wrenn " -zerocopy-derive,https://github.com/google/zerocopy,BSD-2-Clause OR Apache-2.0 OR MIT,Joshua Liebow-Feeser zerocopy-derive,https://github.com/google/zerocopy,BSD-2-Clause OR Apache-2.0 OR MIT,"Joshua Liebow-Feeser , Jack Wrenn " zerofrom,https://github.com/unicode-org/icu4x,Unicode-3.0,Manish Goregaokar zerofrom-derive,https://github.com/unicode-org/icu4x,Unicode-3.0,Manish Goregaokar -zeroize,https://github.com/RustCrypto/utils/tree/master/zeroize,Apache-2.0 OR MIT,The RustCrypto Project Developers +zeroize,https://github.com/RustCrypto/utils,Apache-2.0 OR MIT,The RustCrypto Project Developers +zerotrie,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers zerovec,https://github.com/unicode-org/icu4x,Unicode-3.0,The ICU4X Project Developers zerovec-derive,https://github.com/unicode-org/icu4x,Unicode-3.0,Manish Goregaokar zstd,https://github.com/gyscos/zstd-rs,MIT,Alexandre Bury diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index 2004659..be0cfc5 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -243,7 +243,7 @@ impl MiniAgent { request: hyper_migration::HttpRequest, proxy_tx: Sender, ) -> http::Result { - debug!("Trace Agent | Proxied request for profiling"); + debug!("Trace Agent | Received 
profiling request"); // Extract headers and body let (parts, body) = request.into_parts(); @@ -265,6 +265,8 @@ impl MiniAgent { target_url: config.proxy_intake.url.to_string(), }; + debug!("Trace Agent | Sending profiling request to channel, target: {}", proxy_request.target_url); + // Send to channel match proxy_tx.send(proxy_request).await { Ok(_) => log_and_create_http_response( diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 6c06036..4eae626 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -25,6 +25,7 @@ pub struct ProxyFlusher { impl ProxyFlusher { pub fn new(config: Arc) -> Self { + debug!("Proxy Flusher | Creating new proxy flusher with target URL: {}", config.proxy_intake.url); let client = build_client(config.proxy_url.as_deref(), Duration::from_secs(30)) .unwrap_or_else(|e| { error!( @@ -43,7 +44,10 @@ impl ProxyFlusher { return; }; + debug!("Proxy Flusher | Started, listening for requests"); + while let Some(proxy_payload) = rx.recv().await { + debug!("Proxy Flusher | Received request from channel, body size: {} bytes", proxy_payload.body.len()); self.send_request(proxy_payload, api_key).await; } } @@ -88,6 +92,8 @@ impl ProxyFlusher { } }; + debug!("Proxy Flusher | Sending request (attempt {}/{})", attempts, MAX_RETRIES); + let time = std::time::Instant::now(); let response = request_builder.send().await; let elapsed = time.elapsed(); @@ -108,7 +114,7 @@ impl ProxyFlusher { return; } Err(e) => { - error!("Network error (attempt {}): {:?}", attempts, e); + error!("Proxy Flusher | Network error (attempt {}): {:?}", attempts, e); if attempts >= MAX_RETRIES { error!( "Proxy Flusher | Failed to send request after {} attempts: {:?}", @@ -120,6 +126,7 @@ impl ProxyFlusher { } // Exponential backoff let backoff_ms = 100 * (2_u64.pow(attempts - 1)); + debug!("Proxy Flusher | Retrying after {}ms backoff", backoff_ms); tokio::time::sleep(Duration::from_millis(backoff_ms)).await; } } From 420e805e8bec1818f677345bcadfbdd07d6353e5 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 13:42:27 -0400 Subject: [PATCH 08/23] nit: formatting --- crates/datadog-trace-agent/src/mini_agent.rs | 5 ++++- .../datadog-trace-agent/src/proxy_flusher.rs | 20 +++++++++++++++---- 2 files changed, 20 insertions(+), 5 deletions(-) diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index be0cfc5..3b1d349 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -265,7 +265,10 @@ impl MiniAgent { target_url: config.proxy_intake.url.to_string(), }; - debug!("Trace Agent | Sending profiling request to channel, target: {}", proxy_request.target_url); + debug!( + "Trace Agent | Sending profiling request to channel, target: {}", + proxy_request.target_url + ); // Send to channel match proxy_tx.send(proxy_request).await { diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 4eae626..d6c9f20 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -25,7 +25,10 @@ pub struct ProxyFlusher { impl ProxyFlusher { pub fn new(config: Arc) -> Self { - debug!("Proxy Flusher | Creating new proxy flusher with target URL: {}", config.proxy_intake.url); + debug!( + "Proxy Flusher | Creating new proxy flusher with target URL: {}", + config.proxy_intake.url 
+ ); let client = build_client(config.proxy_url.as_deref(), Duration::from_secs(30)) .unwrap_or_else(|e| { error!( @@ -47,7 +50,10 @@ impl ProxyFlusher { debug!("Proxy Flusher | Started, listening for requests"); while let Some(proxy_payload) = rx.recv().await { - debug!("Proxy Flusher | Received request from channel, body size: {} bytes", proxy_payload.body.len()); + debug!( + "Proxy Flusher | Received request from channel, body size: {} bytes", + proxy_payload.body.len() + ); self.send_request(proxy_payload, api_key).await; } } @@ -92,7 +98,10 @@ impl ProxyFlusher { } }; - debug!("Proxy Flusher | Sending request (attempt {}/{})", attempts, MAX_RETRIES); + debug!( + "Proxy Flusher | Sending request (attempt {}/{})", + attempts, MAX_RETRIES + ); let time = std::time::Instant::now(); let response = request_builder.send().await; @@ -114,7 +123,10 @@ impl ProxyFlusher { return; } Err(e) => { - error!("Proxy Flusher | Network error (attempt {}): {:?}", attempts, e); + error!( + "Proxy Flusher | Network error (attempt {}): {:?}", + attempts, e + ); if attempts >= MAX_RETRIES { error!( "Proxy Flusher | Failed to send request after {} attempts: {:?}", From 4698d0f76fcae275af3a40136b00a999f02508af Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 14:11:03 -0400 Subject: [PATCH 09/23] Renaming for clarity --- crates/datadog-trace-agent/src/config.rs | 11 ++++++----- crates/datadog-trace-agent/src/mini_agent.rs | 2 +- crates/datadog-trace-agent/src/proxy_flusher.rs | 4 ++-- crates/datadog-trace-agent/src/trace_processor.rs | 2 +- 4 files changed, 10 insertions(+), 9 deletions(-) diff --git a/crates/datadog-trace-agent/src/config.rs b/crates/datadog-trace-agent/src/config.rs index fa51c1f..0f93490 100644 --- a/crates/datadog-trace-agent/src/config.rs +++ b/crates/datadog-trace-agent/src/config.rs @@ -86,8 +86,8 @@ pub struct Config { pub trace_flush_interval: u64, pub trace_intake: Endpoint, pub trace_stats_intake: Endpoint, - /// the endpoint to forward proxy requests to - pub proxy_intake: Endpoint, + /// the endpoint to forward profiling requests to + pub profiling_intake: Endpoint, /// timeout for environment verification, in milliseconds pub verify_env_timeout: u64, pub proxy_url: Option, @@ -113,7 +113,8 @@ impl Config { // trace stats to) let mut trace_intake_url = trace_intake_url(&dd_site); let mut trace_stats_intake_url = trace_stats_url(&dd_site); - let proxy_intake_url = format!("https://intake.profile.{}/api/v2/profile", dd_site); + + let profiling_intake_url = format!("https://intake.profile.{}/api/v2/profile", dd_site); // DD_APM_DD_URL env var will primarily be used for integration tests // overrides the entire trace/trace stats intake url prefix @@ -155,8 +156,8 @@ impl Config { api_key: Some(api_key.clone()), ..Default::default() }, - proxy_intake: Endpoint { - url: hyper::Uri::from_str(&proxy_intake_url).unwrap(), + profiling_intake: Endpoint { + url: hyper::Uri::from_str(&profiling_intake_url).unwrap(), api_key: Some(api_key), ..Default::default() }, diff --git a/crates/datadog-trace-agent/src/mini_agent.rs b/crates/datadog-trace-agent/src/mini_agent.rs index 3b1d349..05a1281 100644 --- a/crates/datadog-trace-agent/src/mini_agent.rs +++ b/crates/datadog-trace-agent/src/mini_agent.rs @@ -262,7 +262,7 @@ impl MiniAgent { let proxy_request = ProxyRequest { headers: parts.headers, body: body_bytes, - target_url: config.proxy_intake.url.to_string(), + target_url: config.profiling_intake.url.to_string(), }; debug!( diff --git 
a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index d6c9f20..7d2ace7 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -27,7 +27,7 @@ impl ProxyFlusher { pub fn new(config: Arc) -> Self { debug!( "Proxy Flusher | Creating new proxy flusher with target URL: {}", - config.proxy_intake.url + config.profiling_intake.url ); let client = build_client(config.proxy_url.as_deref(), Duration::from_secs(30)) .unwrap_or_else(|e| { @@ -42,7 +42,7 @@ impl ProxyFlusher { /// Starts the proxy flusher that listens for proxy payloads from the channel and forwards them to Datadog pub async fn start_proxy_flusher(&self, mut rx: Receiver) { - let Some(api_key) = self.config.proxy_intake.api_key.as_ref() else { + let Some(api_key) = self.config.profiling_intake.api_key.as_ref() else { error!("Proxy Flusher | No API key configured, cannot start"); return; }; diff --git a/crates/datadog-trace-agent/src/trace_processor.rs b/crates/datadog-trace-agent/src/trace_processor.rs index 245632c..afd95e2 100644 --- a/crates/datadog-trace-agent/src/trace_processor.rs +++ b/crates/datadog-trace-agent/src/trace_processor.rs @@ -202,7 +202,7 @@ mod tests { os: "linux".to_string(), obfuscation_config: ObfuscationConfig::new().unwrap(), proxy_url: None, - proxy_intake: Endpoint { + profiling_intake: Endpoint { url: hyper::Uri::from_static("https://proxy.agent.notdog.com/proxy"), api_key: Some("dummy_api_key".into()), ..Default::default() From 3ec3d73fc8158b13853874bc57fde7262e076ae6 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 14:46:13 -0400 Subject: [PATCH 10/23] Minor refactoring --- crates/datadog-trace-agent/src/config.rs | 14 ++++++- .../datadog-trace-agent/src/proxy_flusher.rs | 41 +++++++++++-------- crates/dogstatsd/src/datadog.rs | 2 +- 3 files changed, 38 insertions(+), 19 deletions(-) diff --git a/crates/datadog-trace-agent/src/config.rs b/crates/datadog-trace-agent/src/config.rs index 0f93490..f0e321f 100644 --- a/crates/datadog-trace-agent/src/config.rs +++ b/crates/datadog-trace-agent/src/config.rs @@ -86,8 +86,16 @@ pub struct Config { pub trace_flush_interval: u64, pub trace_intake: Endpoint, pub trace_stats_intake: Endpoint, - /// the endpoint to forward profiling requests to + /// Profiling intake endpoint (for proxying profiling data to Datadog) pub profiling_intake: Endpoint, + /// HTTP client timeout for proxy requests, in milliseconds + pub proxy_client_timeout: u64, + /// Individual request timeout for proxy requests, in seconds + pub proxy_request_timeout: u64, + /// Maximum number of retry attempts for failed proxy requests + pub proxy_max_retries: u32, + /// Base backoff duration for proxy retries, in milliseconds + pub proxy_retry_backoff_base_ms: u64, /// timeout for environment verification, in milliseconds pub verify_env_timeout: u64, pub proxy_url: Option, @@ -143,6 +151,10 @@ impl Config { max_request_content_length: 10 * 1024 * 1024, // 10MB in Bytes trace_flush_interval: 3, stats_flush_interval: 3, + proxy_client_timeout: 30, + proxy_request_timeout: 30, + proxy_max_retries: 3, + proxy_retry_backoff_base_ms: 100, verify_env_timeout: 100, dd_dogstatsd_port, dd_site, diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 7d2ace7..0a15f43 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -29,14 +29,17 @@ impl ProxyFlusher { 
"Proxy Flusher | Creating new proxy flusher with target URL: {}", config.profiling_intake.url ); - let client = build_client(config.proxy_url.as_deref(), Duration::from_secs(30)) - .unwrap_or_else(|e| { - error!( - "Unable to parse proxy configuration: {}, no proxy will be used", - e - ); - reqwest::Client::new() - }); + let client = build_client( + config.proxy_url.as_deref(), + Duration::from_secs(config.proxy_client_timeout), + ) + .unwrap_or_else(|e| { + error!( + "Unable to parse proxy configuration: {}, no proxy will be used", + e + ); + reqwest::Client::new() + }); ProxyFlusher { config, client } } @@ -79,12 +82,14 @@ impl ProxyFlusher { .client .post(&request.target_url) .headers(headers) - .timeout(std::time::Duration::from_secs(30)) + .timeout(std::time::Duration::from_secs( + self.config.proxy_request_timeout, + )) .body(request.body.clone())) } async fn send_request(&self, request: ProxyRequest, api_key: &str) { - const MAX_RETRIES: u32 = 3; + let max_retries = self.config.proxy_max_retries; let mut attempts = 0; loop { @@ -100,7 +105,7 @@ impl ProxyFlusher { debug!( "Proxy Flusher | Sending request (attempt {}/{})", - attempts, MAX_RETRIES + attempts, max_retries ); let time = std::time::Instant::now(); @@ -123,23 +128,25 @@ impl ProxyFlusher { return; } Err(e) => { + // Only retry on network errors error!( "Proxy Flusher | Network error (attempt {}): {:?}", attempts, e ); - if attempts >= MAX_RETRIES { + if attempts >= max_retries { error!( "Proxy Flusher | Failed to send request after {} attempts: {:?}", attempts, e ); return; - }; + } + // Exponential backoff before retry + let backoff_ms = + self.config.proxy_retry_backoff_base_ms * (2_u64.pow(attempts - 1)); + debug!("Proxy Flusher | Retrying after {}ms backoff", backoff_ms); + tokio::time::sleep(Duration::from_millis(backoff_ms)).await; } } - // Exponential backoff - let backoff_ms = 100 * (2_u64.pow(attempts - 1)); - debug!("Proxy Flusher | Retrying after {}ms backoff", backoff_ms); - tokio::time::sleep(Duration::from_millis(backoff_ms)).await; } } } diff --git a/crates/dogstatsd/src/datadog.rs b/crates/dogstatsd/src/datadog.rs index f59d191..cf48224 100644 --- a/crates/dogstatsd/src/datadog.rs +++ b/crates/dogstatsd/src/datadog.rs @@ -293,7 +293,7 @@ pub enum RetryStrategy { fn build_client(https_proxy: Option, timeout: Duration) -> Result> { let mut builder = create_reqwest_client_builder()?.timeout(timeout); if let Some(proxy) = https_proxy { - builder = builder.proxy(reqwest::Proxy::https(proxy)?); + builder = builder.proxy(reqwest::Proxy::all(proxy)?); } Ok(builder.build()?) 
} From e63d8acc024d308d0a49dcf01f88231b9f87ec26 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 17 Oct 2025 15:03:34 -0400 Subject: [PATCH 11/23] fix --- crates/datadog-trace-agent/src/trace_processor.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/crates/datadog-trace-agent/src/trace_processor.rs b/crates/datadog-trace-agent/src/trace_processor.rs index afd95e2..77ae411 100644 --- a/crates/datadog-trace-agent/src/trace_processor.rs +++ b/crates/datadog-trace-agent/src/trace_processor.rs @@ -185,6 +185,10 @@ mod tests { max_request_content_length: 10 * 1024 * 1024, trace_flush_interval: 3, stats_flush_interval: 3, + proxy_client_timeout: 30, + proxy_request_timeout: 30, + proxy_max_retries: 3, + proxy_retry_backoff_base_ms: 100, verify_env_timeout: 100, trace_intake: Endpoint { url: hyper::Uri::from_static("https://trace.agent.notdog.com/traces"), From 638acb3752cdba9b6f63b5fb99f9f6285bd177b3 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 27 Oct 2025 11:29:20 -0400 Subject: [PATCH 12/23] Reduce request timeout --- crates/datadog-trace-agent/src/config.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/datadog-trace-agent/src/config.rs b/crates/datadog-trace-agent/src/config.rs index f0e321f..525f212 100644 --- a/crates/datadog-trace-agent/src/config.rs +++ b/crates/datadog-trace-agent/src/config.rs @@ -152,7 +152,7 @@ impl Config { trace_flush_interval: 3, stats_flush_interval: 3, proxy_client_timeout: 30, - proxy_request_timeout: 30, + proxy_request_timeout: 5, proxy_max_retries: 3, proxy_retry_backoff_base_ms: 100, verify_env_timeout: 100, From d529656e42671ab235e3342957a8d39e6e01c78f Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 27 Oct 2025 15:06:41 -0400 Subject: [PATCH 13/23] Add rustls-tls --- crates/datadog-trace-agent/Cargo.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/crates/datadog-trace-agent/Cargo.toml b/crates/datadog-trace-agent/Cargo.toml index 150efb3..e05355c 100644 --- a/crates/datadog-trace-agent/Cargo.toml +++ b/crates/datadog-trace-agent/Cargo.toml @@ -33,3 +33,6 @@ serial_test = "2.0.0" duplicate = "0.4.1" tempfile = "3.3.0" datadog-trace-utils = { git = "https://github.com/DataDog/libdatadog/", rev = "902b6a5296963b96c4faf355f6c53eec263b7568", features=["test-utils"] } + +[features] +default = [ "reqwest/rustls-tls" ] \ No newline at end of file From 6feb3b162c2563030c451283468d5d258b14fccd Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Wed, 5 Nov 2025 15:51:55 -0500 Subject: [PATCH 14/23] Revert to original dogstatsd build_client function --- crates/datadog-trace-agent/src/http_utils.rs | 2 +- crates/dogstatsd/src/datadog.rs | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/datadog-trace-agent/src/http_utils.rs b/crates/datadog-trace-agent/src/http_utils.rs index 9accc11..624516f 100644 --- a/crates/datadog-trace-agent/src/http_utils.rs +++ b/crates/datadog-trace-agent/src/http_utils.rs @@ -122,7 +122,7 @@ pub fn build_client( ) -> Result> { let mut builder = create_reqwest_client_builder()?.timeout(timeout); if let Some(proxy) = proxy_url { - builder = builder.proxy(reqwest::Proxy::all(proxy)?); + builder = builder.proxy(reqwest::Proxy::https(proxy)?); } Ok(builder.build()?) 
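Note: the series toggles between reqwest::Proxy::all and reqwest::Proxy::https in build_client. Proxy::https routes only https:// destinations through the proxy, while Proxy::all routes every scheme. A minimal sketch of the difference follows, assuming a reqwest 0.12 dependency; the proxy URL and helper name are placeholders, not part of the crate.

// Minimal sketch contrasting the two proxy modes touched by this series.
use std::time::Duration;

fn build_example_client(https_only: bool) -> Result<reqwest::Client, Box<dyn std::error::Error>> {
    let proxy_url = "http://localhost:3128"; // hypothetical proxy address
    let proxy = if https_only {
        reqwest::Proxy::https(proxy_url)? // proxies only https:// destinations
    } else {
        reqwest::Proxy::all(proxy_url)? // proxies every scheme, http:// included
    };
    Ok(reqwest::Client::builder()
        .proxy(proxy)
        .timeout(Duration::from_secs(30))
        .build()?)
}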
} diff --git a/crates/dogstatsd/src/datadog.rs b/crates/dogstatsd/src/datadog.rs index cf48224..f59d191 100644 --- a/crates/dogstatsd/src/datadog.rs +++ b/crates/dogstatsd/src/datadog.rs @@ -293,7 +293,7 @@ pub enum RetryStrategy { fn build_client(https_proxy: Option, timeout: Duration) -> Result> { let mut builder = create_reqwest_client_builder()?.timeout(timeout); if let Some(proxy) = https_proxy { - builder = builder.proxy(reqwest::Proxy::all(proxy)?); + builder = builder.proxy(reqwest::Proxy::https(proxy)?); } Ok(builder.build()?) } From a8a5a6b9a322e35630e5d1a417b4ea9e73938eeb Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 7 Nov 2025 14:46:47 -0500 Subject: [PATCH 15/23] Add additional tags --- .../datadog-trace-agent/src/proxy_flusher.rs | 19 +++++++++++++++++++ 1 file changed, 19 insertions(+) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 0a15f43..da0ae20 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -12,6 +12,8 @@ use crate::config::Config; use crate::http_utils::build_client; use core::time::Duration; +const DD_ADDITIONAL_TAGS_HEADER: &str = "X-Datadog-Additional-Tags"; + pub struct ProxyRequest { pub headers: HeaderMap, pub body: Bytes, @@ -78,6 +80,23 @@ impl ProxyFlusher { Err(e) => return Err(format!("Failed to parse API key: {}", e)), }; + // Add additional Azure Function-specific tags + let mut tag_parts = vec![ + format!("_dd.origin:azure_functions"), + format!("functionname:{}", self.config.app_name.as_deref().unwrap_or_default()), + ]; + + // Append aas.* tags + for (key, value) in self.config.tags.tags() { + if key.starts_with("aas.") { + tag_parts.push(format!("{}:{}", key, value)); + } + } + + let additional_tags = tag_parts.join(";"); + debug!("Proxy Flusher | Adding profiling tags: {}", additional_tags); + headers.insert(DD_ADDITIONAL_TAGS_HEADER, additional_tags.parse().expect("Failed to parse additional tags header")); + Ok(self .client .post(&request.target_url) From 5a8984a27f1a398bd73a170cb7d1bb4c466f43d0 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 7 Nov 2025 14:49:14 -0500 Subject: [PATCH 16/23] Formatting --- crates/datadog-trace-agent/src/proxy_flusher.rs | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index da0ae20..cd637af 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -83,7 +83,10 @@ impl ProxyFlusher { // Add additional Azure Function-specific tags let mut tag_parts = vec![ format!("_dd.origin:azure_functions"), - format!("functionname:{}", self.config.app_name.as_deref().unwrap_or_default()), + format!( + "functionname:{}", + self.config.app_name.as_deref().unwrap_or_default() + ), ]; // Append aas.* tags @@ -95,7 +98,12 @@ impl ProxyFlusher { let additional_tags = tag_parts.join(";"); debug!("Proxy Flusher | Adding profiling tags: {}", additional_tags); - headers.insert(DD_ADDITIONAL_TAGS_HEADER, additional_tags.parse().expect("Failed to parse additional tags header")); + headers.insert( + DD_ADDITIONAL_TAGS_HEADER, + additional_tags + .parse() + .expect("Failed to parse additional tags header"), + ); Ok(self .client From 6a64f402b8c2b7af7784878e1c940ec9da8a560a Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Fri, 7 Nov 2025 15:50:30 -0500 Subject: [PATCH 17/23] Avoid panicking --- 
crates/datadog-trace-agent/src/proxy_flusher.rs | 14 ++++++++------ 1 file changed, 8 insertions(+), 6 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index cd637af..4c6627c 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -98,12 +98,14 @@ impl ProxyFlusher { let additional_tags = tag_parts.join(";"); debug!("Proxy Flusher | Adding profiling tags: {}", additional_tags); - headers.insert( - DD_ADDITIONAL_TAGS_HEADER, - additional_tags - .parse() - .expect("Failed to parse additional tags header"), - ); + match additional_tags.parse() { + Ok(parsed_tags) => { + headers.insert(DD_ADDITIONAL_TAGS_HEADER, parsed_tags); + } + Err(e) => { + return Err(format!("Failed to parse additional tags header: {}", e)); + } + }; Ok(self .client From 4d2ddc2e5f81da30e588da9828f502c11eac70d7 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 10 Nov 2025 13:15:05 -0500 Subject: [PATCH 18/23] Add aas tags --- .../datadog-trace-agent/src/proxy_flusher.rs | 54 +++++++++++++++---- 1 file changed, 45 insertions(+), 9 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 4c6627c..0b7e4ea 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -3,6 +3,7 @@ use bytes::Bytes; +use ddcommon::azure_app_services; use reqwest::header::HeaderMap; use std::sync::Arc; use tokio::sync::mpsc::Receiver; @@ -69,17 +70,13 @@ impl ProxyFlusher { api_key: &str, ) -> Result { let mut headers = request.headers.clone(); + debug!("Hello from proxy flusher create_request"); // Remove headers that are not needed for the proxy request headers.remove("host"); headers.remove("content-length"); // Add headers to the request - match api_key.parse() { - Ok(parsed_key) => headers.insert("DD-API-KEY", parsed_key), - Err(e) => return Err(format!("Failed to parse API key: {}", e)), - }; - // Add additional Azure Function-specific tags let mut tag_parts = vec![ format!("_dd.origin:azure_functions"), @@ -89,10 +86,34 @@ impl ProxyFlusher { ), ]; - // Append aas.* tags - for (key, value) in self.config.tags.tags() { - if key.starts_with("aas.") { - tag_parts.push(format!("{}:{}", key, value)); + // Add aas.* tags from Azure App Services metadata if available + if let Some(aas_metadata) = &*azure_app_services::AAS_METADATA { + let aas_tags = [ + ("aas.resource.id", aas_metadata.get_resource_id()), + ( + "aas.environment.extension_version", + aas_metadata.get_extension_version(), + ), + ( + "aas.environment.instance_id", + aas_metadata.get_instance_id(), + ), + ( + "aas.environment.instance_name", + aas_metadata.get_instance_name(), + ), + ("aas.environment.os", aas_metadata.get_operating_system()), + ("aas.resource.group", aas_metadata.get_resource_group()), + ("aas.site.name", aas_metadata.get_site_name()), + ("aas.site.kind", aas_metadata.get_site_kind()), + ("aas.site.type", aas_metadata.get_site_type()), + ("aas.subscription.id", aas_metadata.get_subscription_id()), + ]; + + for (name, value) in aas_tags { + if !value.is_empty() { + tag_parts.push(format!("{}:{}", name, value)); + } } } @@ -107,6 +128,20 @@ impl ProxyFlusher { } }; + debug!("Proxy Flusher | Final headers being sent:"); + for (name, value) in &headers { + if name.as_str().to_lowercase().contains("key") || name.as_str().to_lowercase().contains("token") || name.as_str().to_lowercase().contains("secret") 
{ + continue; + } else { + debug!(" {}: {:?}", name, value); + } + } + + match api_key.parse() { + Ok(parsed_key) => headers.insert("DD-API-KEY", parsed_key), + Err(e) => return Err(format!("Failed to parse API key: {}", e)), + }; + Ok(self .client .post(&request.target_url) @@ -123,6 +158,7 @@ impl ProxyFlusher { loop { attempts += 1; + debug!("Proxy Flusher | Creating request. Gonna add tags"); let request_builder = match self.create_request(&request, api_key).await { Ok(builder) => builder, From 6c8a8db673ddf130848154e7fd999441d9e9e59e Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 10 Nov 2025 14:27:15 -0500 Subject: [PATCH 19/23] Fix aas tags --- .../datadog-trace-agent/src/proxy_flusher.rs | 19 +++++++++++++------ 1 file changed, 13 insertions(+), 6 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 0b7e4ea..143c819 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -87,13 +87,9 @@ impl ProxyFlusher { ]; // Add aas.* tags from Azure App Services metadata if available - if let Some(aas_metadata) = &*azure_app_services::AAS_METADATA { + if let Some(aas_metadata) = &*azure_app_services::AAS_METADATA_FUNCTION { let aas_tags = [ ("aas.resource.id", aas_metadata.get_resource_id()), - ( - "aas.environment.extension_version", - aas_metadata.get_extension_version(), - ), ( "aas.environment.instance_id", aas_metadata.get_instance_id(), @@ -102,12 +98,21 @@ impl ProxyFlusher { "aas.environment.instance_name", aas_metadata.get_instance_name(), ), + ("aas.subscription.id", aas_metadata.get_subscription_id()), ("aas.environment.os", aas_metadata.get_operating_system()), + ("aas.environment.runtime", aas_metadata.get_runtime()), + ( + "aas.environment.runtime_version", + aas_metadata.get_runtime_version(), + ), + ( + "aas.environment.function_runtime", + aas_metadata.get_function_runtime_version(), + ), ("aas.resource.group", aas_metadata.get_resource_group()), ("aas.site.name", aas_metadata.get_site_name()), ("aas.site.kind", aas_metadata.get_site_kind()), ("aas.site.type", aas_metadata.get_site_type()), - ("aas.subscription.id", aas_metadata.get_subscription_id()), ]; for (name, value) in aas_tags { @@ -115,6 +120,8 @@ impl ProxyFlusher { tag_parts.push(format!("{}:{}", name, value)); } } + } else { + debug!("Proxy Flusher | No Azure App Services metadata found"); } let additional_tags = tag_parts.join(";"); From 004562721cd29d5ecec81e278f62cac336b20fa2 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 10 Nov 2025 15:10:18 -0500 Subject: [PATCH 20/23] Fix additional tags in profiles --- crates/datadog-trace-agent/src/proxy_flusher.rs | 15 ++------------- 1 file changed, 2 insertions(+), 13 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 143c819..9ce81d9 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -76,8 +76,7 @@ impl ProxyFlusher { headers.remove("host"); headers.remove("content-length"); - // Add headers to the request - // Add additional Azure Function-specific tags + // Add headers to the request, including Azure Function-specific tags let mut tag_parts = vec![ format!("_dd.origin:azure_functions"), format!( @@ -124,8 +123,7 @@ impl ProxyFlusher { debug!("Proxy Flusher | No Azure App Services metadata found"); } - let additional_tags = tag_parts.join(";"); - debug!("Proxy Flusher | 
Adding profiling tags: {}", additional_tags); + let additional_tags = tag_parts.join(","); match additional_tags.parse() { Ok(parsed_tags) => { headers.insert(DD_ADDITIONAL_TAGS_HEADER, parsed_tags); @@ -135,15 +133,6 @@ impl ProxyFlusher { } }; - debug!("Proxy Flusher | Final headers being sent:"); - for (name, value) in &headers { - if name.as_str().to_lowercase().contains("key") || name.as_str().to_lowercase().contains("token") || name.as_str().to_lowercase().contains("secret") { - continue; - } else { - debug!(" {}: {:?}", name, value); - } - } - match api_key.parse() { Ok(parsed_key) => headers.insert("DD-API-KEY", parsed_key), Err(e) => return Err(format!("Failed to parse API key: {}", e)), From 8522ac07764c5e45f87179d07099356f14ddc7f5 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 10 Nov 2025 15:30:41 -0500 Subject: [PATCH 21/23] Remove unnecessary debug logs --- crates/datadog-trace-agent/src/proxy_flusher.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 9ce81d9..3ba4558 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -70,7 +70,6 @@ impl ProxyFlusher { api_key: &str, ) -> Result { let mut headers = request.headers.clone(); - debug!("Hello from proxy flusher create_request"); // Remove headers that are not needed for the proxy request headers.remove("host"); @@ -154,7 +153,6 @@ impl ProxyFlusher { loop { attempts += 1; - debug!("Proxy Flusher | Creating request. Gonna add tags"); let request_builder = match self.create_request(&request, api_key).await { Ok(builder) => builder, From c774b3bfc03732e12098ab6064082d53a9ce064e Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Wed, 12 Nov 2025 11:33:39 -0500 Subject: [PATCH 22/23] Refactor --- .../datadog-trace-agent/src/proxy_flusher.rs | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 3ba4558..56e8af4 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -75,16 +75,9 @@ impl ProxyFlusher { headers.remove("host"); headers.remove("content-length"); - // Add headers to the request, including Azure Function-specific tags - let mut tag_parts = vec![ - format!("_dd.origin:azure_functions"), - format!( - "functionname:{}", - self.config.app_name.as_deref().unwrap_or_default() - ), - ]; - + // Add headers to the request // Add aas.* tags from Azure App Services metadata if available + let mut tag_parts = vec![]; if let Some(aas_metadata) = &*azure_app_services::AAS_METADATA_FUNCTION { let aas_tags = [ ("aas.resource.id", aas_metadata.get_resource_id()), @@ -122,6 +115,13 @@ impl ProxyFlusher { debug!("Proxy Flusher | No Azure App Services metadata found"); } + // Add serverless-specific tags for profiling + tag_parts.push(format!( + "functionname:{}", + self.config.app_name.as_deref().unwrap_or_default() + )); + tag_parts.push("_dd.origin:azure_functions".to_string()); + let additional_tags = tag_parts.join(","); match additional_tags.parse() { Ok(parsed_tags) => { From cca8317244129aaa228730f3ad397e1a6a9b5954 Mon Sep 17 00:00:00 2001 From: Kathie Huang Date: Mon, 24 Nov 2025 18:44:19 -0500 Subject: [PATCH 23/23] Comment out aas.* tags for testing --- .../datadog-trace-agent/src/proxy_flusher.rs | 72 +++++++++---------- 1 file changed, 36 insertions(+), 36 
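Note: after patches 15 through 22, the profiling tags travel in a single X-Datadog-Additional-Tags header built as comma-separated key:value pairs (the separator settled on in patch 20) and parsed without panicking (patch 17). A hedged standalone sketch of that shape follows; the helper name is illustrative only, the tag values are the two serverless tags kept by the final refactor, and it assumes reqwest 0.12 re-exporting http's header types.

// Hedged sketch of the header construction pattern from patches 15-22.
use reqwest::header::{HeaderMap, HeaderValue};

fn additional_tags_header(function_name: &str) -> Result<HeaderMap, String> {
    let tag_parts = vec![
        format!("functionname:{}", function_name),
        "_dd.origin:azure_functions".to_string(),
    ];
    let joined = tag_parts.join(","); // comma-separated, as settled on in patch 20

    let mut headers = HeaderMap::new();
    match joined.parse::<HeaderValue>() {
        Ok(value) => {
            headers.insert("x-datadog-additional-tags", value);
            Ok(headers)
        }
        // Mirrors the non-panicking error path introduced in "Avoid panicking"
        Err(e) => Err(format!("Failed to parse additional tags header: {}", e)),
    }
}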
deletions(-) diff --git a/crates/datadog-trace-agent/src/proxy_flusher.rs b/crates/datadog-trace-agent/src/proxy_flusher.rs index 56e8af4..049c700 100644 --- a/crates/datadog-trace-agent/src/proxy_flusher.rs +++ b/crates/datadog-trace-agent/src/proxy_flusher.rs @@ -78,42 +78,42 @@ impl ProxyFlusher { // Add headers to the request // Add aas.* tags from Azure App Services metadata if available let mut tag_parts = vec![]; - if let Some(aas_metadata) = &*azure_app_services::AAS_METADATA_FUNCTION { - let aas_tags = [ - ("aas.resource.id", aas_metadata.get_resource_id()), - ( - "aas.environment.instance_id", - aas_metadata.get_instance_id(), - ), - ( - "aas.environment.instance_name", - aas_metadata.get_instance_name(), - ), - ("aas.subscription.id", aas_metadata.get_subscription_id()), - ("aas.environment.os", aas_metadata.get_operating_system()), - ("aas.environment.runtime", aas_metadata.get_runtime()), - ( - "aas.environment.runtime_version", - aas_metadata.get_runtime_version(), - ), - ( - "aas.environment.function_runtime", - aas_metadata.get_function_runtime_version(), - ), - ("aas.resource.group", aas_metadata.get_resource_group()), - ("aas.site.name", aas_metadata.get_site_name()), - ("aas.site.kind", aas_metadata.get_site_kind()), - ("aas.site.type", aas_metadata.get_site_type()), - ]; - - for (name, value) in aas_tags { - if !value.is_empty() { - tag_parts.push(format!("{}:{}", name, value)); - } - } - } else { - debug!("Proxy Flusher | No Azure App Services metadata found"); - } + // if let Some(aas_metadata) = &*azure_app_services::AAS_METADATA_FUNCTION { + // let aas_tags = [ + // ("aas.resource.id", aas_metadata.get_resource_id()), + // ( + // "aas.environment.instance_id", + // aas_metadata.get_instance_id(), + // ), + // ( + // "aas.environment.instance_name", + // aas_metadata.get_instance_name(), + // ), + // ("aas.subscription.id", aas_metadata.get_subscription_id()), + // ("aas.environment.os", aas_metadata.get_operating_system()), + // ("aas.environment.runtime", aas_metadata.get_runtime()), + // ( + // "aas.environment.runtime_version", + // aas_metadata.get_runtime_version(), + // ), + // ( + // "aas.environment.function_runtime", + // aas_metadata.get_function_runtime_version(), + // ), + // ("aas.resource.group", aas_metadata.get_resource_group()), + // ("aas.site.name", aas_metadata.get_site_name()), + // ("aas.site.kind", aas_metadata.get_site_kind()), + // ("aas.site.type", aas_metadata.get_site_type()), + // ]; + + // for (name, value) in aas_tags { + // if !value.is_empty() { + // tag_parts.push(format!("{}:{}", name, value)); + // } + // } + // } else { + // debug!("Proxy Flusher | No Azure App Services metadata found"); + // } // Add serverless-specific tags for profiling tag_parts.push(format!(