From 7c0072689fba435640e26e63d46343064c477b0f Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 12 Feb 2024 09:04:56 -0800 Subject: [PATCH 0001/1491] chore(dev): Add a note that GH usernames shouldn't start with @ (#19859) People seem to commonly include this. Signed-off-by: Jesse Szwedko --- changelog.d/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/changelog.d/README.md b/changelog.d/README.md index fd48a3439142f..d98be4201011a 100644 --- a/changelog.d/README.md +++ b/changelog.d/README.md @@ -76,6 +76,8 @@ The process for adding this is simply to have the last line of the file be in th authors: <username1>, <username2>, <...> +Do not include a leading `@` when specifying your username. + ## Example Here is an example of a changelog fragment that adds a breaking change explanation. From 8d897af2f621a0402678141a3a94e1196ea56037 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 12 Feb 2024 09:17:57 -0800 Subject: [PATCH 0002/1491] chore(api): Fix API address example (#19858) Only IP addresses can be used, not `localhost`. Signed-off-by: Jesse Szwedko --- website/cue/reference/api.cue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/cue/reference/api.cue b/website/cue/reference/api.cue index 47fc8a29109c7..bd7ac51d92ea8 100644 --- a/website/cue/reference/api.cue +++ b/website/cue/reference/api.cue @@ -29,7 +29,7 @@ api: { required: false type: string: { default: "127.0.0.1:8686" - examples: ["0.0.0.0:8686", "localhost:1234"] + examples: ["0.0.0.0:8686", "127.0.0.1:1234"] } description: """ The network address to which the API should bind. If you're running From e0d5f1e4dbd433165c525e941c95dd8eea2ebee6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 12 Feb 2024 18:09:28 +0000 Subject: [PATCH 0003/1491] chore(deps): Bump the aws group with 2 updates (#19848) Bumps the aws group with 2 updates: [aws-types](https://github.com/smithy-lang/smithy-rs) and [aws-sigv4](https://github.com/smithy-lang/smithy-rs). Updates `aws-types` from 1.1.4 to 1.1.5 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-sigv4` from 1.1.4 to 1.1.5 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-sigv4 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- Cargo.toml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 41193aa9a0145..4fbe22b96c49a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1022,9 +1022,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c371c6b0ac54d4605eb6f016624fb5c7c2925d315fdf600ac1bf21b19d5f1742" +checksum = "54b1cbe0eee57a213039088dbdeca7be9352f24e0d72332d961e8a1cb388f82d" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1199,9 +1199,9 @@ dependencies = [ [[package]] name = "aws-types" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "789bbe008e65636fe1b6dbbb374c40c8960d1232b96af5ff4aec349f9c4accf4" +checksum = "0ff7e122ee50ca962e9de91f5850cc37e2184b1219611eef6d44aa85929b54f6" dependencies = [ "aws-credential-types", "aws-smithy-async", diff --git a/Cargo.toml b/Cargo.toml index 5968f802b8134..c3be39242d8de 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -181,8 +181,8 @@ aws-sdk-cloudwatchlogs = { version = "1.3.0", default-features = false, features aws-sdk-elasticsearch = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-firehose = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } -aws-types = { version = "1.1.4", default-features = false, optional = true } -aws-sigv4 = { version = "1.1.4", default-features = false, features = ["sign-http"], optional = true } +aws-types = { version = "1.1.5", default-features = false, optional = true } +aws-sigv4 = { version = "1.1.5", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } aws-credential-types = { version = "1.1.5", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } From 1637e566c08f5dc2b09e5c85ad49a93762647c06 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 12 Feb 2024 10:25:25 -0800 Subject: [PATCH 0004/1491] chore(kubernetes): Bump manifests to chart v0.30.2 (#19860) Signed-off-by: Jesse Szwedko --- distribution/kubernetes/vector-agent/README.md | 2 +- distribution/kubernetes/vector-agent/configmap.yaml | 2 +- distribution/kubernetes/vector-agent/daemonset.yaml | 8 ++++---- distribution/kubernetes/vector-agent/rbac.yaml | 4 ++-- .../kubernetes/vector-agent/service-headless.yaml | 2 +- distribution/kubernetes/vector-agent/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/README.md | 2 +- distribution/kubernetes/vector-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-aggregator/service-headless.yaml | 2 +- distribution/kubernetes/vector-aggregator/service.yaml | 2 +- .../kubernetes/vector-aggregator/serviceaccount.yaml | 2 +- .../kubernetes/vector-aggregator/statefulset.yaml | 6 ++++-- .../kubernetes/vector-stateless-aggregator/README.md | 2 +- .../kubernetes/vector-stateless-aggregator/configmap.yaml | 2 +-
.../vector-stateless-aggregator/deployment.yaml | 6 ++++-- .../vector-stateless-aggregator/service-headless.yaml | 2 +- .../kubernetes/vector-stateless-aggregator/service.yaml | 2 +- .../vector-stateless-aggregator/serviceaccount.yaml | 2 +- 18 files changed, 28 insertions(+), 24 deletions(-) diff --git a/distribution/kubernetes/vector-agent/README.md b/distribution/kubernetes/vector-agent/README.md index 1d0d44aa044b0..6176f85ac41e1 100644 --- a/distribution/kubernetes/vector-agent/README.md +++ b/distribution/kubernetes/vector-agent/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.30.0 with the following `values.yaml`: +version 0.30.2 with the following `values.yaml`: ```yaml role: Agent diff --git a/distribution/kubernetes/vector-agent/configmap.yaml b/distribution/kubernetes/vector-agent/configmap.yaml index 6150a7c47969a..7a5abd7cdc10b 100644 --- a/distribution/kubernetes/vector-agent/configmap.yaml +++ b/distribution/kubernetes/vector-agent/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" data: agent.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-agent/daemonset.yaml b/distribution/kubernetes/vector-agent/daemonset.yaml index 8c4a3aa71728a..1ed9b9c70cf68 100644 --- a/distribution/kubernetes/vector-agent/daemonset.yaml +++ b/distribution/kubernetes/vector-agent/daemonset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: {} spec: selector: @@ -30,12 +30,14 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.35.0-distroless-libc" + image: "timberio/vector:0.35.1-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir - /etc/vector/ env: + - name: VECTOR_LOG + value: "info" - name: VECTOR_SELF_NODE_NAME valueFrom: fieldRef: @@ -52,8 +54,6 @@ spec: value: "/host/proc" - name: SYSFS_ROOT value: "/host/sys" - - name: VECTOR_LOG - value: "info" ports: - name: prom-exporter containerPort: 9090 diff --git a/distribution/kubernetes/vector-agent/rbac.yaml b/distribution/kubernetes/vector-agent/rbac.yaml index 21eaabb6ce505..773db8846b5dd 100644 --- a/distribution/kubernetes/vector-agent/rbac.yaml +++ b/distribution/kubernetes/vector-agent/rbac.yaml @@ -10,7 +10,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" rules: - apiGroups: - "" @@ -31,7 +31,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole diff --git a/distribution/kubernetes/vector-agent/service-headless.yaml b/distribution/kubernetes/vector-agent/service-headless.yaml index e716b46b5fbf8..72e14567d171b 100644 --- a/distribution/kubernetes/vector-agent/service-headless.yaml +++ 
b/distribution/kubernetes/vector-agent/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-agent/serviceaccount.yaml b/distribution/kubernetes/vector-agent/serviceaccount.yaml index 01b0cd040a753..1472923230116 100644 --- a/distribution/kubernetes/vector-agent/serviceaccount.yaml +++ b/distribution/kubernetes/vector-agent/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/README.md b/distribution/kubernetes/vector-aggregator/README.md index c770d0c0c52d5..7b9fa8055150a 100644 --- a/distribution/kubernetes/vector-aggregator/README.md +++ b/distribution/kubernetes/vector-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.30.0 with the following `values.yaml`: +version 0.30.2 with the following `values.yaml`: ```yaml diff --git a/distribution/kubernetes/vector-aggregator/configmap.yaml b/distribution/kubernetes/vector-aggregator/configmap.yaml index 08d8098f39f59..04a8025d10fec 100644 --- a/distribution/kubernetes/vector-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-aggregator/service-headless.yaml b/distribution/kubernetes/vector-aggregator/service-headless.yaml index a0bf61c348020..232c59389131f 100644 --- a/distribution/kubernetes/vector-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-aggregator/service.yaml b/distribution/kubernetes/vector-aggregator/service.yaml index c3cd56a8cd882..6d1243421541b 100644 --- a/distribution/kubernetes/vector-aggregator/service.yaml +++ b/distribution/kubernetes/vector-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml index 0c52d00aadc09..6e1aad1fea6bb 100644 --- a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml @@ 
-8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/statefulset.yaml b/distribution/kubernetes/vector-aggregator/statefulset.yaml index 0620b2c8150ff..00cbf034de9db 100644 --- a/distribution/kubernetes/vector-aggregator/statefulset.yaml +++ b/distribution/kubernetes/vector-aggregator/statefulset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: {} spec: replicas: 1 @@ -32,12 +32,14 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.35.0-distroless-libc" + image: "timberio/vector:0.35.1-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir - /etc/vector/ env: + - name: VECTOR_LOG + value: "info" ports: - name: datadog-agent containerPort: 8282 diff --git a/distribution/kubernetes/vector-stateless-aggregator/README.md b/distribution/kubernetes/vector-stateless-aggregator/README.md index 63c9fcb6eebd3..5bb86c423daac 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/README.md +++ b/distribution/kubernetes/vector-stateless-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.30.0 with the following `values.yaml`: +version 0.30.2 with the following `values.yaml`: ```yaml role: Stateless-Aggregator diff --git a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml index 5fd55085636cd..09c0aa160bf25 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml index 587e5771c7c58..e156ce9f59614 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: {} spec: replicas: 1 @@ -30,12 +30,14 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.35.0-distroless-libc" + image: "timberio/vector:0.35.1-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir - /etc/vector/ env: + - name: VECTOR_LOG + value: "info" ports: - name: datadog-agent containerPort: 8282 diff --git a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml 
b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml index 4c6cd4476db5a..b9d2a1e783a3f 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-stateless-aggregator/service.yaml b/distribution/kubernetes/vector-stateless-aggregator/service.yaml index 39b7bb4aad0b0..f6d52b1e7ea23 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml index 118ff15855b4f..c9c4d424df84b 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.0-distroless-libc" + app.kubernetes.io/version: "0.35.1-distroless-libc" automountServiceAccountToken: true From 79ab38947f5869afe154f83cf15868c01b43ac4b Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 12 Feb 2024 16:20:01 -0500 Subject: [PATCH 0005/1491] fix(codecs): expose VRL deserializer options (#19862) --- lib/codecs/src/decoding/format/vrl.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/codecs/src/decoding/format/vrl.rs b/lib/codecs/src/decoding/format/vrl.rs index 72e5649758227..63a127955ee87 100644 --- a/lib/codecs/src/decoding/format/vrl.rs +++ b/lib/codecs/src/decoding/format/vrl.rs @@ -17,7 +17,7 @@ use vrl::value::Kind; #[derive(Debug, Clone, Default)] pub struct VrlDeserializerConfig { /// VRL-specific decoding options. - vrl: VrlDeserializerOptions, + pub vrl: VrlDeserializerOptions, } /// VRL-specific decoding options. 
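The `fix(codecs)` patch above (PATCH 0005) makes the `vrl` field of `VrlDeserializerConfig` public, which puts the VRL-specific decoding options within reach of code and configuration that use the `decoding.codec = "vrl"` feature noted later in this series' 0.36.0 changelog. As a rough illustration only, here is a minimal sketch of a source configured to decode with VRL; the `decoding.vrl.source` key is assumed from the `VrlDeserializerOptions` name in the diff, and the sketch further assumes the program receives the raw line as `.` — check the published option docs for the exact field names.

```yaml
# Hypothetical sketch of a Vector source using the VRL decoder.
# `decoding.vrl.source` is assumed from `VrlDeserializerOptions`;
# verify the exact option names against the published documentation.
sources:
  tcp_in:
    type: socket
    mode: tcp
    address: "0.0.0.0:9000"
    decoding:
      codec: vrl
      vrl:
        source: |
          # Assumption of this sketch: the decoder hands the raw line to
          # the program as `.`. Parse it as JSON and emit the parsed object.
          . = parse_json!(string!(.))
```

The diff itself suggests the motivation for the change: with a private `vrl` field, code outside the codecs crate could only construct the config with its defaults, whereas `pub vrl` lets callers set the options programmatically.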
From e8401c473fb0334c36ac91a411392f1ac7ae9ce5 Mon Sep 17 00:00:00 2001 From: Pavlos Rontidis Date: Mon, 12 Feb 2024 17:06:15 -0500 Subject: [PATCH 0006/1491] chore(tests): expose test utils (feature flag) (#19863) * chore: expose source sender test utils (feature flag) * test_utils feature --- Cargo.toml | 2 +- src/source_sender/mod.rs | 14 +++++++------- src/test_util/mod.rs | 2 +- 3 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.toml b/Cargo.toml index c3be39242d8de..1f79e9945a889 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -856,7 +856,7 @@ webhdfs-integration-tests = ["sinks-webhdfs"] disable-resolv-conf = [] shutdown-tests = ["api", "sinks-blackhole", "sinks-console", "sinks-prometheus", "sources", "transforms-lua", "transforms-remap", "unix"] cli-tests = ["sinks-blackhole", "sinks-socket", "sources-demo_logs", "sources-file"] -component-test-utils = [] +test-utils = [] # End-to-End testing-related features all-e2e-tests = [ diff --git a/src/source_sender/mod.rs b/src/source_sender/mod.rs index c0c8322df5b52..695d4015c0402 100644 --- a/src/source_sender/mod.rs +++ b/src/source_sender/mod.rs @@ -8,7 +8,7 @@ use tracing::Span; use vector_lib::buffers::topology::channel::{self, LimitedReceiver, LimitedSender}; use vector_lib::buffers::EventCount; use vector_lib::event::array::EventArrayIntoIter; -#[cfg(test)] +#[cfg(any(test, feature = "test-utils"))] use vector_lib::event::{into_event_stream, EventStatus}; use vector_lib::finalization::{AddBatchNotifier, BatchNotifier}; use vector_lib::internal_event::{ComponentEventsDropped, UNINTENTIONAL}; @@ -31,7 +31,7 @@ pub use errors::{ClosedError, StreamSendError}; pub(crate) const CHUNK_SIZE: usize = 1000; -#[cfg(test)] +#[cfg(any(test, feature = "test-utils"))] const TEST_BUFFER_SIZE: usize = 100; const LAG_TIME_NAME: &str = "source_lag_time_seconds"; @@ -174,7 +174,7 @@ impl SourceSender { } } - #[cfg(test)] + #[cfg(any(test, feature = "test-utils"))] pub fn new_test_sender_with_buffer(n: usize) -> (Self, LimitedReceiver) { let lag_time = Some(register_histogram!(LAG_TIME_NAME)); let output_id = OutputId { @@ -192,14 +192,14 @@ impl SourceSender { ) } - #[cfg(test)] + #[cfg(any(test, feature = "test-utils"))] pub fn new_test() -> (Self, impl Stream + Unpin) { let (pipe, recv) = Self::new_test_sender_with_buffer(TEST_BUFFER_SIZE); let recv = recv.into_stream().flat_map(into_event_stream); (pipe, recv) } - #[cfg(test)] + #[cfg(any(test, feature = "test-utils"))] pub fn new_test_finalize(status: EventStatus) -> (Self, impl Stream + Unpin) { let (pipe, recv) = Self::new_test_sender_with_buffer(TEST_BUFFER_SIZE); // In a source test pipeline, there is no sink to acknowledge @@ -216,7 +216,7 @@ impl SourceSender { (pipe, recv) } - #[cfg(test)] + #[cfg(any(test, feature = "test-utils"))] pub fn new_test_errors( error_at: impl Fn(usize) -> bool, ) -> (Self, impl Stream + Unpin) { @@ -242,7 +242,7 @@ impl SourceSender { (pipe, recv) } - #[cfg(test)] + #[cfg(any(test, feature = "test-utils"))] pub fn add_outputs( &mut self, status: EventStatus, diff --git a/src/test_util/mod.rs b/src/test_util/mod.rs index 769715e237163..7197ddfbd3692 100644 --- a/src/test_util/mod.rs +++ b/src/test_util/mod.rs @@ -53,7 +53,7 @@ const WAIT_FOR_SECS: u64 = 5; // The default time to wait in `wait_for` const WAIT_FOR_MIN_MILLIS: u64 = 5; // The minimum time to pause before retrying const WAIT_FOR_MAX_MILLIS: u64 = 500; // The maximum time to pause before retrying -#[cfg(any(test, feature = "component-test-utils"))] +#[cfg(any(test, feature = 
"test-utils"))] pub mod components; #[cfg(test)] From 99c2207932894d362975fa81000b4819d5e7bb52 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 06:43:08 -0800 Subject: [PATCH 0007/1491] chore(deps): Bump chrono-tz from 0.8.5 to 0.8.6 (#19866) Bumps [chrono-tz](https://github.com/chronotope/chrono-tz) from 0.8.5 to 0.8.6. - [Release notes](https://github.com/chronotope/chrono-tz/releases) - [Changelog](https://github.com/chronotope/chrono-tz/blob/main/CHANGELOG.md) - [Commits](https://github.com/chronotope/chrono-tz/commits) --- updated-dependencies: - dependency-name: chrono-tz dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/vector-common/Cargo.toml | 2 +- lib/vector-config/Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4fbe22b96c49a..cfe6e001913a8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1876,9 +1876,9 @@ dependencies = [ [[package]] name = "chrono-tz" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91d7b79e99bfaa0d47da0687c43aa3b7381938a62ad3a6498599039321f660b7" +checksum = "d59ae0466b83e838b81a54256c39d5d7c20b9d7daa10510a242d9b75abd5936e" dependencies = [ "chrono", "chrono-tz-build", diff --git a/Cargo.toml b/Cargo.toml index 1f79e9945a889..0b1aeae9bb4a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -256,7 +256,7 @@ bollard = { version = "0.15.0", default-features = false, features = ["ssl", "ch bytes = { version = "1.5.0", default-features = false, features = ["serde"] } bytesize = { version = "1.3.0", default-features = false } chrono.workspace = true -chrono-tz = { version = "0.8.5", default-features = false } +chrono-tz = { version = "0.8.6", default-features = false } cidr-utils = { version = "0.6.1", default-features = false } colored = { version = "2.1.0", default-features = false } csv = { version = "1.3", default-features = false } diff --git a/lib/vector-common/Cargo.toml b/lib/vector-common/Cargo.toml index 012f258172b62..318e3b9d427df 100644 --- a/lib/vector-common/Cargo.toml +++ b/lib/vector-common/Cargo.toml @@ -41,7 +41,7 @@ tokenize = [ [dependencies] async-stream = "0.3.5" bytes = { version = "1.5.0", default-features = false, optional = true } -chrono-tz = { version = "0.8.5", default-features = false, features = ["serde"] } +chrono-tz = { version = "0.8.6", default-features = false, features = ["serde"] } chrono.workspace = true crossbeam-utils = { version = "0.8.19", default-features = false } derivative = { version = "2.2.0", default-features = false } diff --git a/lib/vector-config/Cargo.toml b/lib/vector-config/Cargo.toml index 3bf49624200e2..a1a84348d4487 100644 --- a/lib/vector-config/Cargo.toml +++ b/lib/vector-config/Cargo.toml @@ -12,7 +12,7 @@ path = "tests/integration/lib.rs" [dependencies] chrono.workspace = true -chrono-tz = { version = "0.8.5", default-features = false } +chrono-tz = { version = "0.8.6", default-features = false } encoding_rs = { version = "0.8", default-features = false, features = ["alloc", "serde"] } indexmap.workspace = true inventory = { version = "0.3" } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index f3746c502b2c4..c555a8f610e9a 100644 --- a/lib/vector-core/Cargo.toml +++ 
b/lib/vector-core/Cargo.toml @@ -75,7 +75,7 @@ prost-build = "0.12" [dev-dependencies] base64 = "0.21.7" -chrono-tz = { version = "0.8.5", default-features = false } +chrono-tz = { version = "0.8.6", default-features = false } criterion = { version = "0.5.1", features = ["html_reports"] } env-test-util = "1.0.1" quickcheck = "1" From c654207d5a41c8ec9fff4ac497ac3cec7a40c55c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 06:43:19 -0800 Subject: [PATCH 0008/1491] chore(deps): Bump crc32fast from 1.3.2 to 1.4.0 (#19867) Bumps [crc32fast](https://github.com/srijs/rust-crc32fast) from 1.3.2 to 1.4.0. - [Commits](https://github.com/srijs/rust-crc32fast/compare/v1.3.2...v1.4.0) --- updated-dependencies: - dependency-name: crc32fast dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/vector-buffers/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cfe6e001913a8..2f7a5d72e3fa6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2340,9 +2340,9 @@ dependencies = [ [[package]] name = "crc32fast" -version = "1.3.2" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b540bd8bc810d3885c6ea91e2018302f68baba2129ab3e88f32389ee9370880d" +checksum = "b3855a8a784b474f333699ef2bbca9db2c4a1f6d9088a90a2d25b1eb53111eaa" dependencies = [ "cfg-if", ] diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index 2b617da56325e..5b6a70bef6a8e 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -11,7 +11,7 @@ async-stream = "0.3.5" async-trait = { version = "0.1", default-features = false } bytecheck = { version = "0.6.9", default-features = false, features = ["std"] } bytes = { version = "1.5.0", default-features = false } -crc32fast = { version = "1.3.2", default-features = false } +crc32fast = { version = "1.4.0", default-features = false } crossbeam-queue = { version = "0.3.11", default-features = false, features = ["std"] } crossbeam-utils = { version = "0.8.19", default-features = false } derivative = { version = "2.2.0", default-features = false } From 0922c3f67f57e2d8c29029a91e1f60ab4d699f50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 13 Feb 2024 14:43:34 +0000 Subject: [PATCH 0009/1491] chore(deps): Bump ratatui from 0.26.0 to 0.26.1 (#19868) Bumps [ratatui](https://github.com/ratatui-org/ratatui) from 0.26.0 to 0.26.1. - [Release notes](https://github.com/ratatui-org/ratatui/releases) - [Changelog](https://github.com/ratatui-org/ratatui/blob/main/CHANGELOG.md) - [Commits](https://github.com/ratatui-org/ratatui/compare/v0.26.0...v0.26.1) --- updated-dependencies: - dependency-name: ratatui dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2f7a5d72e3fa6..5801565f67b8f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7139,9 +7139,9 @@ dependencies = [ [[package]] name = "ratatui" -version = "0.26.0" +version = "0.26.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "154b85ef15a5d1719bcaa193c3c81fe645cd120c156874cd660fe49fd21d1373" +checksum = "bcb12f8fbf6c62614b0d56eb352af54f6a22410c3b079eb53ee93c7b97dd31d8" dependencies = [ "bitflags 2.4.1", "cassowary", diff --git a/Cargo.toml b/Cargo.toml index 0b1aeae9bb4a5..e7c51ecce33b0 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -235,7 +235,7 @@ async-graphql-warp = { version = "7.0.1", default-features = false, optional = t crossterm = { version = "0.27.0", default-features = false, features = ["event-stream", "windows"], optional = true } num-format = { version = "0.4.4", default-features = false, features = ["with-num-bigint"], optional = true } number_prefix = { version = "0.4.0", default-features = false, features = ["std"], optional = true } -ratatui = { version = "0.26.0", optional = true, default-features = false, features = ["crossterm"] } +ratatui = { version = "0.26.1", optional = true, default-features = false, features = ["crossterm"] } # Datadog Pipelines From 753466fb4bf663796854156c0ddaedaf0cc9bc9c Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 5 Feb 2024 14:08:03 -0800 Subject: [PATCH 0010/1491] chore(releasing): Prepare v0.36.0 release Signed-off-by: Jesse Szwedko --- ...d_prometheus_pushgateway_source.feature.md | 5 - .../15813_sink_clickhouse_format.feature.md | 3 - ...643_only_set_connection_close_http1.fix.md | 3 - ..._s3_source_delete_configuration.feature.md | 3 - ...s_svcb_record_types_support.enhancement.md | 2 - .../19812_journald_metadata_missing.fix.md | 4 - changelog.d/19825.feature.md | 2 - changelog.d/bump_alpine_3_19.enhancement.md | 1 - .../validate_skip_healthchecks.enhancement.md | 5 - distribution/install.sh | 2 +- .../2024-02-13-0-36-0-upgrade-guide.md | 12 + website/content/en/releases/0.36.0.md | 4 + .../administration/interfaces/kubectl.cue | 2 +- website/cue/reference/releases/0.36.0.cue | 369 ++++++++++++++++++ website/cue/reference/versions.cue | 1 + 15 files changed, 388 insertions(+), 30 deletions(-) delete mode 100644 changelog.d/10304_add_prometheus_pushgateway_source.feature.md delete mode 100644 changelog.d/15813_sink_clickhouse_format.feature.md delete mode 100644 changelog.d/19643_only_set_connection_close_http1.fix.md delete mode 100644 changelog.d/19711_add_s3_source_delete_configuration.feature.md delete mode 100644 changelog.d/19785_https_svcb_record_types_support.enhancement.md delete mode 100644 changelog.d/19812_journald_metadata_missing.fix.md delete mode 100644 changelog.d/19825.feature.md delete mode 100644 changelog.d/bump_alpine_3_19.enhancement.md delete mode 100644 changelog.d/validate_skip_healthchecks.enhancement.md create mode 100644 website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md create mode 100644 website/content/en/releases/0.36.0.md create mode 100644 website/cue/reference/releases/0.36.0.cue diff --git a/changelog.d/10304_add_prometheus_pushgateway_source.feature.md b/changelog.d/10304_add_prometheus_pushgateway_source.feature.md deleted file mode 100644 index 7921fecc0b10f..0000000000000 --- 
a/changelog.d/10304_add_prometheus_pushgateway_source.feature.md +++ /dev/null @@ -1,5 +0,0 @@ -Vector can now emulate a [Prometheus Pushgateway](https://github.com/prometheus/pushgateway) through the new `prometheus_pushgateway` source. Counters and histograms can optionally be aggregated across pushes to support use-cases like cron jobs. - -There are some caveats, which are listed [here](https://github.com/Sinjo/vector/blob/0d4fc20091ddae7f3562bfdf07c9095c0c7223e0/src/sources/prometheus/pushgateway.rs#L8-L12). - -authors: Sinjo diff --git a/changelog.d/15813_sink_clickhouse_format.feature.md b/changelog.d/15813_sink_clickhouse_format.feature.md deleted file mode 100644 index eda8c61498989..0000000000000 --- a/changelog.d/15813_sink_clickhouse_format.feature.md +++ /dev/null @@ -1,3 +0,0 @@ -The `clickhouse` sink now supports `format`. This can be used to specify the data [format](https://clickhouse.com/docs/en/interfaces/formats) provided to `INSERT`s. The default is `JSONEachRow`. - -authors: gabriel376 diff --git a/changelog.d/19643_only_set_connection_close_http1.fix.md b/changelog.d/19643_only_set_connection_close_http1.fix.md deleted file mode 100644 index b649ed777d231..0000000000000 --- a/changelog.d/19643_only_set_connection_close_http1.fix.md +++ /dev/null @@ -1,3 +0,0 @@ -When terminating idle HTTP connections using the configured `max_connection_age`, only send -`Connection: Close` for HTTP/0.9, HTTP/1.0, and HTTP/1.1 requests. This header is not supported on -HTTP/2 and HTTP/3 requests. This may be supported on these HTTP versions in the future. diff --git a/changelog.d/19711_add_s3_source_delete_configuration.feature.md b/changelog.d/19711_add_s3_source_delete_configuration.feature.md deleted file mode 100644 index c68247eae5612..0000000000000 --- a/changelog.d/19711_add_s3_source_delete_configuration.feature.md +++ /dev/null @@ -1,3 +0,0 @@ -Added a configuration option for the `aws_s3` source that prevents deletion of messages which failed to be delivered to a sink. - -authors: tanushri-sundar diff --git a/changelog.d/19785_https_svcb_record_types_support.enhancement.md b/changelog.d/19785_https_svcb_record_types_support.enhancement.md deleted file mode 100644 index 7fe0924fb7398..0000000000000 --- a/changelog.d/19785_https_svcb_record_types_support.enhancement.md +++ /dev/null @@ -1,2 +0,0 @@ -Added support for parsing HTTPS (type 65) and SVCB (type 64) resource records from DNS messages -authors: @esensar diff --git a/changelog.d/19812_journald_metadata_missing.fix.md b/changelog.d/19812_journald_metadata_missing.fix.md deleted file mode 100644 index 01f79f3521157..0000000000000 --- a/changelog.d/19812_journald_metadata_missing.fix.md +++ /dev/null @@ -1,4 +0,0 @@ -Fixed an issue where the `journald` source was not correctly emitting metadata when `log_namespace = -True`. - -authors: @dalegaard diff --git a/changelog.d/19825.feature.md b/changelog.d/19825.feature.md deleted file mode 100644 index 5a6ff76051e82..0000000000000 --- a/changelog.d/19825.feature.md +++ /dev/null @@ -1,2 +0,0 @@ -Implemented VRL decoder. This enables users to set `decoding.codec = "vrl"` in their -source configurations and use VRL programs to decode logs. diff --git a/changelog.d/bump_alpine_3_19.enhancement.md b/changelog.d/bump_alpine_3_19.enhancement.md deleted file mode 100644 index d5eaa7eb8e86d..0000000000000 --- a/changelog.d/bump_alpine_3_19.enhancement.md +++ /dev/null @@ -1 +0,0 @@ -The base for Vector's Alpine Docker images was updated from 3.18 to 3.19. 
diff --git a/changelog.d/validate_skip_healthchecks.enhancement.md b/changelog.d/validate_skip_healthchecks.enhancement.md deleted file mode 100644 index 5ba1ef5f60ff6..0000000000000 --- a/changelog.d/validate_skip_healthchecks.enhancement.md +++ /dev/null @@ -1,5 +0,0 @@ -New Option `--skip-healthchecks` for `vector validate` validates config -including VRL, but skips health checks for sinks. - -Useful to validate configuration before deploying it remotely. -authors: MartinEmrich diff --git a/distribution/install.sh b/distribution/install.sh index ec247d509c17f..75f206d1a8c5b 100755 --- a/distribution/install.sh +++ b/distribution/install.sh @@ -13,7 +13,7 @@ set -u # If PACKAGE_ROOT is unset or empty, default it. PACKAGE_ROOT="${PACKAGE_ROOT:-"https://packages.timber.io/vector"}" # If VECTOR_VERSION is unset or empty, default it. -VECTOR_VERSION="${VECTOR_VERSION:-"0.35.1"}" +VECTOR_VERSION="${VECTOR_VERSION:-"0.36.0"}" _divider="--------------------------------------------------------------------------------" _prompt=">>>" _indent=" " diff --git a/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md b/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md new file mode 100644 index 0000000000000..216e86a9da7e7 --- /dev/null +++ b/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md @@ -0,0 +1,12 @@ +--- +date: "2024-02-13" +title: "0.36 Upgrade Guide" +description: "An upgrade guide that addresses breaking changes in 0.36.0" +authors: ["jszwedko"] +release: "0.36.0" +hide_on_release_notes: false +badges: + type: breaking change +--- + +Vector's 0.36.0 release includes no **breaking changes** or **deprecations**: diff --git a/website/content/en/releases/0.36.0.md b/website/content/en/releases/0.36.0.md new file mode 100644 index 0000000000000..29d0d21cf2203 --- /dev/null +++ b/website/content/en/releases/0.36.0.md @@ -0,0 +1,4 @@ +--- +title: Vector v0.36.0 release notes +weight: 21 +--- diff --git a/website/cue/reference/administration/interfaces/kubectl.cue b/website/cue/reference/administration/interfaces/kubectl.cue index 35d5c6b3a3889..883c1d554a9a7 100644 --- a/website/cue/reference/administration/interfaces/kubectl.cue +++ b/website/cue/reference/administration/interfaces/kubectl.cue @@ -19,7 +19,7 @@ administration: interfaces: kubectl: { role_implementations: [Name=string]: { commands: { _deployment_variant: string - _vector_version: "0.35" + _vector_version: "0.36" _namespace: string | *"vector" _controller_resource_type: string _controller_resource_name: string | *_deployment_variant diff --git a/website/cue/reference/releases/0.36.0.cue b/website/cue/reference/releases/0.36.0.cue new file mode 100644 index 0000000000000..768f2e37c4a21 --- /dev/null +++ b/website/cue/reference/releases/0.36.0.cue @@ -0,0 +1,369 @@ +package metadata + +releases: "0.36.0": { + date: "2024-02-13" + codename: "" + + whats_next: [] + + description: """ + The Vector team is pleased to announce version 0.36.0! + + There are no breaking changes in this release. + + In addition to the usual enhancements and bug fixes, this release also includes + + - A new `prometheus_pushgateway` source to receive Prometheus data + + A reminder that the `repositories.timber.io` package repositories will be decommissioned on + February 28th, 2024. Please see the [release + highlight](/highlights/2023-11-07-new-linux-repos) for details about this change and + instructions on how to migrate. 
+ """ + + changelog: [ + { + type: "feat" + description: """ + Vector can now emulate a [Prometheus Pushgateway](https://github.com/prometheus/pushgateway) through the new `prometheus_pushgateway` source. Counters and histograms can optionally be aggregated across pushes to support use-cases like cron jobs. + + There are some caveats, which are listed [here](https://github.com/Sinjo/vector/blob/0d4fc20091ddae7f3562bfdf07c9095c0c7223e0/src/sources/prometheus/pushgateway.rs#L8-L12). + + """ + contributors: ["Sinjo"] + }, + { + type: "feat" + description: """ + The `clickhouse` sink now supports `format`. This can be used to specify the data [format](https://clickhouse.com/docs/en/interfaces/formats) provided to `INSERT`s. The default is `JSONEachRow`. + + """ + contributors: ["gabriel376"] + }, + { + type: "fix" + description: """ + Fixed an issue where the `aws_s3` sink adds a trailing period to the s3 key when the `filename_extension` is empty. + """ + }, + { + type: "fix" + description: """ + Removed warnings for unused outputs in `datadog_agent` source when the corresponding output is disabled in the source config. + """ + }, + { + type: "enhancement" + description: """ + Unit tests can now populate event metadata with the `% = ...` syntax. + + """ + contributors: ["GreyTeardrop"] + }, + { + type: "fix" + description: """ + When terminating idle HTTP connections using the configured `max_connection_age`, only send + `Connection: Close` for HTTP/0.9, HTTP/1.0, and HTTP/1.1 requests. This header is not supported on + HTTP/2 and HTTP/3 requests. This may be supported on these HTTP versions in the future. + """ + }, + { + type: "feat" + description: """ + Added a configuration option for the `aws_s3` source that prevents deletion of messages which failed to be delivered to a sink. + + """ + contributors: ["tanushri-sundar"] + }, + { + type: "fix" + description: """ + The following metrics now correctly have the `component_kind`, `component_type`, and `component_id` tags: + - `component_errors_total` + - `component_discarded_events_total` + + For the following sinks: + - `splunk_hec` + - `clickhouse` + - `loki` + - `redis` + - `azure_blob` + - `azure_monitor_logs` + - `webhdfs` + - `appsignal` + - `amqp` + - `aws_kinesis` + - `statsd` + - `honeycomb` + - `gcp_stackdriver_metrics` + - `gcs_chronicle_unstructured` + - `gcp_stackdriver_logs` + - `gcp_pubsub` + - `gcp_cloud_storage` + - `nats` + - `http` + - `kafka` + - `new_relic` + - `datadog_metrics` + - `datadog_traces` + - `datadog_events` + - `databend` + - `prometheus_remote_write` + - `pulsar` + - `aws_s3` + - `aws_sqs` + - `aws_sns` + - `elasticsearch` + """ + }, + { + type: "enhancement" + description: """ + Added support for parsing HTTPS (type 65) and SVCB (type 64) resource records from DNS messages + """ + contributors: ["esensar"] + }, + { + type: "fix" + description: """ + Fixed an issue where the `journald` source was not correctly emitting metadata when `log_namespace = + True`. + + """ + contributors: ["dalegaard"] + }, + { + type: "feat" + description: """ + Implemented VRL decoder. This enables users to set `decoding.codec = "vrl"` in their + source configurations and use VRL programs to decode logs. + """ + }, + { + type: "enhancement" + description: """ + The base for Vector's Alpine Docker images was updated from 3.18 to 3.19. + """ + }, + { + type: "fix" + description: """ + Fixed an issue where the `datadog_logs` sink could produce a request larger than the allowed API + limit. 
+ """ + }, + { + type: "enhancement" + description: """ + Gracefully accept `@` characters in labels when decoding GELF. + """ + contributors: ["MartinEmrich"] + }, + { + type: "enhancement" + description: """ + Added a boolean `graphql` field to the api configuration to allow disabling the graphql endpoint. + + Note that the `playground` endpoint will now only be enabled if the `graphql` endpoint is also enabled. + """ + }, + { + type: "enhancement" + description: """ + New Option `--skip-healthchecks` for `vector validate` validates config + including VRL, but skips health checks for sinks. + + Useful to validate configuration before deploying it remotely. + """ + contributors: ["MartinEmrich"] + }, + ] + + commits: [ + {sha: "d115e269dbbb06fe25977df74b10d5cd0fa04628", date: "2024-01-05 09:20:10 UTC", description: "Automated changelog generation", pr_number: 19429, scopes: ["releasing"], type: "chore", breaking_change: false, author: "neuronull", files_count: 9, insertions_count: 345, deletions_count: 7}, + {sha: "3525d062dd2387bdda8babc5a98f5a9997a0362a", date: "2024-01-06 08:35:59 UTC", description: "Fix link to RFC 3339", pr_number: 19509, scopes: ["docs"], type: "chore", breaking_change: false, author: "Benedikt Heine", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "81d22b30e20ba9a250b4d9a5d56aa4216fcd7ece", date: "2024-01-06 01:15:56 UTC", description: "fix changelog workflow extern contribs", pr_number: 19524, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 5, deletions_count: 0}, + {sha: "aa6fd40ae9fda3279cbfd4f4ec3bdbb7debde691", date: "2024-01-06 05:39:43 UTC", description: "improve retry behavior code quality", pr_number: 19450, scopes: ["datadog_metrics sink", "datadog_logs sink"], type: "enhancement", breaking_change: false, author: "Doug Smith", files_count: 2, insertions_count: 37, deletions_count: 39}, + {sha: "c2cc94a262ecf39798009d29751d59cc97baa0c5", date: "2024-01-09 09:16:21 UTC", description: "Update AWS crates", pr_number: 19312, scopes: [], type: "chore", breaking_change: false, author: "Stephen Wakely", files_count: 54, insertions_count: 827, deletions_count: 1043}, + {sha: "9c832fd2f8677ddceb15e2e3a8e5a504b1b1cea3", date: "2024-01-09 02:18:50 UTC", description: "exclude dependabot from changelog job steps", pr_number: 19545, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 4, deletions_count: 4}, + {sha: "a3f033766dab2d41f00b68f19aa97eecb5f42728", date: "2024-01-09 05:42:06 UTC", description: "Bump serde from 1.0.194 to 1.0.195", pr_number: 19533, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 8, insertions_count: 11, deletions_count: 11}, + {sha: "b2cc78869c7890ab00e586ab8b34f7ec5828da4a", date: "2024-01-09 09:57:09 UTC", description: "Bump Rust to 1.75.0", pr_number: 19518, scopes: ["deps"], type: "chore", breaking_change: false, author: "neuronull", files_count: 52, insertions_count: 245, deletions_count: 253}, + {sha: "3b120ff0c17ccedf07f423090f8c009bf7164410", date: "2024-01-10 06:12:54 UTC", description: "Add Prometheus Pushgateway source", pr_number: 18143, scopes: ["new source"], type: "feat", breaking_change: false, author: "Chris Sinjakli", files_count: 13, insertions_count: 1112, deletions_count: 46}, + {sha: "f914cf602e78685804efaf473a056bb87f612110", date: "2024-01-10 01:49:11 UTC", description: "Fix the check for external contributor author GH usernames", 
pr_number: 19568, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "2b25a99a7347f40043434d1337a6b960338357c0", date: "2024-01-10 10:46:08 UTC", description: "Fix aws feature error", pr_number: 19567, scopes: [], type: "chore", breaking_change: false, author: "Stephen Wakely", files_count: 2, insertions_count: 9, deletions_count: 6}, + {sha: "05b07ab196b3891ca203dd64200fa5b064b7abb1", date: "2024-01-10 10:55:52 UTC", description: "only export RemoteWriteConfig for remote-write feature", pr_number: 19569, scopes: [], type: "chore", breaking_change: false, author: "Stephen Wakely", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "d6bd2696d138e3499deea7db9a9ac9432a96e687", date: "2024-01-10 09:26:13 UTC", description: "Add pure/impure badge for VRL functions", pr_number: 19571, scopes: ["docs"], type: "chore", breaking_change: false, author: "Pavlos Rontidis", files_count: 7, insertions_count: 22, deletions_count: 0}, + {sha: "1eda83b64c83e067c3577b9e63cc4bb28d064518", date: "2024-01-10 07:16:02 UTC", description: "Bump anyhow from 1.0.76 to 1.0.79", pr_number: 19500, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 5}, + {sha: "f38796d3a8e341a9fc5fe5499a489af33c19a3b7", date: "2024-01-10 15:16:06 UTC", description: "Bump async-trait from 0.1.75 to 0.1.77", pr_number: 19498, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "e08b187b5502b97cbbbd337c043e59227c2de291", date: "2024-01-10 15:17:13 UTC", description: "Bump serde_bytes from 0.11.12 to 0.11.14", pr_number: 19495, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "f5bed3fd72f1239a41a82ac89b6ebb303318f5f9", date: "2024-01-10 15:20:12 UTC", description: "Bump semver from 1.0.20 to 1.0.21", pr_number: 19505, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "a7a41661a4339c07034fb38c05ffdea4f5d3c4fc", date: "2024-01-10 15:20:53 UTC", description: "Bump serde_yaml from 0.9.29 to 0.9.30", pr_number: 19514, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 8, deletions_count: 8}, + {sha: "1d979cc6791f32b024459f5e76c503bf6947db76", date: "2024-01-10 15:22:07 UTC", description: "Bump syn from 2.0.46 to 2.0.48", pr_number: 19532, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 113, deletions_count: 113}, + {sha: "eec7eb5a9abfdc6f63cc1b8f4ed2c8364492622d", date: "2024-01-10 15:22:36 UTC", description: "Bump num_enum from 0.7.1 to 0.7.2", pr_number: 19536, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 7, deletions_count: 7}, + {sha: "47fcf91f8935df19d93b08a8420c79f67bdcfb68", date: "2024-01-10 15:23:12 UTC", description: "Bump opendal from 0.44.0 to 0.44.1", pr_number: 19538, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "61b2a3f365876b4a23115d38b7817eff450afa58", date: "2024-01-10 15:23:26 UTC", description: "Bump the clap group with 1 update", 
pr_number: 19552, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 7, insertions_count: 20, deletions_count: 20}, + {sha: "c57435d9b142a34674e4260a9ef6ce7b044c6a4e", date: "2024-01-10 15:24:19 UTC", description: "Bump base64 from 0.21.5 to 0.21.6", pr_number: 19557, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 27, deletions_count: 27}, + {sha: "1daa0d38728665d1fd716be848544d2e2cf6579e", date: "2024-01-10 15:24:30 UTC", description: "Bump cargo_toml from 0.17.2 to 0.18.0", pr_number: 19558, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "2be297649ba4e16d9b85802f2e0f69c71e2e310f", date: "2024-01-10 15:25:01 UTC", description: "Bump crossbeam-utils from 0.8.18 to 0.8.19", pr_number: 19560, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 8}, + {sha: "14ae52ed542514368495aa641e873a851c4bb2f4", date: "2024-01-10 07:45:59 UTC", description: "Group together crossbeam updates", pr_number: 19572, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 3, deletions_count: 0}, + {sha: "84de179739a45ba02878c1df0aee5cdee3b8082f", date: "2024-01-10 19:35:22 UTC", description: "Bump thiserror from 1.0.51 to 1.0.56", pr_number: 19510, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 4, deletions_count: 4}, + {sha: "79f0fd335e6ae92b3d3dab11e04b721536b6f0e8", date: "2024-01-10 22:27:42 UTC", description: "Bump libc from 0.2.151 to 0.2.152", pr_number: 19534, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "8f504b35985b9cc1e29f1505b8fd42abd138851e", date: "2024-01-11 01:50:13 UTC", description: "Bump the aws group with 2 updates", pr_number: 19556, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 13, deletions_count: 13}, + {sha: "586fb31a1678ca220cdeef7f37b091de41b6ce95", date: "2024-01-11 13:08:43 UTC", description: "update ingestion api for greptimedb sink", pr_number: 19410, scopes: ["greptimedb sink"], type: "feat", breaking_change: false, author: "Ning Sun", files_count: 6, insertions_count: 163, deletions_count: 150}, + {sha: "86b16e04a2f98701f13e7c814baf5cf837d0a82c", date: "2024-01-10 22:56:47 UTC", description: "Bump the crossbeam group with 1 update", pr_number: 19576, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 4, deletions_count: 5}, + {sha: "e3f285c32e857b1b1a8de4504e9bdfebdf0e77ec", date: "2024-01-10 22:56:57 UTC", description: "Bump getrandom from 0.2.11 to 0.2.12", pr_number: 19575, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 15, deletions_count: 15}, + {sha: "8881cc4a811d2253699f025f2d20fa496e38fe32", date: "2024-01-11 06:57:11 UTC", description: "Bump maxminddb from 0.23.0 to 0.24.0", pr_number: 19574, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "27e49e7ee645da5f1bf33b49dc616a3c8592bc72", date: "2024-01-11 06:57:22 UTC", description: "Bump 
mlua from 0.9.2 to 0.9.3", pr_number: 19573, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 8, deletions_count: 8}, + {sha: "13a930afcfbe2f11a8eef9634a2229e3e8672b1f", date: "2024-01-11 07:06:06 UTC", description: "Bump serde_json from 1.0.109 to 1.0.111", pr_number: 19520, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 6, insertions_count: 7, deletions_count: 7}, + {sha: "b8c268cf7e8853b41b50285f8959f87a99939f01", date: "2024-01-11 07:06:15 UTC", description: "Bump docker/metadata-action from 5.4.0 to 5.5.0", pr_number: 19526, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "5fb8efcef24f231589e63e16b420f7f42dda7813", date: "2024-01-10 23:43:22 UTC", description: "Ensure PR runs of regression and k8s e2e tests don't cancel each other", pr_number: 19578, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 6, deletions_count: 4}, + {sha: "a412c3c013c2de24e6a1502ed1cfe19f4b511f81", date: "2024-01-10 23:44:38 UTC", description: "Bump manifests to v0.30.0 of the chart", pr_number: 19554, scopes: ["kubernetes"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 18, insertions_count: 22, deletions_count: 22}, + {sha: "d282d260ae1f950f25516498f80ee55512192866", date: "2024-01-10 23:49:42 UTC", description: "Fix proofreading mistake in v0.35.0 upgrade guide", pr_number: 19551, scopes: ["docs"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "dd76ca8815679d1e791b3b16400639fd815168fd", date: "2024-01-11 02:26:18 UTC", description: "Bump graphql crates to 7.0.0", pr_number: 19579, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 3, insertions_count: 107, deletions_count: 14}, + {sha: "9f7c92d8d4b605f14f9d65ee9f9e34dcedf297d8", date: "2024-01-11 04:32:13 UTC", description: "abort serialization and split batch when payload is too large", pr_number: 19189, scopes: ["datadog_logs sink"], type: "fix", breaking_change: false, author: "Luke Steensen", files_count: 3, insertions_count: 205, deletions_count: 137}, + {sha: "df0eafce599b8c58053c0f2d68b479507824fc0b", date: "2024-01-11 08:40:53 UTC", description: "Skip serializing default proxy config fields", pr_number: 19580, scopes: ["config"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 1, insertions_count: 6, deletions_count: 7}, + {sha: "bbff1b2e325df0ce706b244e73126580acd1f846", date: "2024-01-11 14:40:53 UTC", description: "Bump cached from 0.46.1 to 0.47.0", pr_number: 19503, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "11f50370254b85d6ca79d8874b32a55458fa2b7c", date: "2024-01-11 14:41:08 UTC", description: "Bump h2 from 0.4.0 to 0.4.1", pr_number: 19559, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 4, deletions_count: 4}, + {sha: "f4ad8bf0978b4524305dbfddf77609cdedf8e92a", date: "2024-01-11 08:11:31 UTC", description: "enable running all int tests comment", pr_number: 19581, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 116, deletions_count: 43}, + 
{sha: "2e756a16dc4aaf2faca2a293cc4f99ea3ef59617", date: "2024-01-11 15:24:06 UTC", description: "Bump the aws group with 4 updates", pr_number: 19582, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 23, deletions_count: 24}, + {sha: "2448a72770444e4c203d7d937e1ccede22c23aed", date: "2024-01-11 22:42:24 UTC", description: "Bump the aws group with 1 update", pr_number: 19586, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 8, deletions_count: 8}, + {sha: "9dd9907b356996d9bbb395fd4aea2a207c930914", date: "2024-01-12 03:19:50 UTC", description: "Shorten name of `skip_serializing_if_default`", pr_number: 19591, scopes: [], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 70, insertions_count: 107, deletions_count: 210}, + {sha: "b1502ec185a517f2c95078f5a70acae7baaf1c30", date: "2024-01-12 11:22:19 UTC", description: "Allow @ as valid GELF field character in decoder", pr_number: 19544, scopes: ["codec"], type: "enhancement", breaking_change: false, author: "Martin Emrich", files_count: 3, insertions_count: 12, deletions_count: 4}, + {sha: "1e1f2ecdf96ec104234756efb5a47167a85bc25e", date: "2024-01-12 22:33:03 UTC", description: "Bump the aws group with 1 update", pr_number: 19605, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "59699f6cf7e4f96d2d7b3d633eb8082d85110695", date: "2024-01-12 22:33:13 UTC", description: "Bump the clap group with 1 update", pr_number: 19606, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 7, insertions_count: 22, deletions_count: 22}, + {sha: "6fde1861fe8961b1c100c951e0752b48673fac12", date: "2024-01-13 06:33:26 UTC", description: "Bump mlua from 0.9.3 to 0.9.4", pr_number: 19607, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 6, deletions_count: 6}, + {sha: "37125b9af3c8dfaa6924a8f5e59cc2a37f58923a", date: "2024-01-13 06:33:48 UTC", description: "Bump confy from 0.5.1 to 0.6.0", pr_number: 19608, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 13, deletions_count: 42}, + {sha: "af6169c99e2bf236b958d775bd8af868c9dac094", date: "2024-01-13 06:34:02 UTC", description: "Bump assert_cmd from 2.0.12 to 2.0.13", pr_number: 19610, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "c2f32593776f1e9304dc20ae2adbfb3efb8a8eb8", date: "2024-01-13 06:34:13 UTC", description: "Bump base64 from 0.21.6 to 0.21.7", pr_number: 19611, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 26, deletions_count: 26}, + {sha: "1705dfd5d85b08be96a594dfbf9081ed78497ee1", date: "2024-01-13 01:43:47 UTC", description: "Fix handling of the default value for `ProxyConfig::enabled`", pr_number: 19604, scopes: ["config"], type: "fix", breaking_change: false, author: "Bruce Guenter", files_count: 3, insertions_count: 41, deletions_count: 1}, + {sha: "e1d570d99621f5b9c58423bdc1e5e8cee8ca9c0f", date: "2024-01-13 02:37:33 UTC", description: "Bump Vector to v0.36.0", pr_number: 19550, scopes: ["releasing"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, 
insertions_count: 2, deletions_count: 2}, + {sha: "f262324595883633a21ead16c5fc165a576c9f17", date: "2024-01-13 06:20:23 UTC", description: "improve source data_dir docs", pr_number: 19596, scopes: ["docs"], type: "chore", breaking_change: false, author: "Pavlos Rontidis", files_count: 6, insertions_count: 32, deletions_count: 12}, + {sha: "131ab453d4611699e6f6989546c4b5d289e8768a", date: "2024-01-13 06:14:49 UTC", description: "improve documentation of `RetryLogic` trait functions", pr_number: 19617, scopes: ["sinks"], type: "docs", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 8, deletions_count: 0}, + {sha: "20b4fc72bcb8f605e044e05ae3df0e26aa637875", date: "2024-01-13 09:38:22 UTC", description: "remove trailing dot from s3 filename extension", pr_number: 19616, scopes: ["aws_s3 sink"], type: "fix", breaking_change: false, author: "Sebastian Tia", files_count: 2, insertions_count: 24, deletions_count: 1}, + {sha: "38d8801d4096f1f9e12ffd01fe8014b92682297d", date: "2024-01-16 16:04:05 UTC", description: "pub prometheus sink configs", pr_number: 19540, scopes: [], type: "chore", breaking_change: false, author: "Suika", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "521512dcb07d4222630999e301f82ddd5fd16218", date: "2024-01-16 15:23:35 UTC", description: "Bump the aws group with 2 updates", pr_number: 19619, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 6, deletions_count: 6}, + {sha: "cebe6284595badef5112807fd1f7e9a5f0e7d3ce", date: "2024-01-16 15:24:33 UTC", description: "Bump wasm-bindgen from 0.2.89 to 0.2.90", pr_number: 19620, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 10, deletions_count: 10}, + {sha: "628d207bf4769ebd0bbf2b98ddbbf162ebd5be14", date: "2024-01-16 23:32:48 UTC", description: "fix filter out PRs for gardener issue comment workflow", pr_number: 19618, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "4c098417baef4c0d2d7af09beaad3dfa1483ad3f", date: "2024-01-16 23:38:59 UTC", description: "update GELF codec", pr_number: 19602, scopes: ["docs"], type: "chore", breaking_change: false, author: "neuronull", files_count: 45, insertions_count: 586, deletions_count: 0}, + {sha: "26f2468f66bc22a0d66b3a382be17a46bc4bb1a9", date: "2024-01-17 00:10:55 UTC", description: "Bump smallvec from 1.11.2 to 1.12.0", pr_number: 19623, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "b540936fc0ac132d257e168dae78e228c3cce324", date: "2024-01-17 07:15:23 UTC", description: "Bump the clap group with 2 updates", pr_number: 19626, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 7, insertions_count: 23, deletions_count: 23}, + {sha: "045b38448482a4d090b3ac0fbafa10fbf2ba0030", date: "2024-01-17 04:13:28 UTC", description: "Clarify that this source receives data from Splunk clients", pr_number: 19615, scopes: ["splunk_hec source"], type: "docs", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 9, deletions_count: 1}, + {sha: "b2c9f27d4360cbdb211d9f7230ae90e6becfee8d", date: "2024-01-17 05:45:51 UTC", description: "fix and simplify concurrency groups", pr_number: 19630, scopes: ["ci"], type: "chore", breaking_change: false, author: 
"neuronull", files_count: 5, insertions_count: 10, deletions_count: 9}, + {sha: "c30a45f362550c1b2989a1ca43f60bb7267ccfa0", date: "2024-01-18 05:09:28 UTC", description: "Bump the graphql group with 1 update", pr_number: 19583, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 10, deletions_count: 99}, + {sha: "4e877e53d112310ddee4d97417550ed0e20316d4", date: "2024-01-18 11:39:13 UTC", description: "Bump the clap group with 2 updates", pr_number: 19634, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 7, insertions_count: 24, deletions_count: 23}, + {sha: "58a37b24dad42fc8aa0bd4737786a6aae780a3c5", date: "2024-01-18 08:55:08 UTC", description: "acquire exclusive lock to global data dir", pr_number: 19595, scopes: ["config"], type: "fix", breaking_change: false, author: "Pavlos Rontidis", files_count: 10, insertions_count: 63, deletions_count: 5}, + {sha: "2adf6726906b54e4ef30524b635830a860590310", date: "2024-01-19 07:47:47 UTC", description: "add write perms to the default data_dir", pr_number: 19659, scopes: ["buffers"], type: "fix", breaking_change: false, author: "Pavlos Rontidis", files_count: 2, insertions_count: 6, deletions_count: 2}, + {sha: "cc9203b610868d5de8daff7ac1051dce9038dfe8", date: "2024-01-19 07:28:33 UTC", description: "Document Vector's MSRV policy", pr_number: 19646, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 10, deletions_count: 0}, + {sha: "52c12c3fa0355dd53edfd01ffd979f5be40f09f6", date: "2024-01-19 15:44:00 UTC", description: "Bump async-compression from 0.4.5 to 0.4.6", pr_number: 19652, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "32748273fbbf3a65851f6e4f65ddaae385000cdd", date: "2024-01-20 00:58:42 UTC", description: "Bump the aws group with 2 updates", pr_number: 19660, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 15, deletions_count: 15}, + {sha: "846075c4bbe2fb982c7d289a5011ec96d4f9b0cc", date: "2024-01-20 00:59:11 UTC", description: "Bump uuid from 1.6.1 to 1.7.0", pr_number: 19661, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "50a86ef4fb59b9f9ac5e3179d6e8892019d552ee", date: "2024-01-20 08:37:36 UTC", description: "Remove warning for unused outputs when output is disabled", pr_number: 19629, scopes: ["datadog_agent source"], type: "fix", breaking_change: false, author: "Sebastian Tia", files_count: 4, insertions_count: 122, deletions_count: 18}, + {sha: "9b024b9564b24524ce9a305b3c00080779f63250", date: "2024-01-20 13:40:52 UTC", description: "Bump actions/cache from 3 to 4", pr_number: 19642, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 7, insertions_count: 7, deletions_count: 7}, + {sha: "c1199512c73bfd58e76daf1297cf29f7eff6aa5a", date: "2024-01-20 06:54:21 UTC", description: "Update h2", pr_number: 19648, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 9, deletions_count: 9}, + {sha: "55317dcda1a26c533242eb3a9bd24a61dd5958e3", date: "2024-01-23 06:52:31 UTC", description: "Bump openssl from 0.10.62 to 0.10.63", pr_number: 19672, scopes: ["deps"], type: "chore", 
breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 6, deletions_count: 6}, + {sha: "9c581836c9a4ba1993022be918a034d50f89794e", date: "2024-01-23 06:52:42 UTC", description: "Bump cached from 0.47.0 to 0.48.0", pr_number: 19673, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "c12c8e1bef9fb2f9a9a31892d7911b8637f581e7", date: "2024-01-23 06:53:02 UTC", description: "Bump regex from 1.10.2 to 1.10.3", pr_number: 19674, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 5, insertions_count: 10, deletions_count: 10}, + {sha: "ba9b4bd7c4af1eed4cc6b7e64686a2e666a306d6", date: "2024-01-23 06:54:03 UTC", description: "Bump smallvec from 1.12.0 to 1.13.1", pr_number: 19677, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "a6fb31b2bfd3fedcf53d858d5d7f99942649ea21", date: "2024-01-23 08:03:48 UTC", description: "Fix docs for `ignore_older_secs`", pr_number: 19682, scopes: [], type: "docs", breaking_change: false, author: "silverwind", files_count: 5, insertions_count: 8, deletions_count: 8}, + {sha: "eeab67d7b86166dfeac345144aaa36d72f746253", date: "2024-01-23 07:23:12 UTC", description: "Bump the graphql group with 2 updates", pr_number: 19670, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 21, deletions_count: 14}, + {sha: "f41ca86876ae9c6fb98c8edd363691cfff963daf", date: "2024-01-23 07:27:21 UTC", description: "Bump opendal from 0.44.1 to 0.44.2", pr_number: 19676, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 20}, + {sha: "5bb492608d935c38a1ae6e748592f0ae9812413c", date: "2024-01-23 03:57:16 UTC", description: "Add `graphql` field to toggle graphql endpoint", pr_number: 19645, scopes: ["config api"], type: "enhancement", breaking_change: false, author: "Sebastian Tia", files_count: 4, insertions_count: 53, deletions_count: 12}, + {sha: "25b1b8c7d891bbc7bbe8addbde0342c820b5424f", date: "2024-01-23 22:46:48 UTC", description: "Bump the clap group with 1 update", pr_number: 19687, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "b56f1c3a341df729a217256fa3fefa9772583c96", date: "2024-01-24 09:30:04 UTC", description: "Bump the aws group with 1 update", pr_number: 19688, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "b51085b1a8d0c3e7c957bf9ad1d2a8db6a661dce", date: "2024-01-24 09:37:59 UTC", description: "Bump serde_with from 3.4.0 to 3.5.0", pr_number: 19675, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 14, deletions_count: 14}, + {sha: "2f8fbd135e1c7a683d70be0c09a8dbc43e6f5d0d", date: "2024-01-24 09:38:30 UTC", description: "Bump proc-macro2 from 1.0.76 to 1.0.78", pr_number: 19671, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 68, deletions_count: 68}, + {sha: "53f97c1c61ca176ba20852d0cfc1e45e44cf2235", date: "2024-01-24 09:39:18 UTC", description: "Bump env_logger from 0.10.1 to 0.10.2", pr_number: 19651, scopes: 
["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 3, deletions_count: 3}, + {sha: "5d7ceaa8c963bd23e6c0b066fa36c0581103575f", date: "2024-01-24 07:03:16 UTC", description: "Run the changelog check on the merge queue to pass required checks", pr_number: 19696, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 24, deletions_count: 2}, + {sha: "7cf2f009dbd9be4177dfbce7950cd82d57f93448", date: "2024-01-24 10:57:26 UTC", description: "emit graphql field of api config", pr_number: 19692, scopes: ["config api"], type: "fix", breaking_change: false, author: "Sebastian Tia", files_count: 3, insertions_count: 9, deletions_count: 3}, + {sha: "88c10a9e0142a5aca06972ceba2e24983df631b6", date: "2024-01-25 01:46:33 UTC", description: "Bump the aws group with 2 updates", pr_number: 19697, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 7, deletions_count: 6}, + {sha: "650d478fc28f79d1f075f43971cd2b54ca848652", date: "2024-01-25 01:20:06 UTC", description: "Drop dependency on `cached` crate", pr_number: 19693, scopes: ["vdev"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 4, insertions_count: 10, deletions_count: 81}, + {sha: "cacba25ea31a394663169b253dba747f6f8a89f6", date: "2024-01-26 11:25:26 UTC", description: "Bump peter-evans/create-or-update-comment from 3 to 4", pr_number: 19710, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "b72217cf40d3216625cf274fe79b669f823a1c8a", date: "2024-01-26 11:25:56 UTC", description: "Bump dorny/paths-filter from 2 to 3", pr_number: 19708, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "8a82a3b1347c25efbb06b9ad300fd9d7a779b202", date: "2024-01-26 06:26:29 UTC", description: "remove cfg test attribute", pr_number: 19684, scopes: ["aws region"], type: "fix", breaking_change: false, author: "Sebastian Tia", files_count: 1, insertions_count: 0, deletions_count: 2}, + {sha: "a4aff31d54a3c820f50aa94acef66e0938f3c77e", date: "2024-01-26 11:54:16 UTC", description: "Bump bufbuild/buf-setup-action from 1.28.1 to 1.29.0", pr_number: 19709, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "2b8334397212f749ad5ef4961d22a630568f7dd6", date: "2024-01-27 06:54:12 UTC", description: "Bump pin-project from 1.1.3 to 1.1.4", pr_number: 19718, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "dd50a46b92f33dfbf81ef150a7be892c896ab401", date: "2024-01-27 06:54:21 UTC", description: "Bump memmap2 from 0.9.3 to 0.9.4", pr_number: 19719, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "09668836bb8331e894d5c48e0376041fb92e385d", date: "2024-01-27 08:21:41 UTC", description: "Bump mlua from 0.9.4 to 0.9.5", pr_number: 19717, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 10, deletions_count: 10}, + {sha: "4195071d984a4d2107a2f5888bca82db0bab4b5c", date: "2024-01-27 05:12:26 UTC", description: "propagate 
tracing span context in stream sink request building", pr_number: 19712, scopes: ["observability"], type: "fix", breaking_change: false, author: "neuronull", files_count: 2, insertions_count: 44, deletions_count: 0}, + {sha: "b141f2ea0550410989a98bef80e5863a373dca4c", date: "2024-01-27 05:49:38 UTC", description: "Bump chrono from 0.4.31 to 0.4.33", pr_number: 19723, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 13, insertions_count: 87, deletions_count: 17}, + {sha: "650a738e63f3ff7d80ff872760fc8497b257e709", date: "2024-01-30 01:40:33 UTC", description: "update basic sink tutorial doc", pr_number: 19722, scopes: ["docs"], type: "chore", breaking_change: false, author: "Sebastian Tia", files_count: 1, insertions_count: 26, deletions_count: 13}, + {sha: "51c6466c7d848b49e9a66293ddfb8211c1f6acb5", date: "2024-01-30 10:20:20 UTC", description: "Bump lru from 0.12.1 to 0.12.2", pr_number: 19731, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "7c27b2e5eb82150660a6066318aef2926be84ee1", date: "2024-01-30 16:21:04 UTC", description: "Bump inventory from 0.3.14 to 0.3.15", pr_number: 19732, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "fc0958863b674fbca4550c274e6f0c7711264593", date: "2024-01-30 16:23:07 UTC", description: "Bump cargo_toml from 0.18.0 to 0.19.0", pr_number: 19733, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "5f233f23700fb22a031168078cdcbaee79242775", date: "2024-01-30 16:25:25 UTC", description: "Bump the aws group with 2 updates", pr_number: 19720, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 17, deletions_count: 17}, + {sha: "7056f5fe02af3d11a0ac813c9043788d96ed233c", date: "2024-01-30 16:50:10 UTC", description: "Bump serde_json from 1.0.111 to 1.0.112", pr_number: 19730, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 13, insertions_count: 16, deletions_count: 15}, + {sha: "bf2d7329c0fd41f478f974b282923f00d89cf027", date: "2024-01-31 00:37:24 UTC", description: "Bump cargo_toml from 0.19.0 to 0.19.1", pr_number: 19744, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "9571b4ec304f80a530ab312755cb93e9197ae1ba", date: "2024-01-31 06:38:35 UTC", description: "Bump itertools from 0.12.0 to 0.12.1", pr_number: 19745, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 10, deletions_count: 10}, + {sha: "ec9b2c7df7eba02dc1c3c0252c05a0a6499d5371", date: "2024-01-31 07:05:47 UTC", description: "Bump serde from 1.0.195 to 1.0.196", pr_number: 19734, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 17, insertions_count: 24, deletions_count: 30}, + {sha: "f085b72615c7e98760aef1192b72f697d127e358", date: "2024-01-31 07:06:29 UTC", description: "Bump the aws group with 2 updates", pr_number: 19742, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 6, deletions_count: 6}, + {sha: "83be4258bf998e6a2741c0ddf44a5b2ff29cbc67", date: 
"2024-01-31 07:06:42 UTC", description: "Bump darling from 0.20.3 to 0.20.4", pr_number: 19743, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 13, deletions_count: 13}, + {sha: "60f5fe091dfb73139945c931a4fab2164d59cc92", date: "2024-01-31 04:59:43 UTC", description: "suggest make generate-component-docs", pr_number: 19740, scopes: ["docs"], type: "chore", breaking_change: false, author: "gabriel376", files_count: 2, insertions_count: 2, deletions_count: 2}, + {sha: "c1a39e4067362d6e699573c4be4a92cef044766f", date: "2024-01-31 13:44:37 UTC", description: "Enable population of event metadata by a VRL unit test source", pr_number: 19729, scopes: ["unit tests"], type: "enhancement", breaking_change: false, author: "Mykola Rybak", files_count: 3, insertions_count: 39, deletions_count: 2}, + {sha: "ba2b3508ef5e6995d3dbd47d70977aa1763e8a34", date: "2024-02-01 00:57:35 UTC", description: "Bump openssl-src from 300.2.1+3.2.0 to 300.2.2+3.2.1", pr_number: 19750, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "13ac2dfb981160e4f6d1541c8537e47d6ac761e9", date: "2024-02-01 06:58:21 UTC", description: "Bump darling from 0.20.4 to 0.20.5", pr_number: 19751, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 13, deletions_count: 13}, + {sha: "43a9a366c4dee15f0294a0cd22c2dc5b8b2daae8", date: "2024-02-01 05:38:24 UTC", description: "Add end-to-end tests with the Datadog Agent", pr_number: 18538, scopes: ["tests"], type: "chore", breaking_change: false, author: "neuronull", files_count: 56, insertions_count: 2071, deletions_count: 306}, + {sha: "d7c615c6837429d8e36cd02df8da2e7485656df2", date: "2024-02-01 07:11:01 UTC", description: "Add configurable support for `http::Uri`", pr_number: 19758, scopes: ["config"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 2, insertions_count: 29, deletions_count: 0}, + {sha: "0f3faba5ee3fae2531ce4bb9b739a1a54d860f69", date: "2024-02-01 08:38:24 UTC", description: "Add `delete_failed_message` configuration option", pr_number: 19748, scopes: ["s3 source"], type: "feat", breaking_change: false, author: "tanushri-sundar", files_count: 5, insertions_count: 79, deletions_count: 4}, + {sha: "abb292a8c6179eb5650cc2a88f18897aa71509cf", date: "2024-02-01 17:45:21 UTC", description: "Bump nick-fields/retry from 2 to 3", pr_number: 19756, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 74, deletions_count: 74}, + {sha: "3da1a0206583500abad617147d76b3faf602a09b", date: "2024-02-02 00:52:15 UTC", description: "Bump libc from 0.2.152 to 0.2.153", pr_number: 19763, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "00a94801025a215a78ce684422b0a986727ccc50", date: "2024-02-02 07:34:49 UTC", description: "Bump docker/metadata-action from 5.5.0 to 5.5.1", pr_number: 19755, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "bd9fbd682b673e01f712a79af326eb307883cfad", date: "2024-02-02 02:56:28 UTC", description: "Bump reqwest from 0.11.23 to 0.11.24", pr_number: 19762, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 
3, insertions_count: 7, deletions_count: 6}, + {sha: "65acf06934c733bf3608387b2264b071cca27f3d", date: "2024-02-02 04:51:59 UTC", description: "Bump toml from 0.8.8 to 0.8.9", pr_number: 19761, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 5, insertions_count: 22, deletions_count: 21}, + {sha: "7cd151a822f0073f9df4bf01d7aec11500f5efe1", date: "2024-02-02 04:10:38 UTC", description: "Update labels used by dependabot", pr_number: 19760, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 3, deletions_count: 1}, + {sha: "29a91a44ac762f2b02938d144503849a570ec747", date: "2024-02-02 10:39:14 UTC", description: "Revert \"Add configurable support for `http::Uri`\"", pr_number: 19770, scopes: ["config"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 2, insertions_count: 0, deletions_count: 29}, + {sha: "ac80d1ed07983d203671b7c2c625715fbc06a234", date: "2024-02-03 02:28:18 UTC", description: "expose DatadogSearch", pr_number: 19778, scopes: ["deps"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "a215d59f1fcef34913e4316c36ca09ebea3bf7a0", date: "2024-02-03 04:20:06 UTC", description: "Pass the extra context to sources and transforms too", pr_number: 19779, scopes: ["config"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 5, insertions_count: 19, deletions_count: 2}, + {sha: "0a2dc2bafa6e56218797a0c238118ed58fd94113", date: "2024-02-03 04:20:17 UTC", description: "Implement an easier creator for multi-valued `ExtraContext`", pr_number: 19777, scopes: [], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 1, insertions_count: 15, deletions_count: 13}, + {sha: "28a4cb4ca348287fb336f248988dd39ee9a74907", date: "2024-02-06 02:23:03 UTC", description: "add sink validator", pr_number: 17980, scopes: ["component validation"], type: "feat", breaking_change: false, author: "neuronull", files_count: 9, insertions_count: 375, deletions_count: 347}, + {sha: "17b29628c742a2841a19b19f70c5465935089b68", date: "2024-02-06 11:01:08 UTC", description: "Bump rkyv from 0.7.43 to 0.7.44", pr_number: 19789, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "0dce77620fbc240f6e880c6f49f7ef7f8bb5e3df", date: "2024-02-06 11:01:22 UTC", description: "Bump tokio from 1.35.1 to 1.36.0", pr_number: 19790, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 10, insertions_count: 13, deletions_count: 13}, + {sha: "774693772f6543166892c8497b3e9ab699045435", date: "2024-02-06 11:01:30 UTC", description: "Bump opendal from 0.44.2 to 0.45.0", pr_number: 19788, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "a247c515f768ef2293821e802ec3c7793cd5a1d5", date: "2024-02-06 11:03:25 UTC", description: "Bump dorny/paths-filter from 2 to 3", pr_number: 19768, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "43b96baa64a8cd6eefec1679f3b34ad753121d62", date: "2024-02-06 11:35:59 UTC", description: "Bump serde_with from 3.5.0 to 3.6.0", pr_number: 19800, scopes: ["deps"], type: "chore", breaking_change: false, author: 
"dependabot[bot]", files_count: 4, insertions_count: 14, deletions_count: 14}, + {sha: "509a858e74d43a431589b21928c405ac461f6551", date: "2024-02-06 15:28:32 UTC", description: "Bump vrl from 0.9.1 to 0.10.0", pr_number: 19705, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 9, insertions_count: 53, deletions_count: 29}, + {sha: "c5ee82faf01b543ad4db746abe5d4a305844a406", date: "2024-02-06 23:46:31 UTC", description: "Bump Alpine base image from 3.18 to 3.19", pr_number: 19804, scopes: ["releasing"], type: "enhancement", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 2, deletions_count: 1}, + {sha: "541e3086abcb4d95b77c273f6de19d9dc326c156", date: "2024-02-07 07:48:50 UTC", description: "Bump the clap group with 1 update", pr_number: 19786, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "68272040067f5cf167d925234bdfc15b6bd60f6f", date: "2024-02-07 07:48:58 UTC", description: "Bump ratatui from 0.25.0 to 0.26.0", pr_number: 19787, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 57, deletions_count: 10}, + {sha: "b3e0af7f268c2ef4c26299195a0aec0263df0b61", date: "2024-02-07 07:49:16 UTC", description: "Bump tempfile from 3.9.0 to 3.10.0", pr_number: 19807, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 16, deletions_count: 17}, + {sha: "f38ed158f939c6acf78cd039349d897f7127f0d1", date: "2024-02-07 07:49:26 UTC", description: "Bump toml from 0.8.9 to 0.8.10", pr_number: 19808, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 6, deletions_count: 6}, + {sha: "fd76dbf0fff80f89e1b7bdbfb57cf864709e9dfa", date: "2024-02-07 01:23:23 UTC", description: "re-organize to expose sampling logic", pr_number: 19806, scopes: ["sample transform"], type: "chore", breaking_change: false, author: "neuronull", files_count: 6, insertions_count: 183, deletions_count: 140}, + {sha: "c4fe1342ce8b80ef822203f01ef0093751195a3d", date: "2024-02-07 12:34:42 UTC", description: "make containing module pub", pr_number: 19816, scopes: ["sample transform"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "9d4e89ee6304918be9a91e32a2edf89189bfe4c4", date: "2024-02-08 08:04:20 UTC", description: "fix example for high quality error messages", pr_number: 19821, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 1, insertions_count: 21, deletions_count: 9}, + {sha: "92b83cd2bea0c075134ea33bb2b204d333e4f27e", date: "2024-02-08 01:43:14 UTC", description: "clippy lint on feature flag case", pr_number: 19822, scopes: ["sample transform"], type: "fix", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 3, deletions_count: 0}, + {sha: "cf1aec66cd5fd9c4d01efce646de167a079b195e", date: "2024-02-08 04:22:21 UTC", description: "Add support for proptest to lookup types", pr_number: 19769, scopes: ["tests"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 16, insertions_count: 104, deletions_count: 55}, + {sha: "0046ee9b394274bc184efd2a07e76639cebe12fb", date: "2024-02-08 09:01:08 UTC", description: "expose component test utils", pr_number: 19826, scopes: [], type: "chore", 
breaking_change: false, author: "Pavlos Rontidis", files_count: 2, insertions_count: 2, deletions_count: 1}, + {sha: "56486bafe6ce41a7c92a11ccd0e2cf6e8f7ef838", date: "2024-02-08 07:11:17 UTC", description: "Bump VRL to 0.11.0", pr_number: 19827, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 11, deletions_count: 9}, + {sha: "fa2c1941b3cf98316a94575a5faa9f0a025e8a9c", date: "2024-02-09 09:02:19 UTC", description: "Bump aws-actions/configure-aws-credentials from 4.0.1 to 4.0.2", pr_number: 19823, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 7, deletions_count: 7}, + {sha: "1c09d09cd4b9f86fd5e0a79d97fc6eb4b215cfa2", date: "2024-02-09 09:02:22 UTC", description: "Bump the prost group with 1 update", pr_number: 19830, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "c4593b743078762597c95c9a31430dfc2b845b37", date: "2024-02-09 09:02:24 UTC", description: "Bump num-traits from 0.2.17 to 0.2.18", pr_number: 19831, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "c797bc69b51574778b804f9bbdeb449af4f9af19", date: "2024-02-09 02:30:52 UTC", description: "improve example for `rate` setting", pr_number: 19834, scopes: ["sample transform"], type: "chore", breaking_change: false, author: "neuronull", files_count: 2, insertions_count: 7, deletions_count: 4}, + {sha: "c2917c1e22a9642d0e0072654c40be0c385c6b9b", date: "2024-02-09 02:40:10 UTC", description: "Documentation for redact redactor option", pr_number: 19749, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Thayne McCombs", files_count: 1, insertions_count: 61, deletions_count: 0}, + {sha: "0d57ad9548dbfc97f7e6d32d81c6e179e19a465e", date: "2024-02-09 11:03:21 UTC", description: "Bump serde_yaml from 0.9.30 to 0.9.31", pr_number: 19832, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 10, deletions_count: 10}, + {sha: "a5d9a2777f97d23ea880a2a9f819878d6c69cfa5", date: "2024-02-09 12:04:16 UTC", description: "add support for parsing HTTPS and SVCB records", pr_number: 19819, scopes: ["dnsmsg_parser"], type: "feat", breaking_change: false, author: "Ensar Sarajčić", files_count: 5, insertions_count: 109, deletions_count: 8}, + {sha: "9e297f6c4faa503d195f29648aa5e35c7343acdd", date: "2024-02-09 12:06:37 UTC", description: "Bump serde-toml-merge from 0.3.3 to 0.3.4", pr_number: 19771, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "ff246b621b8c6d5c052621d4a4e86c6942a20f13", date: "2024-02-09 12:25:15 UTC", description: "Bump wasm-bindgen from 0.2.90 to 0.2.91", pr_number: 19817, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 10, deletions_count: 10}, + {sha: "9a20a12be927d29e929b62e4313193d91b86f543", date: "2024-02-09 07:41:38 UTC", description: "implement VRL decoder", pr_number: 19825, scopes: ["codecs"], type: "feat", breaking_change: false, author: "Pavlos Rontidis", files_count: 25, insertions_count: 992, deletions_count: 4}, + {sha: "4115c65587918e0f8a8ab31b1444e5c79e12e5ec", date: "2024-02-09 14:49:39 UTC", description: "add documentation for `parse_etld` 
function", pr_number: 19795, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 3, insertions_count: 68, deletions_count: 0}, + {sha: "de24167165a026c4df387459058efe341631668e", date: "2024-02-09 16:07:35 UTC", description: "fix inconsistency in docker configuration example", pr_number: 19797, scopes: ["setup"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "0e6cf3e439e484f3e4e29d8a90b9250ebb274e95", date: "2024-02-09 16:41:05 UTC", description: "correctly emit metadata to log namespace", pr_number: 19812, scopes: ["journald source"], type: "fix", breaking_change: false, author: "dalegaard", files_count: 3, insertions_count: 38, deletions_count: 11}, + {sha: "18252206790c0c97863d110d0ec2cdd3bb15d24d", date: "2024-02-09 16:45:36 UTC", description: "New --skip-healthchecks option for vector validate", pr_number: 19691, scopes: ["administration config"], type: "enhancement", breaking_change: false, author: "Martin Emrich", files_count: 4, insertions_count: 28, deletions_count: 2}, + {sha: "7c3f91b3de204adcc154b9b0bcad1f5a85741ee3", date: "2024-02-09 07:53:00 UTC", description: "Look at merge base when looking for added changelog files", pr_number: 19835, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "c172d504ea26f06a5be15c71dbfa6b135d732dc1", date: "2024-02-09 07:59:11 UTC", description: "Ensure changelog fragment author doesn't start with @", pr_number: 19836, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 3, insertions_count: 6, deletions_count: 3}, + {sha: "ab9bf4ed2aa9e00223c973e5c899b1ef8aedade0", date: "2024-02-09 09:12:43 UTC", description: "Add VRL function get_vector_timezone", pr_number: 19727, scopes: ["vrl"], type: "enhancement", breaking_change: false, author: "klondikedragon", files_count: 3, insertions_count: 54, deletions_count: 0}, + {sha: "ed5578e89c1b0237e826ce0968713d67a99febef", date: "2024-02-10 07:34:22 UTC", description: "Bump heim from `76fa765` to `a66c440`", pr_number: 19840, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 8, deletions_count: 8}, + {sha: "86fe001b474cdd7cf74a63bd2f36b2fc81cf9f9f", date: "2024-02-10 07:34:35 UTC", description: "Bump serde_with from 3.6.0 to 3.6.1", pr_number: 19841, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 15, deletions_count: 14}, + {sha: "0851fca24799b9cd61df4eb7c7ab1838ae668236", date: "2024-02-10 10:49:40 UTC", description: "add documentation for punycode encoding functions", pr_number: 19794, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 6, insertions_count: 121, deletions_count: 0}, + {sha: "405f3ef22c3e25e196a4d9f76a8dfbb17f2e8c5c", date: "2024-02-10 10:50:09 UTC", description: "make `parse_etld` fallible in docs", pr_number: 19842, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 1, insertions_count: 3, deletions_count: 1}, + {sha: "4ab4c4a3c846f3d295feb890e923e9116a0b0441", date: "2024-02-10 03:37:04 UTC", description: "checkout full depth for changelog workflow", pr_number: 19844, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 5, deletions_count: 0}, + {sha: 
"beb76a81e8761da4eb2e0873607ba327baa81ea9", date: "2024-02-10 04:35:45 UTC", description: "Add documentation for replace_with", pr_number: 19638, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Thayne McCombs", files_count: 1, insertions_count: 85, deletions_count: 0}, + {sha: "382ab32476d5204979e2170de90adcd6087edb64", date: "2024-02-10 11:36:37 UTC", description: "Bump the aws group with 5 updates", pr_number: 19838, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 15, deletions_count: 15}, + {sha: "76ab88dfcb51014986bed948f499cd51c5582bf4", date: "2024-02-10 04:31:06 UTC", description: "Reduce test timeout to 2 minutes", pr_number: 19845, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 3, deletions_count: 0}, + {sha: "52049f81459d064abaf92e302414160e1ab39512", date: "2024-02-10 12:26:45 UTC", description: "add format", pr_number: 19739, scopes: ["clickhouse sink"], type: "feat", breaking_change: false, author: "gabriel376", files_count: 6, insertions_count: 99, deletions_count: 4}, + {sha: "51ee1044a1a60528c52b87e3f1f4cbd0290308fe", date: "2024-02-13 00:53:06 UTC", description: "Bump indexmap from 2.2.2 to 2.2.3", pr_number: 19855, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 23, deletions_count: 23}, + {sha: "493fb74d9530e8dc536e61b0e94ba327f8aac8cb", date: "2024-02-13 00:54:03 UTC", description: "Bump mongodb from 2.8.0 to 2.8.1", pr_number: 19856, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "9a610b009f7809458f50b9dd7ecab5aa15347282", date: "2024-02-13 06:54:56 UTC", description: "Bump thiserror from 1.0.56 to 1.0.57", pr_number: 19854, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 4, deletions_count: 4}, + {sha: "6bac428780de7d79cd750be9cfc36c4060a00019", date: "2024-02-13 07:45:12 UTC", description: "Bump chrono from 0.4.33 to 0.4.34", pr_number: 19851, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "9e7e658fa53c25d7d78d4fff00cdb3bb06f6af19", date: "2024-02-13 07:47:56 UTC", description: "Bump indicatif from 0.17.7 to 0.17.8", pr_number: 19850, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + ] +} diff --git a/website/cue/reference/versions.cue b/website/cue/reference/versions.cue index f7f360eb24004..a2e8b95e5c1ca 100644 --- a/website/cue/reference/versions.cue +++ b/website/cue/reference/versions.cue @@ -2,6 +2,7 @@ package metadata // This has to be maintained manually because there's currently no way to sort versions programmatically versions: [string, ...string] & [ + "0.36.0", "0.35.1", "0.35.0", "0.34.2", From 63a50740746325a4320a7c0e8da0b79dd2df3521 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 13 Feb 2024 11:06:19 -0800 Subject: [PATCH 0011/1491] chore(vrl stdlib): Fix redact doc URL templating Signed-off-by: Jesse Szwedko --- website/cue/reference/remap/functions/redact.cue | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/website/cue/reference/remap/functions/redact.cue b/website/cue/reference/remap/functions/redact.cue index 
6ee75af4a9370..0e278d62d1dd0 100644 --- a/website/cue/reference/remap/functions/redact.cue +++ b/website/cue/reference/remap/functions/redact.cue @@ -57,8 +57,8 @@ remap: functions: redact: { type: ["array"] }, { - name: "redactor" - description: #""" + name: "redactor" + description: """ Specifies what to replace the redacted strings with. It is given as an object with a "type" key specifying the type of redactor to use @@ -85,8 +85,8 @@ remap: functions: redact: { This parameter must be a static expression so that the argument can be validated at compile-time to avoid runtime errors. You cannot use variables or other dynamic expressions with it. - """# - required: false + """ + required: false type: ["string", "object"] }, ] From 800173f9c57087c414059ba84d86105054e23781 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 13 Feb 2024 11:17:58 -0800 Subject: [PATCH 0012/1491] chore(releasing): Fix markdown formatting of v0.36.0 release description Signed-off-by: Jesse Szwedko --- website/cue/reference/releases/0.36.0.cue | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/website/cue/reference/releases/0.36.0.cue b/website/cue/reference/releases/0.36.0.cue index 768f2e37c4a21..5d9f536c7483d 100644 --- a/website/cue/reference/releases/0.36.0.cue +++ b/website/cue/reference/releases/0.36.0.cue @@ -7,18 +7,18 @@ releases: "0.36.0": { whats_next: [] description: """ - The Vector team is pleased to announce version 0.36.0! + The Vector team is pleased to announce version 0.36.0! - There are no breaking changes in this release. + There are no breaking changes in this release. - In addition to the usual enhancements and bug fixes, this release also includes + In addition to the usual enhancements and bug fixes, this release also includes - - A new `prometheus_pushgateway` source to receive Prometheus data + - A new `prometheus_pushgateway` source to receive Prometheus data - A reminder that the `repositories.timber.io` package repositories will be decommissioned on - February 28th, 2024. Please see the [release - highlight](/highlights/2023-11-07-new-linux-repos) for details about this change and - instructions on how to migrate. + A reminder that the `repositories.timber.io` package repositories will be decommissioned on + February 28th, 2024. Please see the [release + highlight](/highlights/2023-11-07-new-linux-repos) for details about this change and + instructions on how to migrate. """ changelog: [ From 2014b536f0a0af911f874802ed7fbf5237af009b Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 13 Feb 2024 11:19:29 -0800 Subject: [PATCH 0013/1491] chore(releasing): Add additional note about new VRL decoder Signed-off-by: Jesse Szwedko --- website/cue/reference/releases/0.36.0.cue | 1 + 1 file changed, 1 insertion(+) diff --git a/website/cue/reference/releases/0.36.0.cue b/website/cue/reference/releases/0.36.0.cue index 5d9f536c7483d..afbde3ca83b50 100644 --- a/website/cue/reference/releases/0.36.0.cue +++ b/website/cue/reference/releases/0.36.0.cue @@ -14,6 +14,7 @@ releases: "0.36.0": { In addition to the usual enhancements and bug fixes, this release also includes - A new `prometheus_pushgateway` source to receive Prometheus data + - A new 'vrl' decoder that can be used to decode data in sources using a VRL program A reminder that the `repositories.timber.io` package repositories will be decommissioned on February 28th, 2024. 
Please see the [release From cd6bbae5f3300acbc5607be2eaf98d718fe14ce3 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 13 Feb 2024 11:20:55 -0800 Subject: [PATCH 0014/1491] chore(releasing): Fix markdown formatting of v0.36.0 description Signed-off-by: Jesse Szwedko --- website/cue/reference/releases/0.36.0.cue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/cue/reference/releases/0.36.0.cue b/website/cue/reference/releases/0.36.0.cue index afbde3ca83b50..d4df4ecc78368 100644 --- a/website/cue/reference/releases/0.36.0.cue +++ b/website/cue/reference/releases/0.36.0.cue @@ -14,7 +14,7 @@ releases: "0.36.0": { In addition to the usual enhancements and bug fixes, this release also includes - A new `prometheus_pushgateway` source to receive Prometheus data - - A new 'vrl' decoder that can be used to decode data in sources using a VRL program + - A new `vrl` decoder that can be used to decode data in sources using a VRL program A reminder that the `repositories.timber.io` package repositories will be decommissioned on February 28th, 2024. Please see the [release From 0a89cb13714876da089ea09d4881e98a890b3976 Mon Sep 17 00:00:00 2001 From: Sergey Yedrikov <48031344+syedriko@users.noreply.github.com> Date: Tue, 13 Feb 2024 15:04:04 -0500 Subject: [PATCH 0015/1491] enhancement(file source, kubernetes_logs source): add rotate_wait_ms config option (#18904) * enhancement(file source) * enhancement(kubernetes_logs source) For the file and kubernetes_logs sources, introduced a new configuration variable, rotate_wait_secs, defaulting to practical infinity. Out of the box, this default effectively turns this feature off. rotate_wait_secs determines for how long Vector keeps trying to read from a log file that has been deleted (most likely due to log rotation, hence the name of the variable). Once that time span has expired, Vector stops reading from and closes the file descriptor of the deleted file, thus allowing the OS to reclaim the storage space occupied by the file. This behavior is similar to that of Fluentd's tail plugin: https://docs.fluentd.org/input/tail#rotate_wait Addresses issue (#18863) Co-authored-by: Jesse Szwedko --- .../18863_k8s_logs_rotate_wait.enhancement.md | 3 +++ lib/file-source/src/file_server.rs | 10 +++++++++- lib/file-source/src/file_watcher/mod.rs | 19 +++++++++++++++++++ lib/file-source/src/fingerprinter.rs | 2 +- lib/file-source/src/internal_events.rs | 2 +- src/internal_events/file.rs | 13 ++++++++++--- src/sources/file.rs | 13 +++++++++++++ src/sources/kubernetes_logs/mod.rs | 16 ++++++++++++++++ .../components/sources/base/file.cue | 11 +++++++++++ .../sources/base/kubernetes_logs.cue | 11 +++++++++++ 10 files changed, 94 insertions(+), 6 deletions(-) create mode 100644 changelog.d/18863_k8s_logs_rotate_wait.enhancement.md diff --git a/changelog.d/18863_k8s_logs_rotate_wait.enhancement.md b/changelog.d/18863_k8s_logs_rotate_wait.enhancement.md new file mode 100644 index 0000000000000..042a8a6118cfd --- /dev/null +++ b/changelog.d/18863_k8s_logs_rotate_wait.enhancement.md @@ -0,0 +1,3 @@ +A new configuration option `rotate_wait_secs` was added to the `file` and `kubernetes_logs` sources. `rotate_wait_secs` determines for how long Vector keeps trying to read from a log file that has been deleted. Once that time span has expired, Vector stops reading from and closes the file descriptor of the deleted file, thus allowing the OS to reclaim the storage space occupied by the file. 
+ +authors: syedriko diff --git a/lib/file-source/src/file_server.rs b/lib/file-source/src/file_server.rs index 59604037797c6..350664af478cc 100644 --- a/lib/file-source/src/file_server.rs +++ b/lib/file-source/src/file_server.rs @@ -52,6 +52,7 @@ where pub remove_after: Option, pub emitter: E, pub handle: tokio::runtime::Handle, + pub rotate_wait: Duration, } /// `FileServer` as Source @@ -292,11 +293,18 @@ where } } + for (_, watcher) in &mut fp_map { + if !watcher.file_findable() && watcher.last_seen().elapsed() > self.rotate_wait { + watcher.set_dead(); + } + } + // A FileWatcher is dead when the underlying file has disappeared. // If the FileWatcher is dead we don't retain it; it will be deallocated. fp_map.retain(|file_id, watcher| { if watcher.dead() { - self.emitter.emit_file_unwatched(&watcher.path); + self.emitter + .emit_file_unwatched(&watcher.path, watcher.reached_eof()); checkpoints.set_dead(*file_id); false } else { diff --git a/lib/file-source/src/file_watcher/mod.rs b/lib/file-source/src/file_watcher/mod.rs index f39df392748cf..7ac50173ea2b7 100644 --- a/lib/file-source/src/file_watcher/mod.rs +++ b/lib/file-source/src/file_watcher/mod.rs @@ -42,8 +42,10 @@ pub struct FileWatcher { devno: u64, inode: u64, is_dead: bool, + reached_eof: bool, last_read_attempt: Instant, last_read_success: Instant, + last_seen: Instant, max_line_bytes: usize, line_delimiter: Bytes, buf: BytesMut, @@ -143,8 +145,10 @@ impl FileWatcher { devno, inode: ino, is_dead: false, + reached_eof: false, last_read_attempt: ts, last_read_success: ts, + last_seen: ts, max_line_bytes, line_delimiter, buf: BytesMut::new(), @@ -176,6 +180,9 @@ impl FileWatcher { pub fn set_file_findable(&mut self, f: bool) { self.findable = f; + if f { + self.last_seen = Instant::now(); + } } pub fn file_findable(&self) -> bool { @@ -228,6 +235,7 @@ impl FileWatcher { let buf = self.buf.split().freeze(); if buf.is_empty() { // EOF + self.reached_eof = true; Ok(None) } else { Ok(Some(RawLine { @@ -236,6 +244,7 @@ impl FileWatcher { })) } } else { + self.reached_eof = true; Ok(None) } } @@ -268,6 +277,16 @@ impl FileWatcher { self.last_read_success.elapsed() < Duration::from_secs(10) || self.last_read_attempt.elapsed() > Duration::from_secs(10) } + + #[inline] + pub fn last_seen(&self) -> Instant { + self.last_seen + } + + #[inline] + pub fn reached_eof(&self) -> bool { + self.reached_eof + } } fn is_gzipped(r: &mut io::BufReader) -> io::Result { diff --git a/lib/file-source/src/fingerprinter.rs b/lib/file-source/src/fingerprinter.rs index de83749eb9d22..30086da1cd479 100644 --- a/lib/file-source/src/fingerprinter.rs +++ b/lib/file-source/src/fingerprinter.rs @@ -529,7 +529,7 @@ mod test { panic!(); } - fn emit_file_unwatched(&self, _: &Path) {} + fn emit_file_unwatched(&self, _: &Path, _: bool) {} fn emit_file_deleted(&self, _: &Path) {} diff --git a/lib/file-source/src/internal_events.rs b/lib/file-source/src/internal_events.rs index 20195bb5deb22..9eb60e65397a1 100644 --- a/lib/file-source/src/internal_events.rs +++ b/lib/file-source/src/internal_events.rs @@ -9,7 +9,7 @@ pub trait FileSourceInternalEvents: Send + Sync + Clone + 'static { fn emit_file_watch_error(&self, path: &Path, error: Error); - fn emit_file_unwatched(&self, path: &Path); + fn emit_file_unwatched(&self, path: &Path, reached_eof: bool); fn emit_file_deleted(&self, path: &Path); diff --git a/src/internal_events/file.rs b/src/internal_events/file.rs index fc493a1a83c78..876ffc3af0c2f 100644 --- a/src/internal_events/file.rs +++ b/src/internal_events/file.rs 
@@ -305,21 +305,27 @@ mod source { pub struct FileUnwatched<'a> { pub file: &'a Path, pub include_file_metric_tag: bool, + pub reached_eof: bool, } impl<'a> InternalEvent for FileUnwatched<'a> { fn emit(self) { + let reached_eof = if self.reached_eof { "true" } else { "false" }; info!( message = "Stopped watching file.", file = %self.file.display(), + reached_eof ); if self.include_file_metric_tag { counter!( "files_unwatched_total", 1, "file" => self.file.to_string_lossy().into_owned(), + "reached_eof" => reached_eof, ); } else { - counter!("files_unwatched_total", 1); + counter!("files_unwatched_total", 1, + "reached_eof" => reached_eof, + ); } } } @@ -505,10 +511,11 @@ mod source { }); } - fn emit_file_unwatched(&self, file: &Path) { + fn emit_file_unwatched(&self, file: &Path, reached_eof: bool) { emit!(FileUnwatched { file, - include_file_metric_tag: self.include_file_metric_tag + include_file_metric_tag: self.include_file_metric_tag, + reached_eof }); } diff --git a/src/sources/file.rs b/src/sources/file.rs index bad6bdc0d06af..6e618d11c4f3a 100644 --- a/src/sources/file.rs +++ b/src/sources/file.rs @@ -252,6 +252,13 @@ pub struct FileConfig { #[configurable(derived)] #[serde(default)] internal_metrics: FileInternalMetricsConfig, + + /// How long to keep an open handle to a rotated log file. + /// The default value represents "no limit" + #[serde_as(as = "serde_with::DurationSeconds")] + #[configurable(metadata(docs::type_unit = "seconds"))] + #[serde(default = "default_rotate_wait", rename = "rotate_wait_secs")] + pub rotate_wait: Duration, } fn default_max_line_bytes() -> usize { @@ -286,6 +293,10 @@ fn default_line_delimiter() -> String { "\n".to_string() } +const fn default_rotate_wait() -> Duration { + Duration::from_secs(u64::MAX / 2) +} + /// Configuration for how files should be identified. /// /// This is important for `checkpointing` when file rotation is used. @@ -404,6 +415,7 @@ impl Default for FileConfig { acknowledgements: Default::default(), log_namespace: None, internal_metrics: Default::default(), + rotate_wait: default_rotate_wait(), } } } @@ -556,6 +568,7 @@ pub fn file_source( remove_after: config.remove_after_secs.map(Duration::from_secs), emitter, handle: tokio::runtime::Handle::current(), + rotate_wait: config.rotate_wait, }; let event_metadata = EventMetadata { diff --git a/src/sources/kubernetes_logs/mod.rs b/src/sources/kubernetes_logs/mod.rs index bd079c76f8839..53a44e1601256 100644 --- a/src/sources/kubernetes_logs/mod.rs +++ b/src/sources/kubernetes_logs/mod.rs @@ -243,6 +243,13 @@ pub struct Config { #[configurable(derived)] #[serde(default)] internal_metrics: FileInternalMetricsConfig, + + /// How long to keep an open handle to a rotated log file. 
+ /// The default value represents "no limit" + #[serde_as(as = "serde_with::DurationSeconds")] + #[configurable(metadata(docs::type_unit = "seconds"))] + #[serde(default = "default_rotate_wait", rename = "rotate_wait_secs")] + rotate_wait: Duration, } const fn default_read_from() -> ReadFromConfig { @@ -287,6 +294,7 @@ impl Default for Config { delay_deletion_ms: default_delay_deletion_ms(), log_namespace: None, internal_metrics: Default::default(), + rotate_wait: default_rotate_wait(), } } } @@ -538,6 +546,7 @@ struct Source { ingestion_timestamp_field: Option, delay_deletion: Duration, include_file_metric_tag: bool, + rotate_wait: Duration, } impl Source { @@ -617,6 +626,7 @@ impl Source { ingestion_timestamp_field, delay_deletion, include_file_metric_tag: config.internal_metrics.include_file_tag, + rotate_wait: config.rotate_wait, }) } @@ -650,6 +660,7 @@ impl Source { ingestion_timestamp_field, delay_deletion, include_file_metric_tag, + rotate_wait, } = self; let mut reflectors = Vec::new(); @@ -792,6 +803,7 @@ impl Source { }, // A handle to the current tokio runtime handle: tokio::runtime::Handle::current(), + rotate_wait, }; let (file_source_tx, file_source_rx) = futures::channel::mpsc::channel::>(2); @@ -995,6 +1007,10 @@ const fn default_delay_deletion_ms() -> Duration { Duration::from_millis(60_000) } +const fn default_rotate_wait() -> Duration { + Duration::from_secs(u64::MAX / 2) +} + // This function constructs the patterns we exclude from file watching, created // from the defaults or user provided configuration. fn prepare_exclude_paths(config: &Config) -> crate::Result> { diff --git a/website/cue/reference/components/sources/base/file.cue b/website/cue/reference/components/sources/base/file.cue index 33c167f7cf617..af94e06f9200a 100644 --- a/website/cue/reference/components/sources/base/file.cue +++ b/website/cue/reference/components/sources/base/file.cue @@ -368,4 +368,15 @@ base: components: sources: file: configuration: { unit: "seconds" } } + rotate_wait_secs: { + description: """ + How long to keep an open handle to a rotated log file. + The default value represents "no limit" + """ + required: false + type: uint: { + default: 9223372036854775807 + unit: "seconds" + } + } } diff --git a/website/cue/reference/components/sources/base/kubernetes_logs.cue b/website/cue/reference/components/sources/base/kubernetes_logs.cue index 4954334e2fe90..1db54ec13bbfe 100644 --- a/website/cue/reference/components/sources/base/kubernetes_logs.cue +++ b/website/cue/reference/components/sources/base/kubernetes_logs.cue @@ -405,6 +405,17 @@ base: components: sources: kubernetes_logs: configuration: { } } } + rotate_wait_secs: { + description: """ + How long to keep an open handle to a rotated log file. + The default value represents "no limit" + """ + required: false + type: uint: { + default: 9223372036854775807 + unit: "seconds" + } + } self_node_name: { description: """ The name of the Kubernetes [Node][node] that is running. 
From f88316cce7665c6dbf83a81a8261fa126b50542e Mon Sep 17 00:00:00 2001 From: David Mládek Date: Wed, 14 Feb 2024 18:41:26 +0100 Subject: [PATCH 0016/1491] feat(mqtt sink): add MQTT sink (#19813) * feat(mqtt sink): Implement an MQTT sink * Update src/sinks/mqtt/sink.rs Co-authored-by: Kyle Criddle * Update src/sinks/mqtt/sink.rs Co-authored-by: Kyle Criddle * Update src/sinks/mqtt/mod.rs Co-authored-by: Kyle Criddle * comment in error_codes * fix * mqtt updates * sinks/mqtt: make user+password optional * sinks/mqtt: doc * sinks/mqtt: fix error/delivered flow * sinks/mqtt: default acknowledgements config * sinks/mqtt: refactor flow * sinks/mqtt: randomize default_client_id() * sinks/mqtt: use mqtt acknowledgements * fix(merge): update PR to the latest changes - update to the newer metrics infrastructure - update rumqttc version - fix compilation errors - fix formatting Tested: - Local build * fix: code review fixes - add quality_of_service option - add mqtt.org link - update documentation Tested: - Local build * fix: code review fixes * fix: code review * fix: remove redundant event * fix: disable healthcheck * fix: CUE fixes * fix: remove random from client-id * Update website/cue/reference/components/sinks/base/mqtt.cue Co-authored-by: Jesse Szwedko * Apply suggestions from code review Co-authored-by: neuronull * code review * feat: add integration tests * test integration test on CI * fix: integration_tests * docs: re-generate docs * fix: add mqtt test to GitHub Actions * fix: try integration tests again * Update to use SinkExt components Signed-off-by: Stephen Wakely * Workflow conflict Signed-off-by: Stephen Wakely * Proper workflow conflict Signed-off-by: Stephen Wakely * Put receiver in new task Signed-off-by: Stephen Wakely * Drop timeout Signed-off-by: Stephen Wakely * fix config * fixup integration test * Apply suggestions from code review Co-authored-by: Bryce Eadie * fix spellchecking * changelog * fix events * fix merge from master * update spellchecker * fix config test * Update scripts/integration/mqtt/test.yaml Co-authored-by: neuronull * regenerate mqtt sink cue reference * fix default qos * fix changelog after updates * update license * fix cue internal metrics * include in CI --------- Signed-off-by: Stephen Wakely Co-authored-by: Astro Co-authored-by: Kyle Criddle Co-authored-by: Alexander Zaitsev Co-authored-by: Jesse Szwedko Co-authored-by: Stephen Wakely Co-authored-by: Bryce Eadie Co-authored-by: neuronull --- .github/actions/spelling/allow.txt | 6 + .github/workflows/changes.yml | 3 + .github/workflows/integration-comment.yml | 10 + .github/workflows/integration.yml | 8 + Cargo.lock | 34 +- Cargo.toml | 4 + LICENSE-3rdparty.csv | 1 + changelog.d/19813_add_mqtt_sink.feature.md | 3 + scripts/integration/mqtt/compose.yaml | 11 + scripts/integration/mqtt/test.yaml | 12 + src/internal_events/mod.rs | 4 + src/internal_events/mqtt.rs | 34 ++ src/sinks/mod.rs | 2 + src/sinks/mqtt/config.rs | 217 +++++++++ src/sinks/mqtt/integration_tests.rs | 104 +++++ src/sinks/mqtt/mod.rs | 9 + src/sinks/mqtt/request_builder.rs | 92 ++++ src/sinks/mqtt/service.rs | 92 ++++ src/sinks/mqtt/sink.rs | 151 +++++++ .../reference/configuration/sinks/mqtt.md | 14 + .../reference/components/sinks/base/mqtt.cue | 423 ++++++++++++++++++ .../cue/reference/components/sinks/mqtt.cue | 85 ++++ website/cue/reference/services/mqtt.cue | 10 + website/cue/reference/urls.cue | 1 + 24 files changed, 1328 insertions(+), 2 deletions(-) create mode 100644
changelog.d/19813_add_mqtt_sink.feature.md create mode 100644 scripts/integration/mqtt/compose.yaml create mode 100644 scripts/integration/mqtt/test.yaml create mode 100644 src/internal_events/mqtt.rs create mode 100644 src/sinks/mqtt/config.rs create mode 100644 src/sinks/mqtt/integration_tests.rs create mode 100644 src/sinks/mqtt/mod.rs create mode 100644 src/sinks/mqtt/request_builder.rs create mode 100644 src/sinks/mqtt/service.rs create mode 100644 src/sinks/mqtt/sink.rs create mode 100644 website/content/en/docs/reference/configuration/sinks/mqtt.md create mode 100644 website/cue/reference/components/sinks/base/mqtt.cue create mode 100644 website/cue/reference/components/sinks/mqtt.cue create mode 100644 website/cue/reference/services/mqtt.cue diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index 1e2d039b9ac8f..a44b6ce736215 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -23,6 +23,8 @@ Arival Arnova Asus Atlassian +atleastonce +atmostonce Attab Audiosonic avsc @@ -53,8 +55,11 @@ Dockerfiles DOOV Douban E2ETest +emqx +eventloop Enot Evercoss +exactlyonce Explay FAQs FDO @@ -168,6 +173,7 @@ Rockchip Roku Roundcube Rowling +rumqttc SBT SKtelesys Salesforce diff --git a/.github/workflows/changes.yml b/.github/workflows/changes.yml index 121015a2b557a..7dec7b4a0162e 100644 --- a/.github/workflows/changes.yml +++ b/.github/workflows/changes.yml @@ -96,6 +96,8 @@ on: value: ${{ jobs.int_tests.outputs.loki }} mongodb: value: ${{ jobs.int_tests.outputs.mongodb }} + mqtt: + value: ${{ jobs.int_tests.outputs.mqtt }} nats: value: ${{ jobs.int_tests.outputs.nats }} nginx: @@ -221,6 +223,7 @@ jobs: logstash: ${{ steps.filter.outputs.logstash }} loki: ${{ steps.filter.outputs.loki }} mongodb: ${{ steps.filter.outputs.mongodb }} + mqtt: ${{ steps.filter.outputs.mqtt }} nats: ${{ steps.filter.outputs.nats }} nginx: ${{ steps.filter.outputs.nginx }} opentelemetry: ${{ steps.filter.outputs.opentelemetry }} diff --git a/.github/workflows/integration-comment.yml b/.github/workflows/integration-comment.yml index b0fb07d4ab1e2..fc755e8b3256a 100644 --- a/.github/workflows/integration-comment.yml +++ b/.github/workflows/integration-comment.yml @@ -344,6 +344,16 @@ jobs: max_attempts: 3 command: bash scripts/ci-int-e2e-test.sh int mongodb + - name: mqtt + if: ${{ contains(github.event.comment.body, '/ci-run-integration-mqtt') + || contains(github.event.comment.body, '/ci-run-integration-all') + || contains(github.event.comment.body, '/ci-run-all') }} + uses: nick-fields/retry@v3 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-int-e2e-test.sh int mqtt + - run: docker image prune -af --filter=label!=vector-test-runner=true ; docker container prune -f - name: nats diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 1e29ac2e88925..91c876bbf59a1 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -321,6 +321,14 @@ jobs: max_attempts: 3 command: bash scripts/ci-int-e2e-test.sh int mongodb + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.mqtt == 'true' }} + name: mqtt + uses: nick-fields/retry@v3 + with: + timeout_minutes: 30 + max_attempts: 3 + command: bash scripts/ci-int-e2e-test.sh int mqtt + - if: ${{ github.event_name == 'merge_group' || needs.changes.outputs.all-int == 'true' || needs.changes.outputs.nats == 'true' }} name: nats uses: nick-fields/retry@v3 diff 
--git a/Cargo.lock b/Cargo.lock index 5801565f67b8f..c52d6489b0d87 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3341,6 +3341,17 @@ dependencies = [ "spin 0.9.8", ] +[[package]] +name = "flume" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "55ac459de2512911e4b674ce33cf20befaba382d05b62b008afc1c8b57cbf181" +dependencies = [ + "futures-core", + "futures-sink", + "spin 0.9.8", +] + [[package]] name = "fnv" version = "1.0.7" @@ -4807,7 +4818,7 @@ dependencies = [ "async-reactor-trait", "async-trait", "executor-trait", - "flume", + "flume 0.10.14", "futures-core", "futures-io", "parking_lot", @@ -6327,7 +6338,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d894b67aa7a4bf295db5e85349078c604edaa6fa5c8721e8eca3c7729a27f2ac" dependencies = [ "doc-comment", - "flume", + "flume 0.10.14", "parking_lot", "tracing 0.1.40", ] @@ -7606,6 +7617,24 @@ dependencies = [ "unicode-ident", ] +[[package]] +name = "rumqttc" +version = "0.23.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8d8941c6791801b667d52bfe9ff4fc7c968d4f3f9ae8ae7abdaaa1c966feafc8" +dependencies = [ + "bytes 1.5.0", + "flume 0.11.0", + "futures-util", + "log", + "rustls-native-certs", + "rustls-pemfile", + "rustls-webpki", + "thiserror", + "tokio", + "tokio-rustls", +] + [[package]] name = "rust_decimal" version = "1.33.1" @@ -9949,6 +9978,7 @@ dependencies = [ "rmpv", "roaring", "rstest", + "rumqttc", "seahash", "semver 1.0.21", "serde", diff --git a/Cargo.toml b/Cargo.toml index e7c51ecce33b0..e34bd1d90039f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -313,6 +313,7 @@ rdkafka = { version = "0.35.0", default-features = false, features = ["tokio", " redis = { version = "0.24.0", default-features = false, features = ["connection-manager", "tokio-comp", "tokio-native-tls-comp"], optional = true } regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } roaring = { version = "0.10.2", default-features = false, optional = true } +rumqttc = { version = "0.23.0", default-features = false, features = ["use-rustls"], optional = true } seahash = { version = "4.1.0", default-features = false } semver = { version = "1.0.21", default-features = false, features = ["serde", "std"], optional = true } smallvec = { version = "1", default-features = false, features = ["union", "serde"] } @@ -656,6 +657,7 @@ sinks-logs = [ "sinks-kafka", "sinks-mezmo", "sinks-loki", + "sinks-mqtt", "sinks-nats", "sinks-new_relic_logs", "sinks-new_relic", @@ -718,6 +720,7 @@ sinks-influxdb = [] sinks-kafka = ["dep:rdkafka"] sinks-mezmo = [] sinks-loki = ["loki-logproto"] +sinks-mqtt = ["dep:rumqttc"] sinks-nats = ["dep:async-nats", "dep:nkeys"] sinks-new_relic_logs = ["sinks-http"] sinks-new_relic = [] @@ -843,6 +846,7 @@ kafka-integration-tests = ["sinks-kafka", "sources-kafka"] logstash-integration-tests = ["docker", "sources-logstash"] loki-integration-tests = ["sinks-loki"] mongodb_metrics-integration-tests = ["sources-mongodb_metrics"] +mqtt-integration-tests = ["sinks-mqtt"] nats-integration-tests = ["sinks-nats", "sources-nats"] nginx-integration-tests = ["sources-nginx_metrics"] opentelemetry-integration-tests = ["sources-opentelemetry"] diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index cc59110d0cef2..646bd38c122ad 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -465,6 +465,7 @@ rmpv,https://github.com/3Hren/msgpack-rust,MIT,Evgeny Safronov , Kerollmops " 
roxmltree,https://github.com/RazrFalcon/roxmltree,MIT OR Apache-2.0,Yevhenii Reizner rsa,https://github.com/RustCrypto/RSA,MIT OR Apache-2.0,"RustCrypto Developers, dignifiedquire " +rumqttc,https://github.com/bytebeamio/rumqtt,Apache-2.0,tekjar rust_decimal,https://github.com/paupino/rust-decimal,MIT,Paul Mason rustc-demangle,https://github.com/alexcrichton/rustc-demangle,MIT OR Apache-2.0,Alex Crichton rustc-hash,https://github.com/rust-lang-nursery/rustc-hash,Apache-2.0 OR MIT,The Rust Project Developers diff --git a/changelog.d/19813_add_mqtt_sink.feature.md b/changelog.d/19813_add_mqtt_sink.feature.md new file mode 100644 index 0000000000000..ba1c105fc4540 --- /dev/null +++ b/changelog.d/19813_add_mqtt_sink.feature.md @@ -0,0 +1,3 @@ +Vector can send logs to an MQTT broker through the new mqtt sink. + +authors: astro zamazan4ik StephenWakely mladedav diff --git a/scripts/integration/mqtt/compose.yaml b/scripts/integration/mqtt/compose.yaml new file mode 100644 index 0000000000000..ab44771709155 --- /dev/null +++ b/scripts/integration/mqtt/compose.yaml @@ -0,0 +1,11 @@ +version: '3' + +services: + emqx: + image: docker.io/emqx:${CONFIG_VERSION} + ports: + - 1883:1883 + +networks: + default: + name: ${VECTOR_NETWORK} diff --git a/scripts/integration/mqtt/test.yaml b/scripts/integration/mqtt/test.yaml new file mode 100644 index 0000000000000..607da45eeb5e1 --- /dev/null +++ b/scripts/integration/mqtt/test.yaml @@ -0,0 +1,12 @@ +features: +- mqtt-integration-tests + +test_filter: '::mqtt::' + +matrix: + version: ['5.0.15'] + +paths: +- "src/internal_events/mqtt.rs" +- "src/sinks/mqtt/**" +- "src/sinks/util/**" diff --git a/src/internal_events/mod.rs b/src/internal_events/mod.rs index ce7bf0b50a507..5bf373a87d2c6 100644 --- a/src/internal_events/mod.rs +++ b/src/internal_events/mod.rs @@ -87,6 +87,8 @@ mod lua; mod metric_to_log; #[cfg(feature = "sources-mongodb_metrics")] mod mongodb_metrics; +#[cfg(feature = "sinks-mqtt")] +mod mqtt; #[cfg(feature = "sources-nginx_metrics")] mod nginx_metrics; mod open; @@ -221,6 +223,8 @@ pub(crate) use self::loki::*; pub(crate) use self::lua::*; #[cfg(feature = "transforms-metric_to_log")] pub(crate) use self::metric_to_log::*; +#[cfg(feature = "sinks-mqtt")] +pub(crate) use self::mqtt::*; #[cfg(feature = "sources-nginx_metrics")] pub(crate) use self::nginx_metrics::*; #[allow(unused_imports)] diff --git a/src/internal_events/mqtt.rs b/src/internal_events/mqtt.rs new file mode 100644 index 0000000000000..fa80f738da827 --- /dev/null +++ b/src/internal_events/mqtt.rs @@ -0,0 +1,34 @@ +use std::fmt::Debug; + +use metrics::counter; +use rumqttc::ConnectionError; +use vector_lib::internal_event::InternalEvent; +use vector_lib::internal_event::{error_stage, error_type}; + +#[derive(Debug)] +pub struct MqttConnectionError { + pub error: ConnectionError, +} + +impl InternalEvent for MqttConnectionError { + fn emit(self) { + error!( + message = "MQTT connection error.", + error = %self.error, + error_code = "mqtt_connection_error", + error_type = error_type::WRITER_FAILED, + stage = error_stage::SENDING, + internal_log_rate_limit = true, + ); + counter!( + "component_errors_total", 1, + "error_code" => "mqtt_connection_error", + "error_type" => error_type::WRITER_FAILED, + "stage" => error_stage::SENDING, + ); + } + + fn name(&self) -> Option<&'static str> { + Some("MqttConnectionError") + } +} diff --git a/src/sinks/mod.rs b/src/sinks/mod.rs index 2ccd4c3f9818a..143889e153e78 100644 --- a/src/sinks/mod.rs +++ b/src/sinks/mod.rs @@ -69,6 +69,8 @@ pub mod kafka;
pub mod loki; #[cfg(feature = "sinks-mezmo")] pub mod mezmo; +#[cfg(feature = "sinks-mqtt")] +pub mod mqtt; #[cfg(feature = "sinks-nats")] pub mod nats; #[cfg(feature = "sinks-new_relic")] diff --git a/src/sinks/mqtt/config.rs b/src/sinks/mqtt/config.rs new file mode 100644 index 0000000000000..cd88d6f5e58f7 --- /dev/null +++ b/src/sinks/mqtt/config.rs @@ -0,0 +1,217 @@ +use std::time::Duration; + +use rand::Rng; +use rumqttc::{MqttOptions, QoS, TlsConfiguration, Transport}; +use snafu::{ResultExt, Snafu}; +use vector_lib::codecs::JsonSerializerConfig; + +use crate::template::Template; +use crate::{ + codecs::EncodingConfig, + config::{AcknowledgementsConfig, Input, SinkConfig, SinkContext}, + sinks::{ + mqtt::sink::{ConfigurationSnafu, MqttConnector, MqttError, MqttSink, TlsSnafu}, + prelude::*, + Healthcheck, VectorSink, + }, + tls::{MaybeTlsSettings, TlsEnableableConfig}, +}; + +/// Configuration for the `mqtt` sink +#[configurable_component(sink("mqtt"))] +#[derive(Clone, Debug)] +pub struct MqttSinkConfig { + /// MQTT server address (The broker’s domain name or IP address). + #[configurable(metadata(docs::examples = "mqtt.example.com", docs::examples = "127.0.0.1"))] + pub host: String, + + /// TCP port of the MQTT server to connect to. + #[serde(default = "default_port")] + pub port: u16, + + /// MQTT username. + pub user: Option, + + /// MQTT password. + pub password: Option, + + /// MQTT client ID. + pub client_id: Option, + + /// Connection keep-alive interval. + #[serde(default = "default_keep_alive")] + pub keep_alive: u16, + + /// If set to true, the MQTT session is cleaned on login. + #[serde(default = "default_clean_session")] + pub clean_session: bool, + + #[configurable(derived)] + pub tls: Option, + + /// MQTT publish topic (templates allowed) + pub topic: Template, + + #[configurable(derived)] + pub encoding: EncodingConfig, + + #[configurable(derived)] + #[serde( + default, + deserialize_with = "crate::serde::bool_or_struct", + skip_serializing_if = "crate::serde::is_default" + )] + pub acknowledgements: AcknowledgementsConfig, + + #[configurable(derived)] + #[serde(default = "default_qos")] + pub quality_of_service: MqttQoS, +} + +/// Supported Quality of Service types for MQTT. +#[configurable_component] +#[derive(Clone, Copy, Debug, Derivative)] +#[derivative(Default)] +#[serde(rename_all = "lowercase")] +#[allow(clippy::enum_variant_names)] +pub enum MqttQoS { + /// AtLeastOnce. + #[derivative(Default)] + AtLeastOnce, + + /// AtMostOnce. + AtMostOnce, + + /// ExactlyOnce. 
+ ExactlyOnce, +} + +impl From for QoS { + fn from(value: MqttQoS) -> Self { + match value { + MqttQoS::AtLeastOnce => QoS::AtLeastOnce, + MqttQoS::AtMostOnce => QoS::AtMostOnce, + MqttQoS::ExactlyOnce => QoS::ExactlyOnce, + } + } +} + +const fn default_port() -> u16 { + 1883 +} + +const fn default_keep_alive() -> u16 { + 60 +} + +const fn default_clean_session() -> bool { + false +} + +const fn default_qos() -> MqttQoS { + MqttQoS::AtLeastOnce +} + +impl Default for MqttSinkConfig { + fn default() -> Self { + Self { + host: "localhost".into(), + port: default_port(), + user: None, + password: None, + client_id: None, + keep_alive: default_keep_alive(), + clean_session: default_clean_session(), + tls: None, + topic: Template::try_from("vector").expect("Cannot parse as a template"), + encoding: JsonSerializerConfig::default().into(), + acknowledgements: AcknowledgementsConfig::default(), + quality_of_service: MqttQoS::default(), + } + } +} + +impl_generate_config_from_default!(MqttSinkConfig); + +#[async_trait::async_trait] +#[typetag::serde(name = "mqtt")] +impl SinkConfig for MqttSinkConfig { + async fn build(&self, _cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { + let connector = self.build_connector()?; + let sink = MqttSink::new(self, connector.clone())?; + + Ok(( + VectorSink::from_event_streamsink(sink), + Box::pin(async move { connector.healthcheck().await }), + )) + } + + fn input(&self) -> Input { + Input::log() + } + + fn acknowledgements(&self) -> &AcknowledgementsConfig { + &self.acknowledgements + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Snafu)] +pub enum ConfigurationError { + #[snafu(display("Client ID is not allowed to be empty."))] + EmptyClientId, + #[snafu(display("Username and password must be either both provided or both missing."))] + InvalidCredentials, +} + +impl MqttSinkConfig { + fn build_connector(&self) -> Result { + let client_id = self.client_id.clone().unwrap_or_else(|| { + let hash = rand::thread_rng() + .sample_iter(&rand_distr::Alphanumeric) + .take(6) + .map(char::from) + .collect::(); + format!("vectorSink{hash}") + }); + + if client_id.is_empty() { + return Err(ConfigurationError::EmptyClientId).context(ConfigurationSnafu); + } + let tls = MaybeTlsSettings::from_config(&self.tls, false).context(TlsSnafu)?; + let mut options = MqttOptions::new(&client_id, &self.host, self.port); + options.set_keep_alive(Duration::from_secs(self.keep_alive.into())); + options.set_clean_session(self.clean_session); + match (&self.user, &self.password) { + (Some(user), Some(password)) => { + options.set_credentials(user, password); + } + (None, None) => {} + _ => { + return Err(MqttError::Configuration { + source: ConfigurationError::InvalidCredentials, + }); + } + } + if let Some(tls) = tls.tls() { + let ca = tls.authorities_pem().flatten().collect(); + let client_auth = None; + let alpn = Some(vec!["mqtt".into()]); + options.set_transport(Transport::Tls(TlsConfiguration::Simple { + ca, + client_auth, + alpn, + })); + } + MqttConnector::new(options, self.topic.to_string()) + } +} + +#[cfg(test)] +mod test { + use super::*; + + #[test] + fn generate_config() { + crate::test_util::test_generate_config::(); + } +} diff --git a/src/sinks/mqtt/integration_tests.rs b/src/sinks/mqtt/integration_tests.rs new file mode 100644 index 0000000000000..cce25c0b81b10 --- /dev/null +++ b/src/sinks/mqtt/integration_tests.rs @@ -0,0 +1,104 @@ +use crate::config::{SinkConfig, SinkContext}; +use crate::sinks::mqtt::config::MqttQoS; +use crate::sinks::mqtt::MqttSinkConfig; 
+use crate::template::Template; +use crate::test_util::components::{run_and_assert_sink_compliance, SINK_TAGS}; +use crate::test_util::{random_lines_with_stream, trace_init}; +use rumqttc::{AsyncClient, Event, Incoming, MqttOptions, QoS}; +use std::time::Duration; + +fn mqtt_broker_address() -> String { + let result = std::env::var("MQTT_BROKER_ADDRESS").unwrap_or_else(|_| "emqx".into()); + result +} + +fn mqtt_broker_port() -> u16 { + let result = std::env::var("MQTT_BROKER_PORT") + .unwrap_or_else(|_| "1883".into()) + .parse::() + .expect("Cannot parse as u16"); + result +} + +#[tokio::test] +async fn mqtt_happy() { + trace_init(); + + let topic = "test"; + let cnf = MqttSinkConfig { + host: mqtt_broker_address(), + port: mqtt_broker_port(), + topic: Template::try_from(topic).expect("Cannot parse the topic template"), + quality_of_service: MqttQoS::AtLeastOnce, + ..Default::default() + }; + + let cx = SinkContext::default(); + let (sink, healthcheck) = cnf.build(cx).await.expect("Cannot build the sink"); + healthcheck.await.expect("Health check failed"); + + // prepare consumer + let mut mqtt_options = MqttOptions::new( + "integration-test-consumer", + mqtt_broker_address(), + mqtt_broker_port(), + ); + mqtt_options.set_keep_alive(Duration::from_secs(5)); + + let (client, mut eventloop) = AsyncClient::new(mqtt_options, 10); + client + .subscribe("test", QoS::AtLeastOnce) + .await + .expect("Cannot subscribe to the topic"); + + let num_events = 10; + let (input, events) = random_lines_with_stream(100, num_events, None); + + let (tx, mut rx) = tokio::sync::mpsc::channel(10); + let (ready_tx, ready_rx) = tokio::sync::oneshot::channel(); + let mut ready_tx = Some(ready_tx); + + tokio::spawn(async move { + loop { + if let Ok(try_msg) = + tokio::time::timeout(Duration::from_secs(1), eventloop.poll()).await + { + let msg = try_msg.expect("Cannot extract the message"); + if let Event::Incoming(Incoming::SubAck(_)) = msg { + ready_tx + .take() + .expect("We cannot receive multiple SubAcks in the same test.") + .send(()) + .expect("Cannot send readiness signal."); + continue; + } + + if let Event::Incoming(Incoming::Publish(publish)) = msg { + let message = + serde_json::from_slice::(&publish.payload).unwrap(); + tx.send(Ok(message["message"].as_str().unwrap().to_string())) + .await + .unwrap(); + } + } else { + tx.send(Err("oh no")).await.unwrap(); + tokio::time::sleep(Duration::from_millis(50)).await; + } + } + }); + + ready_rx.await.expect("Cannot receive readiness signal."); + run_and_assert_sink_compliance(sink, events, &SINK_TAGS).await; + + let mut messages = Vec::new(); + + let mut failures = 0; + while failures < 5 && messages.len() < input.len() { + match rx.recv().await.unwrap() { + Ok(message) => messages.push(message), + Err(_) => failures += 1, + } + } + + assert_eq!(messages, input); +} diff --git a/src/sinks/mqtt/mod.rs b/src/sinks/mqtt/mod.rs new file mode 100644 index 0000000000000..26df23d0d3fa6 --- /dev/null +++ b/src/sinks/mqtt/mod.rs @@ -0,0 +1,9 @@ +mod config; +mod request_builder; +mod service; +mod sink; + +#[cfg(all(test, feature = "mqtt-integration-tests"))] +mod integration_tests; + +pub use config::MqttSinkConfig; diff --git a/src/sinks/mqtt/request_builder.rs b/src/sinks/mqtt/request_builder.rs new file mode 100644 index 0000000000000..62b75910a5454 --- /dev/null +++ b/src/sinks/mqtt/request_builder.rs @@ -0,0 +1,92 @@ +use std::io; + +use bytes::{Bytes, BytesMut}; +use tokio_util::codec::Encoder as _; + +use crate::sinks::prelude::*; + +use 
super::{service::MqttRequest, sink::MqttEvent}; + +pub(super) struct MqttMetadata { + topic: String, + finalizers: EventFinalizers, +} + +pub(super) struct MqttEncoder { + pub(super) encoder: crate::codecs::Encoder<()>, + pub(super) transformer: crate::codecs::Transformer, +} + +impl encoding::Encoder for MqttEncoder { + fn encode_input( + &self, + mut input: Event, + writer: &mut dyn io::Write, + ) -> io::Result<(usize, GroupedCountByteSize)> { + let mut body = BytesMut::new(); + self.transformer.transform(&mut input); + + let mut byte_size = telemetry().create_request_count_byte_size(); + byte_size.add_event(&input, input.estimated_json_encoded_size_of()); + + let mut encoder = self.encoder.clone(); + encoder + .encode(input, &mut body) + .map_err(|_| io::Error::new(io::ErrorKind::Other, "unable to encode"))?; + + let body = body.freeze(); + write_all(writer, 1, body.as_ref())?; + + Ok((body.len(), byte_size)) + } +} + +pub(super) struct MqttRequestBuilder { + pub(super) encoder: MqttEncoder, +} + +impl RequestBuilder for MqttRequestBuilder { + type Metadata = MqttMetadata; + type Events = Event; + type Encoder = MqttEncoder; + type Payload = Bytes; + type Request = MqttRequest; + type Error = io::Error; + + fn compression(&self) -> Compression { + Compression::None + } + + fn encoder(&self) -> &Self::Encoder { + &self.encoder + } + + fn split_input( + &self, + mut input: MqttEvent, + ) -> (Self::Metadata, RequestMetadataBuilder, Self::Events) { + let builder = RequestMetadataBuilder::from_event(&input.event); + + let metadata = MqttMetadata { + topic: input.topic, + finalizers: input.event.take_finalizers(), + }; + + (metadata, builder, input.event) + } + + fn build_request( + &self, + mqtt_metadata: Self::Metadata, + metadata: RequestMetadata, + payload: EncodeResult, + ) -> Self::Request { + let body = payload.into_payload(); + MqttRequest { + body, + topic: mqtt_metadata.topic, + finalizers: mqtt_metadata.finalizers, + metadata, + } + } +} diff --git a/src/sinks/mqtt/service.rs b/src/sinks/mqtt/service.rs new file mode 100644 index 0000000000000..fcba9c808fdf6 --- /dev/null +++ b/src/sinks/mqtt/service.rs @@ -0,0 +1,92 @@ +use std::task::{Context, Poll}; + +use crate::sinks::prelude::*; +use bytes::Bytes; +use futures::future::BoxFuture; +use rumqttc::{AsyncClient, ClientError}; +use snafu::Snafu; + +use super::config::MqttQoS; + +pub(super) struct MqttResponse { + byte_size: usize, + json_size: GroupedCountByteSize, +} + +impl DriverResponse for MqttResponse { + fn event_status(&self) -> EventStatus { + EventStatus::Delivered + } + + fn events_sent(&self) -> &GroupedCountByteSize { + &self.json_size + } + + fn bytes_sent(&self) -> Option { + Some(self.byte_size) + } +} + +pub(super) struct MqttRequest { + pub(super) body: Bytes, + pub(super) topic: String, + pub(super) finalizers: EventFinalizers, + pub(super) metadata: RequestMetadata, +} + +impl Finalizable for MqttRequest { + fn take_finalizers(&mut self) -> EventFinalizers { + std::mem::take(&mut self.finalizers) + } +} + +impl MetaDescriptive for MqttRequest { + fn get_metadata(&self) -> &RequestMetadata { + &self.metadata + } + + fn metadata_mut(&mut self) -> &mut RequestMetadata { + &mut self.metadata + } +} + +pub(super) struct MqttService { + pub(super) client: AsyncClient, + pub(super) quality_of_service: MqttQoS, +} + +#[derive(Debug, Snafu)] +pub(super) enum MqttError { + #[snafu(display("error"))] + Error { error: ClientError }, +} + +impl Service for MqttService { + type Response = MqttResponse; + type Error = 
MqttError; + type Future = BoxFuture<'static, Result>; + + fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, req: MqttRequest) -> Self::Future { + let quality_of_service = self.quality_of_service; + let client = self.client.clone(); + + Box::pin(async move { + let byte_size = req.body.len(); + + let res = client + .publish(&req.topic, quality_of_service.into(), false, req.body) + .await; + match res { + Ok(()) => Ok(MqttResponse { + byte_size, + json_size: req.metadata.into_events_estimated_json_encoded_byte_size(), + }), + Err(error) => Err(MqttError::Error { error }), + } + }) + } +} diff --git a/src/sinks/mqtt/sink.rs b/src/sinks/mqtt/sink.rs new file mode 100644 index 0000000000000..248a193c37def --- /dev/null +++ b/src/sinks/mqtt/sink.rs @@ -0,0 +1,151 @@ +use async_trait::async_trait; +use futures::{stream::BoxStream, StreamExt}; +use rumqttc::{AsyncClient, EventLoop, MqttOptions}; +use snafu::{ResultExt, Snafu}; +use vector_lib::tls::TlsError; + +use crate::internal_events::MqttConnectionError; +use crate::sinks::prelude::*; + +use super::{ + config::{ConfigurationError, MqttQoS}, + request_builder::{MqttEncoder, MqttRequestBuilder}, + service::MqttService, + MqttSinkConfig, +}; + +#[derive(Debug, Snafu)] +#[snafu(visibility(pub))] +pub enum MqttError { + #[snafu(display("invalid topic template: {}", source))] + TopicTemplate { source: TemplateParseError }, + #[snafu(display("TLS error: {}", source))] + Tls { source: TlsError }, + #[snafu(display("MQTT configuration error: {}", source))] + Configuration { source: ConfigurationError }, +} + +#[derive(Clone)] +pub struct MqttConnector { + options: MqttOptions, + topic: Template, +} + +impl MqttConnector { + pub fn new(options: MqttOptions, topic: String) -> Result { + let topic = Template::try_from(topic).context(TopicTemplateSnafu)?; + Ok(Self { options, topic }) + } + + fn connect(&self) -> (AsyncClient, EventLoop) { + AsyncClient::new(self.options.clone(), 1024) + } + + pub async fn healthcheck(&self) -> crate::Result<()> { + // TODO: Right now there is no way to implement the healthcheck properly: https://github.com/bytebeamio/rumqtt/issues/562 + Ok(()) + } +} + +pub struct MqttSink { + transformer: Transformer, + encoder: Encoder<()>, + connector: MqttConnector, + quality_of_service: MqttQoS, +} + +pub(super) struct MqttEvent { + pub(super) topic: String, + pub(super) event: Event, +} + +impl MqttSink { + pub fn new(config: &MqttSinkConfig, connector: MqttConnector) -> crate::Result { + let transformer = config.encoding.transformer(); + let serializer = config.encoding.build()?; + let encoder = Encoder::<()>::new(serializer); + + Ok(Self { + transformer, + encoder, + connector, + quality_of_service: config.quality_of_service, + }) + } + + fn make_mqtt_event(&self, event: Event) -> Option { + let topic = self + .connector + .topic + .render_string(&event) + .map_err(|missing_keys| { + emit!(TemplateRenderingError { + error: missing_keys, + field: Some("topic"), + drop_event: true, + }) + }) + .ok()?; + + Some(MqttEvent { topic, event }) + } + + async fn run_inner(self: Box, input: BoxStream<'_, Event>) -> Result<(), ()> { + let (client, mut connection) = self.connector.connect(); + + // This is necessary to keep the mqtt event loop moving forward. + tokio::spawn(async move { + loop { + // If an error is returned here there is currently no way to tie this back + // to the event that was posted which means we can't accurately provide + // delivery guarantees. 
+ // We need this issue resolved first: + // https://github.com/bytebeamio/rumqtt/issues/349 + match connection.poll().await { + Ok(_) => {} + Err(connection_error) => { + emit!(MqttConnectionError { + error: connection_error + }); + } + } + } + }); + + let service = ServiceBuilder::new().service(MqttService { + client, + quality_of_service: self.quality_of_service, + }); + + let request_builder = MqttRequestBuilder { + encoder: MqttEncoder { + encoder: self.encoder.clone(), + transformer: self.transformer.clone(), + }, + }; + + input + .filter_map(|event| std::future::ready(self.make_mqtt_event(event))) + .request_builder(default_request_builder_concurrency_limit(), request_builder) + .filter_map(|request| async move { + match request { + Err(e) => { + error!("Failed to build MQTT request: {:?}.", e); + None + } + Ok(req) => Some(req), + } + }) + .into_driver(service) + .protocol("mqtt") + .run() + .await + } +} + +#[async_trait] +impl StreamSink for MqttSink { + async fn run(mut self: Box, input: BoxStream<'_, Event>) -> Result<(), ()> { + self.run_inner(input).await + } +} diff --git a/website/content/en/docs/reference/configuration/sinks/mqtt.md b/website/content/en/docs/reference/configuration/sinks/mqtt.md new file mode 100644 index 0000000000000..f71f4319b8500 --- /dev/null +++ b/website/content/en/docs/reference/configuration/sinks/mqtt.md @@ -0,0 +1,14 @@ +--- +title: MQTT +description: Deliver observability event data to an [MQTT](https://mqtt.org) broker +kind: sink +layout: component +tags: ["mqtt", "component", "sink"] +--- + +{{/* +This doc is generated using: + +1. The template in layouts/docs/component.html +2. The relevant CUE data in cue/reference/components/... +*/}} diff --git a/website/cue/reference/components/sinks/base/mqtt.cue b/website/cue/reference/components/sinks/base/mqtt.cue new file mode 100644 index 0000000000000..2a747cd13c3e9 --- /dev/null +++ b/website/cue/reference/components/sinks/base/mqtt.cue @@ -0,0 +1,423 @@ +package metadata + +base: components: sinks: mqtt: configuration: { + acknowledgements: { + description: """ + Controls how acknowledgements are handled for this sink. + + See [End-to-end Acknowledgements][e2e_acks] for more information on how event acknowledgement is handled. + + [e2e_acks]: https://vector.dev/docs/about/under-the-hood/architecture/end-to-end-acknowledgements/ + """ + required: false + type: object: options: enabled: { + description: """ + Whether or not end-to-end acknowledgements are enabled. + + When enabled for a sink, any source connected to that sink, where the source supports + end-to-end acknowledgements as well, waits for events to be acknowledged by the sink + before acknowledging them at the source. + + Enabling or disabling acknowledgements at the sink level takes precedence over any global + [`acknowledgements`][global_acks] configuration. + + [global_acks]: https://vector.dev/docs/reference/configuration/global-options/#acknowledgements + """ + required: false + type: bool: {} + } + } + clean_session: { + description: "If set to true, the MQTT session is cleaned on login." + required: false + type: bool: default: false + } + client_id: { + description: "MQTT client ID." + required: false + type: string: {} + } + encoding: { + description: "Configures how events are encoded into raw bytes." + required: true + type: object: options: { + avro: { + description: "Apache Avro-specific encoder options." 
+ relevant_when: "codec = \"avro\"" + required: true + type: object: options: schema: { + description: "The Avro schema." + required: true + type: string: examples: ["{ \"type\": \"record\", \"name\": \"log\", \"fields\": [{ \"name\": \"message\", \"type\": \"string\" }] }"] + } + } + codec: { + description: "The codec to use for encoding events." + required: true + type: string: enum: { + avro: """ + Encodes an event as an [Apache Avro][apache_avro] message. + + [apache_avro]: https://avro.apache.org/ + """ + csv: """ + Encodes an event as a CSV message. + + This codec must be configured with fields to encode. + """ + gelf: """ + Encodes an event as a [GELF][gelf] message. + + This codec is experimental for the following reason: + + The GELF specification is more strict than the actual Graylog receiver. + Vector's encoder currently adheres more strictly to the GELF spec, with + the exception that some characters such as `@` are allowed in field names. + + Other GELF codecs such as Loki's, use a [Go SDK][implementation] that is maintained + by Graylog, and is much more relaxed than the GELF spec. + + Going forward, Vector will use that [Go SDK][implementation] as the reference implementation, which means + the codec may continue to relax the enforcement of specification. + + [gelf]: https://docs.graylog.org/docs/gelf + [implementation]: https://github.com/Graylog2/go-gelf/blob/v2/gelf/reader.go + """ + json: """ + Encodes an event as [JSON][json]. + + [json]: https://www.json.org/ + """ + logfmt: """ + Encodes an event as a [logfmt][logfmt] message. + + [logfmt]: https://brandur.org/logfmt + """ + native: """ + Encodes an event in the [native Protocol Buffers format][vector_native_protobuf]. + + This codec is **[experimental][experimental]**. + + [vector_native_protobuf]: https://github.com/vectordotdev/vector/blob/master/lib/vector-core/proto/event.proto + [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs + """ + native_json: """ + Encodes an event in the [native JSON format][vector_native_json]. + + This codec is **[experimental][experimental]**. + + [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue + [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs + """ + protobuf: """ + Encodes an event as a [Protobuf][protobuf] message. + + [protobuf]: https://protobuf.dev/ + """ + raw_message: """ + No encoding. + + This encoding uses the `message` field of a log event. + + Be careful if you are modifying your log events (for example, by using a `remap` + transform) and removing the message field while doing additional parsing on it, as this + could lead to the encoding emitting empty strings for the given event. + """ + text: """ + Plain text encoding. + + This encoding uses the `message` field of a log event. For metrics, it uses an + encoding that resembles the Prometheus export format. + + Be careful if you are modifying your log events (for example, by using a `remap` + transform) and removing the message field while doing additional parsing on it, as this + could lead to the encoding emitting empty strings for the given event. + """ + } + } + csv: { + description: "The CSV Serializer Options." + relevant_when: "codec = \"csv\"" + required: true + type: object: options: { + capacity: { + description: """ + Set the capacity (in bytes) of the internal buffer used in the CSV writer. + This defaults to a reasonable setting. 
+ """ + required: false + type: uint: default: 8192 + } + delimiter: { + description: "The field delimiter to use when writing CSV." + required: false + type: uint: default: 44 + } + double_quote: { + description: """ + Enable double quote escapes. + + This is enabled by default, but it may be disabled. When disabled, quotes in + field data are escaped instead of doubled. + """ + required: false + type: bool: default: true + } + escape: { + description: """ + The escape character to use when writing CSV. + + In some variants of CSV, quotes are escaped using a special escape character + like \\ (instead of escaping quotes by doubling them). + + To use this, `double_quotes` needs to be disabled as well otherwise it is ignored. + """ + required: false + type: uint: default: 34 + } + fields: { + description: """ + Configures the fields that will be encoded, as well as the order in which they + appear in the output. + + If a field is not present in the event, the output will be an empty string. + + Values of type `Array`, `Object`, and `Regex` are not supported and the + output will be an empty string. + """ + required: true + type: array: items: type: string: {} + } + quote: { + description: "The quote character to use when writing CSV." + required: false + type: uint: default: 34 + } + quote_style: { + description: "The quoting style to use when writing CSV data." + required: false + type: string: { + default: "necessary" + enum: { + always: "Always puts quotes around every field." + necessary: """ + Puts quotes around fields only when necessary. + They are necessary when fields contain a quote, delimiter, or record terminator. + Quotes are also necessary when writing an empty record + (which is indistinguishable from a record with one empty field). + """ + never: "Never writes quotes, even if it produces invalid CSV data." + non_numeric: """ + Puts quotes around all fields that are non-numeric. + Namely, when writing a field that does not parse as a valid float or integer, + then quotes are used even if they aren't strictly necessary. + """ + } + } + } + } + } + except_fields: { + description: "List of fields that are excluded from the encoded event." + required: false + type: array: items: type: string: {} + } + metric_tag_values: { + description: """ + Controls how metric tag values are encoded. + + When set to `single`, only the last non-bare value of tags are displayed with the + metric. When set to `full`, all metric tags are exposed as separate assignments. + """ + relevant_when: "codec = \"json\" or codec = \"text\"" + required: false + type: string: { + default: "single" + enum: { + full: "All tags are exposed as arrays of either string or null values." + single: """ + Tag values are exposed as single strings, the same as they were before this config + option. Tags with multiple values show the last assigned value, and null values + are ignored. + """ + } + } + } + only_fields: { + description: "List of fields that are included in the encoded event." + required: false + type: array: items: type: string: {} + } + protobuf: { + description: "Options for the Protobuf serializer." + relevant_when: "codec = \"protobuf\"" + required: true + type: object: options: { + desc_file: { + description: """ + The path to the protobuf descriptor set file. + + This file is the output of `protoc -o ...` + """ + required: true + type: string: examples: ["/etc/vector/protobuf_descriptor_set.desc"] + } + message_type: { + description: "The name of the message type to use for serializing." 
+ required: true + type: string: examples: ["package.Message"] + } + } + } + timestamp_format: { + description: "Format used for timestamp fields." + required: false + type: string: enum: { + rfc3339: "Represent the timestamp as a RFC 3339 timestamp." + unix: "Represent the timestamp as a Unix timestamp." + unix_float: "Represent the timestamp as a Unix timestamp in floating point." + unix_ms: "Represent the timestamp as a Unix timestamp in milliseconds." + unix_ns: "Represent the timestamp as a Unix timestamp in nanoseconds." + unix_us: "Represent the timestamp as a Unix timestamp in microseconds" + } + } + } + } + host: { + description: "MQTT server address (The broker’s domain name or IP address)." + required: true + type: string: examples: ["mqtt.example.com", "127.0.0.1"] + } + keep_alive: { + description: "Connection keep-alive interval." + required: false + type: uint: default: 60 + } + password: { + description: "MQTT password." + required: false + type: string: {} + } + port: { + description: "TCP port of the MQTT server to connect to." + required: false + type: uint: default: 1883 + } + quality_of_service: { + description: "Supported Quality of Service types for MQTT." + required: false + type: string: { + default: "atleastonce" + enum: { + atleastonce: "AtLeastOnce." + atmostonce: "AtMostOnce." + exactlyonce: "ExactlyOnce." + } + } + } + tls: { + description: "Configures the TLS options for incoming/outgoing connections." + required: false + type: object: options: { + alpn_protocols: { + description: """ + Sets the list of supported ALPN protocols. + + Declare the supported ALPN protocols, which are used during negotiation with peer. They are prioritized in the order + that they are defined. + """ + required: false + type: array: items: type: string: examples: ["h2"] + } + ca_file: { + description: """ + Absolute path to an additional CA certificate file. + + The certificate must be in the DER or PEM (X.509) format. Additionally, the certificate can be provided as an inline string in PEM format. + """ + required: false + type: string: examples: ["/path/to/certificate_authority.crt"] + } + crt_file: { + description: """ + Absolute path to a certificate file used to identify this server. + + The certificate must be in DER, PEM (X.509), or PKCS#12 format. Additionally, the certificate can be provided as + an inline string in PEM format. + + If this is set, and is not a PKCS#12 archive, `key_file` must also be set. + """ + required: false + type: string: examples: ["/path/to/host_certificate.crt"] + } + enabled: { + description: """ + Whether or not to require TLS for incoming or outgoing connections. + + When enabled and used for incoming connections, an identity certificate is also required. See `tls.crt_file` for + more information. + """ + required: false + type: bool: {} + } + key_file: { + description: """ + Absolute path to a private key file used to identify this server. + + The key must be in DER or PEM (PKCS#8) format. Additionally, the key can be provided as an inline string in PEM format. + """ + required: false + type: string: examples: ["/path/to/host_certificate.key"] + } + key_pass: { + description: """ + Passphrase used to unlock the encrypted key file. + + This has no effect unless `key_file` is set. + """ + required: false + type: string: examples: ["${KEY_PASS_ENV_VAR}", "PassWord1"] + } + verify_certificate: { + description: """ + Enables certificate verification. + + If enabled, certificates must not be expired and must be issued by a trusted + issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the + certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and + so on until the verification process reaches a root certificate. + + Relevant for both incoming and outgoing connections. + + Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. + """ + required: false + type: bool: {} + } + verify_hostname: { + description: """ + Enables hostname verification. + + If enabled, the hostname used to connect to the remote host must be present in the TLS certificate presented by + the remote host, either as the Common Name or as an entry in the Subject Alternative Name extension. + + Only relevant for outgoing connections. + + Do NOT set this to `false` unless you understand the risks of not verifying the remote hostname. + """ + required: false + type: bool: {} + } + } + } + topic: { + description: "MQTT publish topic (templates allowed)" + required: true + type: string: syntax: "template" + } + user: { + description: "MQTT username." + required: false + type: string: {} + } +} diff --git a/website/cue/reference/components/sinks/mqtt.cue b/website/cue/reference/components/sinks/mqtt.cue new file mode 100644 index 0000000000000..014bbcd69a6f6 --- /dev/null +++ b/website/cue/reference/components/sinks/mqtt.cue @@ -0,0 +1,85 @@ +package metadata + +components: sinks: mqtt: { + title: "MQTT" + + classes: { + commonly_used: false + delivery: "best_effort" + development: "beta" + egress_method: "stream" + service_providers: [] + stateful: false + } + + features: { + auto_generated: true + acknowledgements: true + healthcheck: enabled: false + send: { + compression: enabled: false + encoding: { + enabled: true + codec: { + enabled: true + enum: ["json", "text"] + } + } + request: enabled: false + tls: { + enabled: true + can_verify_certificate: true + can_verify_hostname: true + enabled_default: false + enabled_by_scheme: false + } + to: { + service: services.mqtt + interface: { + socket: { + direction: "outgoing" + protocols: ["tcp"] + ssl: "optional" + } + } + } + } + } + + support: { + targets: { + "aarch64-unknown-linux-gnu": true + "aarch64-unknown-linux-musl": true + "armv7-unknown-linux-gnueabihf": true + "armv7-unknown-linux-musleabihf": true + "x86_64-apple-darwin": true + "x86_64-pc-windows-msv": true + "x86_64-unknown-linux-gnu": true + "x86_64-unknown-linux-musl": true + } + requirements: [] + warnings: [] + notices: [] + } + + configuration: base.components.sinks.mqtt.configuration + + input: { + logs: true + metrics: null + traces: false + } + + telemetry: metrics: { + open_connections: components.sources.internal_metrics.output.metrics.open_connections + connection_shutdown_total: components.sources.internal_metrics.output.metrics.connection_shutdown_total + component_errors_total: components.sources.internal_metrics.output.metrics.component_errors_total + component_discarded_events_total: components.sources.internal_metrics.output.metrics.component_discarded_events_total + component_received_events_total: components.sources.internal_metrics.output.metrics.component_received_events_total + component_received_events_count: components.sources.internal_metrics.output.metrics.component_received_events_count + component_received_event_bytes_total: components.sources.internal_metrics.output.metrics.component_received_event_bytes_total + component_sent_bytes_total: 
components.sources.internal_metrics.output.metrics.component_sent_bytes_total + component_sent_events_total: components.sources.internal_metrics.output.metrics.component_sent_events_total + component_sent_event_bytes_total: components.sources.internal_metrics.output.metrics.component_sent_event_bytes_total + } +} diff --git a/website/cue/reference/services/mqtt.cue b/website/cue/reference/services/mqtt.cue new file mode 100644 index 0000000000000..d22aeaec894c0 --- /dev/null +++ b/website/cue/reference/services/mqtt.cue @@ -0,0 +1,10 @@ +package metadata + +services: mqtt: { + name: "MQTT" + thing: "\(name) topics" + url: urls.mqtt + versions: null + + description: "[MQTT](\(urls.mqtt)) is an OASIS standard messaging protocol for the Internet of Things (IoT)." +} diff --git a/website/cue/reference/urls.cue b/website/cue/reference/urls.cue index ca1118738152c..e0fe11129bc6f 100644 --- a/website/cue/reference/urls.cue +++ b/website/cue/reference/urls.cue @@ -361,6 +361,7 @@ urls: { mongodb: "https://www.mongodb.com" mongodb_command_server_status: "https://docs.mongodb.com/manual/reference/command/serverStatus/" mongodb_connection_string_uri_format: "https://docs.mongodb.com/manual/reference/connection-string/" + mqtt: "https://mqtt.org/" musl_builder_docker_image: "\(vector_repo)/blob/master/scripts/ci-docker-images/builder-x86_64-unknown-linux-musl/Dockerfile" native_proto_schema: "\(vector_repo)/blob/master/lib/vector-core/proto/event.proto" native_json_schema: "\(vector_repo)/blob/master/lib/codecs/tests/data/native_encoding/schema.cue" From a935c30785ad50adfea5a3344e2fb3673fffb73c Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 14 Feb 2024 11:44:56 -0800 Subject: [PATCH 0017/1491] chore(kubernetes): Bump manifests to chart v0.31.0 (#19877) Signed-off-by: Jesse Szwedko --- distribution/kubernetes/vector-agent/README.md | 2 +- distribution/kubernetes/vector-agent/configmap.yaml | 2 +- distribution/kubernetes/vector-agent/daemonset.yaml | 4 ++-- distribution/kubernetes/vector-agent/rbac.yaml | 4 ++-- distribution/kubernetes/vector-agent/service-headless.yaml | 2 +- distribution/kubernetes/vector-agent/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/README.md | 2 +- distribution/kubernetes/vector-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-aggregator/service-headless.yaml | 2 +- distribution/kubernetes/vector-aggregator/service.yaml | 2 +- distribution/kubernetes/vector-aggregator/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/statefulset.yaml | 4 ++-- distribution/kubernetes/vector-stateless-aggregator/README.md | 2 +- .../kubernetes/vector-stateless-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-stateless-aggregator/deployment.yaml | 4 ++-- .../vector-stateless-aggregator/service-headless.yaml | 2 +- .../kubernetes/vector-stateless-aggregator/service.yaml | 2 +- .../vector-stateless-aggregator/serviceaccount.yaml | 2 +- 18 files changed, 22 insertions(+), 22 deletions(-) diff --git a/distribution/kubernetes/vector-agent/README.md b/distribution/kubernetes/vector-agent/README.md index 6176f85ac41e1..e16e7123167ff 100644 --- a/distribution/kubernetes/vector-agent/README.md +++ b/distribution/kubernetes/vector-agent/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.30.2 with the following
`values.yaml`: ```yaml role: Agent diff --git a/distribution/kubernetes/vector-agent/configmap.yaml b/distribution/kubernetes/vector-agent/configmap.yaml index 7a5abd7cdc10b..6131fadb49e6d 100644 --- a/distribution/kubernetes/vector-agent/configmap.yaml +++ b/distribution/kubernetes/vector-agent/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" data: agent.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-agent/daemonset.yaml b/distribution/kubernetes/vector-agent/daemonset.yaml index 1ed9b9c70cf68..26355970e0245 100644 --- a/distribution/kubernetes/vector-agent/daemonset.yaml +++ b/distribution/kubernetes/vector-agent/daemonset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: {} spec: selector: @@ -30,7 +30,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.35.1-distroless-libc" + image: "timberio/vector:0.36.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-agent/rbac.yaml b/distribution/kubernetes/vector-agent/rbac.yaml index 773db8846b5dd..457ddcdb500aa 100644 --- a/distribution/kubernetes/vector-agent/rbac.yaml +++ b/distribution/kubernetes/vector-agent/rbac.yaml @@ -10,7 +10,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" rules: - apiGroups: - "" @@ -31,7 +31,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole diff --git a/distribution/kubernetes/vector-agent/service-headless.yaml b/distribution/kubernetes/vector-agent/service-headless.yaml index 72e14567d171b..45b21cf3bccb4 100644 --- a/distribution/kubernetes/vector-agent/service-headless.yaml +++ b/distribution/kubernetes/vector-agent/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-agent/serviceaccount.yaml b/distribution/kubernetes/vector-agent/serviceaccount.yaml index 1472923230116..0c7fd9014dcb3 100644 --- a/distribution/kubernetes/vector-agent/serviceaccount.yaml +++ b/distribution/kubernetes/vector-agent/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/README.md b/distribution/kubernetes/vector-aggregator/README.md index 7b9fa8055150a..e9b192647230d 100644 --- a/distribution/kubernetes/vector-aggregator/README.md +++ 
b/distribution/kubernetes/vector-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.30.2 with the following `values.yaml`: +version 0.31.0 with the following `values.yaml`: ```yaml diff --git a/distribution/kubernetes/vector-aggregator/configmap.yaml b/distribution/kubernetes/vector-aggregator/configmap.yaml index 04a8025d10fec..c81e8eb40126d 100644 --- a/distribution/kubernetes/vector-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-aggregator/service-headless.yaml b/distribution/kubernetes/vector-aggregator/service-headless.yaml index 232c59389131f..e4e607cf01f0d 100644 --- a/distribution/kubernetes/vector-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-aggregator/service.yaml b/distribution/kubernetes/vector-aggregator/service.yaml index 6d1243421541b..5d99f70aa1c97 100644 --- a/distribution/kubernetes/vector-aggregator/service.yaml +++ b/distribution/kubernetes/vector-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml index 6e1aad1fea6bb..2ad01a8db7a0a 100644 --- a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/statefulset.yaml b/distribution/kubernetes/vector-aggregator/statefulset.yaml index 00cbf034de9db..3dbcb63db7975 100644 --- a/distribution/kubernetes/vector-aggregator/statefulset.yaml +++ b/distribution/kubernetes/vector-aggregator/statefulset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: {} spec: replicas: 1 @@ -32,7 +32,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.35.1-distroless-libc" + image: "timberio/vector:0.36.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git 
a/distribution/kubernetes/vector-stateless-aggregator/README.md b/distribution/kubernetes/vector-stateless-aggregator/README.md index 5bb86c423daac..2d013cfd7ad1e 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/README.md +++ b/distribution/kubernetes/vector-stateless-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.30.2 with the following `values.yaml`: +version 0.31.0 with the following `values.yaml`: ```yaml role: Stateless-Aggregator diff --git a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml index 09c0aa160bf25..30224b72d03e7 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml index e156ce9f59614..a87b68a9bd334 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: {} spec: replicas: 1 @@ -30,7 +30,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.35.1-distroless-libc" + image: "timberio/vector:0.36.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml index b9d2a1e783a3f..7d57fe61128a0 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-stateless-aggregator/service.yaml b/distribution/kubernetes/vector-stateless-aggregator/service.yaml index f6d52b1e7ea23..fbcaf26d05212 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml 
index c9c4d424df84b..549de176d0f4d 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.35.1-distroless-libc" + app.kubernetes.io/version: "0.36.0-distroless-libc" automountServiceAccountToken: true From b91be34a3c890505e7faeaeffa4a1bea54944ebf Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 14 Feb 2024 12:33:09 -0800 Subject: [PATCH 0018/1491] chore(releasing): Bump development version to v0.37.0 (#19874) Signed-off-by: Jesse Szwedko --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c52d6489b0d87..1f5492a3528f3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9845,7 +9845,7 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "vector" -version = "0.36.0" +version = "0.37.0" dependencies = [ "apache-avro", "approx", diff --git a/Cargo.toml b/Cargo.toml index e34bd1d90039f..7e84f9cd3e5b4 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "vector" -version = "0.36.0" +version = "0.37.0" authors = ["Vector Contributors "] edition = "2021" description = "A lightweight and ultra-fast tool for building observability pipelines" From 342b48c0f7c0aa1147a3a2a1b00089a482436560 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 15 Feb 2024 09:42:36 -0500 Subject: [PATCH 0019/1491] chore(deps): Bump darling from 0.20.5 to 0.20.6 (#19882) Bumps [darling](https://github.com/TedDriggs/darling) from 0.20.5 to 0.20.6. - [Release notes](https://github.com/TedDriggs/darling/releases) - [Changelog](https://github.com/TedDriggs/darling/blob/master/CHANGELOG.md) - [Commits](https://github.com/TedDriggs/darling/compare/v0.20.5...v0.20.6) --- updated-dependencies: - dependency-name: darling dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1f5492a3528f3..b805f4b08d0ea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -463,7 +463,7 @@ checksum = "a6a7349168b79030e3172a620f4f0e0062268a954604e41475eff082380fe505" dependencies = [ "Inflector", "async-graphql-parser", - "darling 0.20.5", + "darling 0.20.6", "proc-macro-crate 1.3.1", "proc-macro2 1.0.78", "quote 1.0.35", @@ -2584,12 +2584,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc5d6b04b3fd0ba9926f945895de7d806260a2d7431ba82e7edaecb043c4c6b8" +checksum = "c376d08ea6aa96aafe61237c7200d1241cb177b7d3a542d791f2d118e9cbb955" dependencies = [ - "darling_core 0.20.5", - "darling_macro 0.20.5", + "darling_core 0.20.6", + "darling_macro 0.20.6", ] [[package]] @@ -2622,9 +2622,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04e48a959bcd5c761246f5d090ebc2fbf7b9cd527a492b07a67510c108f1e7e3" +checksum = "33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855" dependencies = [ "fnv", "ident_case", @@ -2658,11 +2658,11 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.5" +version = "0.20.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d1545d67a2149e1d93b7e5c7752dce5a7426eb5d1357ddcfd89336b94444f77" +checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ - "darling_core 0.20.5", + "darling_core 0.20.6", "quote 1.0.35", "syn 2.0.48", ] @@ -8151,7 +8151,7 @@ version = "3.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "865f9743393e638991566a8b7a479043c2c8da94a33e0a31f18214c9cae0a64d" dependencies = [ - "darling 0.20.5", + "darling 0.20.6", "proc-macro2 1.0.78", "quote 1.0.35", "syn 2.0.48", @@ -10162,7 +10162,7 @@ name = "vector-config-common" version = "0.1.0" dependencies = [ "convert_case 0.6.0", - "darling 0.20.5", + "darling 0.20.6", "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", @@ -10176,7 +10176,7 @@ dependencies = [ name = "vector-config-macros" version = "0.1.0" dependencies = [ - "darling 0.20.5", + "darling 0.20.6", "proc-macro2 1.0.78", "quote 1.0.35", "serde", From 2f1c7850fbc039a894f51b844e919adf2fdc925d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?David=20Ml=C3=A1dek?= Date: Thu, 15 Feb 2024 16:02:24 +0100 Subject: [PATCH 0020/1491] enhancement(vrl): RFC for return expression (#19828) * add rfc with return expression * Apply suggestions from code review Co-authored-by: Pavlos Rontidis * Add optional parameters to return and explicitly do not support closures * Remove return without arguments --------- Co-authored-by: Pavlos Rontidis --- rfcs/2023-02-08-7496-vrl-return.md | 74 ++++++++++++++++++++++++++++++ 1 file changed, 74 insertions(+) create mode 100644 rfcs/2023-02-08-7496-vrl-return.md diff --git a/rfcs/2023-02-08-7496-vrl-return.md b/rfcs/2023-02-08-7496-vrl-return.md new file mode 100644 index 0000000000000..ca4916e88163b --- /dev/null +++ b/rfcs/2023-02-08-7496-vrl-return.md @@ -0,0 +1,74 @@ +# RFC 7496 - 2023-02-02 - VRL Return Keyword + +Add return expression to the Vector Remap Language. 
+
+## Context
+
+- #7496
+
+## Cross cutting concerns
+
+- None.
+
+## Scope
+
+### In scope
+
+- Adding a `return` expression to VRL.
+- The `return` keyword takes an expression as an argument; the result of that expression is returned as the emitted event.
+
+### Out of scope
+
+- Adding new keywords for similar purposes, such as `drop`.
+- Defining semantics for keywords that are usually used for other purposes in other languages, such as `break`.
+- Implementation of `return` expressions inside closures.
+
+## Pain
+
+- Aborting a program while keeping the modifications made to the event cannot currently be done easily.
+- VRL code is often unnecessarily indented because of the lack of early returns.
+
+## Proposal
+
+### User Experience
+
+- A `return` expression causes the VRL program to terminate, keeping any modifications made to the event.
+- A `return` expression must always be followed by another expression, whose value is used as the emitted event. For example, under this proposal `return .` inside a conditional would emit the current event as-is and skip the rest of the program.
+- The keyword cannot be used inside a closure. Trying to do so results in a compilation error.
+
+### Implementation
+
+- The implementation will be similar to the current `abort` keyword with `drop_on_abort` set to `false`. The only difference is that the returned value is taken from the provided expression rather than from the original input.
+- `drop_on_abort` will have no effect on `return` calls, and configuration such as `drop_on_return` will not be added.
+
+## Rationale
+
+- It will be possible to write VRL with less indentation, making it more readable.
+- `return` is already a reserved word, so it can be used without introducing a breaking change.
+
+## Drawbacks
+
+- The `return` keyword will be given a semantic meaning that will have to be supported going forward.
+
+## Prior Art
+
+- Most languages have a way to perform early returns.
+- To my knowledge, there has been no prior attempt to implement returns in VRL.
+
+## Alternatives
+
+- New keywords that are not currently reserved could be added to the language. This would, however, constitute a breaking change.
+- This feature could also be rejected, as it does not add any functionality that cannot already be expressed.
+
+## Outstanding Questions
+
+## Plan Of Attack
+
+Incremental steps to execute this change. These will be converted to issues after the RFC is approved:
+
+- [ ] Submit a PR with the implementation of `return` expressions.
+
+## Future Improvements
+
+- Adding a `drop` keyword for explicitly dropping events, as an alternative to the pre-configured `abort`, giving full control over passing events to the output unchanged, passing them on changed, or routing them to the dropped output.
+- Adding `return` to closures.

From 4f0dbf4d2792dc266e0b9ea74158a6a96a1adccb Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 16:49:31 +0000
Subject: [PATCH 0021/1491] chore(deps): Bump openssl-src from 300.2.2+3.2.1 to
 300.2.3+3.2.1 (#19869)

Bumps [openssl-src](https://github.com/alexcrichton/openssl-src-rs) from 300.2.2+3.2.1 to 300.2.3+3.2.1.
- [Release notes](https://github.com/alexcrichton/openssl-src-rs/releases)
- [Commits](https://github.com/alexcrichton/openssl-src-rs/commits)

---
updated-dependencies:
- dependency-name: openssl-src
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
 Cargo.lock | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index b805f4b08d0ea..6c33c2194339d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -5954,9 +5954,9 @@ checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf"

 [[package]]
 name = "openssl-src"
-version = "300.2.2+3.2.1"
+version = "300.2.3+3.2.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8bbfad0063610ac26ee79f7484739e2b07555a75c42453b89263830b5c8103bc"
+checksum = "5cff92b6f71555b61bb9315f7c64da3ca43d87531622120fea0195fc761b4843"
 dependencies = [
  "cc",
 ]

From c89099768af4ee63542dcb8c039e35bd7a6f2832 Mon Sep 17 00:00:00 2001
From: Pavlos Rontidis
Date: Thu, 15 Feb 2024 13:01:47 -0500
Subject: [PATCH 0022/1491] chore(tests): expose more test utils (#19885)

---
 src/config/source.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/src/config/source.rs b/src/config/source.rs
index 902781a04a023..20b2f227e7198 100644
--- a/src/config/source.rs
+++ b/src/config/source.rs
@@ -138,7 +138,7 @@ pub struct SourceContext {
 }

 impl SourceContext {
-    #[cfg(test)]
+    #[cfg(any(test, feature = "test-utils"))]
     pub fn new_shutdown(
         key: &ComponentKey,
         out: SourceSender,
@@ -161,7 +161,7 @@ impl SourceContext {
         )
     }

-    #[cfg(test)]
+    #[cfg(any(test, feature = "test-utils"))]
     pub fn new_test(
         out: SourceSender,
         schema_definitions: Option<HashMap<Option<String>, schema::Definition>>,

From f920675d2658d5ea410847390d7ba3be435a932a Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Thu, 15 Feb 2024 18:09:29 +0000
Subject: [PATCH 0023/1491] chore(deps): Bump enumflags2 from 0.7.8 to 0.7.9
 (#19870)

Bumps [enumflags2](https://github.com/meithecatte/enumflags2) from 0.7.8 to 0.7.9.
- [Release notes](https://github.com/meithecatte/enumflags2/releases)
- [Commits](https://github.com/meithecatte/enumflags2/compare/v0.7.8...v0.7.9)

---
updated-dependencies:
- dependency-name: enumflags2
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- lib/vector-core/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6c33c2194339d..1bbfb0508cd72 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3067,18 +3067,18 @@ dependencies = [ [[package]] name = "enumflags2" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5998b4f30320c9d93aed72f63af821bfdac50465b75428fce77b48ec482c3939" +checksum = "3278c9d5fb675e0a51dabcf4c0d355f692b064171535ba72361be1528a9d8e8d" dependencies = [ "enumflags2_derive", ] [[package]] name = "enumflags2_derive" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f95e2801cd355d4a1a3e3953ce6ee5ae9603a5c833455343a8bfe3f44d418246" +checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index c555a8f610e9a..2d75ee12b0a5c 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -15,7 +15,7 @@ crossbeam-utils = { version = "0.8.19", default-features = false } db-key = { version = "0.0.5", default-features = false, optional = true } dyn-clone = { version = "1.0.16", default-features = false } enrichment = { path = "../enrichment", optional = true } -enumflags2 = { version = "0.7.8", default-features = false } +enumflags2 = { version = "0.7.9", default-features = false } float_eq = { version = "1.0", default-features = false } futures = { version = "0.3.30", default-features = false, features = ["std"] } futures-util = { version = "0.3.29", default-features = false, features = ["std"] } From a7fe0dbfbd41197bb09fb6a8f2d8562a22384c99 Mon Sep 17 00:00:00 2001 From: Sergey Yedrikov <48031344+syedriko@users.noreply.github.com> Date: Thu, 15 Feb 2024 13:16:09 -0500 Subject: [PATCH 0024/1491] enhancement(kubernetes): add support for include_paths_glob_patterns (#19521) Signed-off-by: Jesse Szwedko --- ...include_paths_glob_patterns.enhancement.md | 3 + .../kubernetes_logs/k8s_paths_provider.rs | 82 +++++++++++++++++-- src/sources/kubernetes_logs/mod.rs | 43 ++++++++-- .../sources/base/kubernetes_logs.cue | 10 +++ .../components/sources/kubernetes_logs.cue | 4 + 5 files changed, 127 insertions(+), 15 deletions(-) create mode 100644 changelog.d/include_paths_glob_patterns.enhancement.md diff --git a/changelog.d/include_paths_glob_patterns.enhancement.md b/changelog.d/include_paths_glob_patterns.enhancement.md new file mode 100644 index 0000000000000..96cc54fc037ef --- /dev/null +++ b/changelog.d/include_paths_glob_patterns.enhancement.md @@ -0,0 +1,3 @@ +A new configuration option include_paths_glob_patterns has been introduced in the Kubernetes Logs source. This option works alongside the existing exclude_paths_glob_patterns to help narrow down the selection of logs to be considered. include_paths_glob_patterns is evaluated before exclude_paths_glob_patterns. 
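For illustration, here is a minimal sketch of that include-then-exclude ordering, written directly against the `glob` crate the source already uses; the `keep_paths` helper and the sample paths are hypothetical, not code from this patch:

```rust
use std::path::PathBuf;

// Keep paths that match at least one include pattern, then drop any that
// match an exclude pattern -- include patterns are evaluated first.
fn keep_paths(
    paths: Vec<PathBuf>,
    include: &[glob::Pattern],
    exclude: &[glob::Pattern],
) -> Vec<PathBuf> {
    let options = glob::MatchOptions {
        require_literal_separator: true,
        ..Default::default()
    };
    paths
        .into_iter()
        .filter(|path| include.iter().any(|p| p.matches_path_with(path, options)))
        .filter(|path| !exclude.iter().any(|p| p.matches_path_with(path, options)))
        .collect()
}

fn main() {
    // Mirrors the defaults: include everything, exclude compressed files.
    let include = vec![glob::Pattern::new("**/*").unwrap()];
    let exclude = vec![glob::Pattern::new("**/*.gz").unwrap()];
    let paths = vec![
        PathBuf::from("/var/log/pods/app.log"),
        PathBuf::from("/var/log/pods/app.log.gz"),
    ];
    assert_eq!(
        keep_paths(paths, &include, &exclude),
        vec![PathBuf::from("/var/log/pods/app.log")]
    );
}
```

Because the include set is evaluated first, `exclude_paths_glob_patterns` can only narrow the selection that the include patterns already made.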
+
+authors: syedriko
diff --git a/src/sources/kubernetes_logs/k8s_paths_provider.rs b/src/sources/kubernetes_logs/k8s_paths_provider.rs
index 0eacbcd74c76e..c1cdc13c7f008 100644
--- a/src/sources/kubernetes_logs/k8s_paths_provider.rs
+++ b/src/sources/kubernetes_logs/k8s_paths_provider.rs
@@ -16,6 +16,7 @@ use crate::kubernetes::pod_manager_logic::extract_static_pod_config_hashsum;
 pub struct K8sPathsProvider {
     pod_state: Store<Pod>,
     namespace_state: Store<Namespace>,
+    include_paths: Vec<glob::Pattern>,
     exclude_paths: Vec<glob::Pattern>,
 }

@@ -24,11 +25,13 @@ impl K8sPathsProvider {
     pub fn new(
         pod_state: Store<Pod>,
         namespace_state: Store<Namespace>,
+        include_paths: Vec<glob::Pattern>,
         exclude_paths: Vec<glob::Pattern>,
     ) -> Self {
         Self {
             pod_state,
             namespace_state,
+            include_paths,
             exclude_paths,
         }
     }
@@ -57,7 +60,12 @@ impl PathsProvider for K8sPathsProvider {
             .flat_map(|pod| {
                 trace!(message = "Providing log paths for pod.", pod = ?pod.metadata.name);
                 let paths_iter = list_pod_log_paths(real_glob, pod.as_ref());
-                exclude_paths(paths_iter, &self.exclude_paths).collect::<Vec<_>>()
+                filter_paths(
+                    filter_paths(paths_iter, &self.include_paths, true),
+                    &self.exclude_paths,
+                    false,
+                )
+                .collect::<Vec<_>>()
             })
             .collect()
     }
@@ -159,7 +167,7 @@ where
             build_container_exclusion_patterns(dir, excluded_containers).collect();

         // Return paths filtered with container exclusion.
-        exclude_paths(path_iter, exclusion_patterns)
+        filter_paths(path_iter, exclusion_patterns, false)
     })
 }

@@ -175,12 +183,13 @@ fn real_glob(pattern: &str) -> impl Iterator<Item = PathBuf> {
         .flat_map(|paths| paths.into_iter())
 }

-fn exclude_paths<'a>(
+fn filter_paths<'a>(
     iter: impl Iterator<Item = PathBuf> + 'a,
     patterns: impl AsRef<[glob::Pattern]> + 'a,
+    include: bool,
 ) -> impl Iterator<Item = PathBuf> + 'a {
     iter.filter(move |path| {
-        !patterns.as_ref().iter().any(|pattern| {
+        let m = patterns.as_ref().iter().any(|pattern| {
             pattern.matches_path_with(
                 path,
                 glob::MatchOptions {
@@ -188,7 +197,12 @@
                     ..Default::default()
                 },
             )
-        })
+        });
+        if include {
+            m
+        } else {
+            !m
+        }
     })
 }

@@ -199,8 +213,8 @@ mod tests {
     use k8s_openapi::{api::core::v1::Pod, apimachinery::pkg::apis::meta::v1::ObjectMeta};

     use super::{
-        build_container_exclusion_patterns, exclude_paths, extract_excluded_containers_for_pod,
-        extract_pod_logs_directory, list_pod_log_paths,
+        build_container_exclusion_patterns, extract_excluded_containers_for_pod,
+        extract_pod_logs_directory, filter_paths, list_pod_log_paths,
     };

     #[test]
@@ -508,7 +522,59 @@
             .map(|pattern| glob::Pattern::new(pattern).unwrap())
             .collect();
         let actual_paths: Vec<_> =
-            exclude_paths(input_paths.into_iter().map(Into::into), &patterns).collect();
+            filter_paths(input_paths.into_iter().map(Into::into), &patterns, false).collect();
         let expected_paths: Vec<_> = expected_paths.into_iter().map(PathBuf::from).collect();
         assert_eq!(
             actual_paths, expected_paths,
             "failed for patterns {:?}",
             &str_patterns
         )
     }
 }
+
+    #[test]
+    fn test_include_paths() {
+        let cases = vec![
+            (
+                vec![
+                    "/var/log/pods/a.log",
+                    "/var/log/pods/b.log",
+                    "/var/log/pods/c.log.foo",
+                    "/var/log/pods/d.logbar",
+                    "/tmp/foo",
+                ],
+                vec!["/var/log/pods/*"],
+                vec![
+                    "/var/log/pods/a.log",
+                    "/var/log/pods/b.log",
+                    "/var/log/pods/c.log.foo",
+                    "/var/log/pods/d.logbar",
+                ],
+            ),
+            (
+                vec![
+                    "/var/log/pods/a.log",
+                    "/var/log/pods/b.log",
+                    "/var/log/pods/c.log.foo",
+                    "/var/log/pods/d.logbar",
+                ],
+                vec!["/tmp/*"],
+                vec![],
+            ),
+            (
+                vec!["/var/log/pods/a.log", "/tmp/foo"],
+                vec!["**/*"],
+                vec!["/var/log/pods/a.log", "/tmp/foo"],
+            ),
+        ];
+
+        for (input_paths, str_patterns, expected_paths) in cases {
+            let patterns: Vec<_> = str_patterns
+                .iter()
+                .map(|pattern| glob::Pattern::new(pattern).unwrap())
+                .collect();
+            let actual_paths: Vec<_> =
+                filter_paths(input_paths.into_iter().map(Into::into), &patterns, true).collect();
+            let expected_paths: Vec<_> = expected_paths.into_iter().map(PathBuf::from).collect();
+            assert_eq!(
+                actual_paths, expected_paths,
+                "failed for patterns {:?}",
+                &str_patterns
+            )
+        }
+    }
diff --git a/src/sources/kubernetes_logs/mod.rs b/src/sources/kubernetes_logs/mod.rs
index 53a44e1601256..296947b4353c5 100644
--- a/src/sources/kubernetes_logs/mod.rs
+++ b/src/sources/kubernetes_logs/mod.rs
@@ -152,6 +152,10 @@ pub struct Config {
     #[configurable(derived)]
     node_annotation_fields: node_metadata_annotator::FieldsSpec,

+    /// A list of glob patterns to include while reading the files.
+    #[configurable(metadata(docs::examples = "**/include/**"))]
+    include_paths_glob_patterns: Vec<PathBuf>,
+
     /// A list of glob patterns to exclude from reading the files.
     #[configurable(metadata(docs::examples = "**/exclude/**"))]
     exclude_paths_glob_patterns: Vec<PathBuf>,
@@ -279,6 +283,7 @@ impl Default for Config {
             pod_annotation_fields: pod_metadata_annotator::FieldsSpec::default(),
             namespace_annotation_fields: namespace_metadata_annotator::FieldsSpec::default(),
             node_annotation_fields: node_metadata_annotator::FieldsSpec::default(),
+            include_paths_glob_patterns: default_path_inclusion(),
             exclude_paths_glob_patterns: default_path_exclusion(),
             read_from: default_read_from(),
             ignore_older_secs: None,
@@ -534,6 +539,7 @@ struct Source {
     namespace_label_selector: String,
     node_selector: String,
     self_node_name: String,
+    include_paths: Vec<glob::Pattern>,
     exclude_paths: Vec<glob::Pattern>,
     read_from: ReadFrom,
     ignore_older_secs: Option<u64>,
@@ -591,6 +597,8 @@ impl Source {
         let data_dir = globals.resolve_and_make_data_subdir(config.data_dir.as_ref(), key.id())?;

+        let include_paths = prepare_include_paths(config)?;
+
         let exclude_paths = prepare_exclude_paths(config)?;

         let glob_minimum_cooldown = config.glob_minimum_cooldown_ms;
@@ -614,6 +622,7 @@ impl Source {
             namespace_label_selector,
             node_selector,
             self_node_name,
+            include_paths,
             exclude_paths,
             read_from: ReadFrom::from(config.read_from),
             ignore_older_secs: config.ignore_older_secs,
@@ -648,6 +657,7 @@ impl Source {
             namespace_label_selector,
             node_selector,
             self_node_name,
+            include_paths,
             exclude_paths,
             read_from,
             ignore_older_secs,
@@ -740,8 +750,12 @@ impl Source {
             delay_deletion,
         )));

-        let paths_provider =
-            K8sPathsProvider::new(pod_state.clone(), ns_state.clone(), exclude_paths);
+        let paths_provider = K8sPathsProvider::new(
+            pod_state.clone(),
+            ns_state.clone(),
+            include_paths,
+            exclude_paths,
+        );
         let annotator = PodMetadataAnnotator::new(pod_state, pod_fields_spec, log_namespace);
         let ns_annotator =
             NamespaceMetadataAnnotator::new(ns_state, namespace_fields_spec, log_namespace);
@@ -968,6 +982,10 @@ fn default_self_node_name_env_template() -> String {
     format!("${{{}}}", SELF_NODE_NAME_ENV_KEY.to_owned())
 }

+fn default_path_inclusion() -> Vec<PathBuf> {
+    vec![PathBuf::from("**/*")]
+}
+
 fn default_path_exclusion() -> Vec<PathBuf> {
     vec![PathBuf::from("**/*.gz"), PathBuf::from("**/*.tmp")]
 }
@@ -1011,11 +1029,22 @@ const fn default_rotate_wait() -> Duration {
     Duration::from_secs(u64::MAX / 2)
 }

+// This function constructs the patterns we include for file watching, created
+// from the defaults or user provided configuration.
+fn prepare_include_paths(config: &Config) -> crate::Result<Vec<glob::Pattern>> {
+    prepare_glob_patterns(&config.include_paths_glob_patterns, "Including")
+}
+
 // This function constructs the patterns we exclude from file watching, created
 // from the defaults or user provided configuration.
 fn prepare_exclude_paths(config: &Config) -> crate::Result<Vec<glob::Pattern>> {
-    let exclude_paths = config
-        .exclude_paths_glob_patterns
+    prepare_glob_patterns(&config.exclude_paths_glob_patterns, "Excluding")
+}
+
+// This function constructs the patterns for file watching, created
+// from the defaults or user provided configuration.
+fn prepare_glob_patterns(paths: &[PathBuf], op: &str) -> crate::Result<Vec<glob::Pattern>> {
+    let ret = paths
         .iter()
         .map(|pattern| {
             let pattern = pattern
@@ -1026,14 +1055,14 @@ fn prepare_exclude_paths(config: &Config) -> crate::Result<Vec<glob::Pattern>> {
         .collect::<crate::Result<Vec<_>>>()?;

     info!(
-        message = "Excluding matching files.",
-        exclude_paths = ?exclude_paths
+        message = format!("{op} matching files."),
+        ret = ?ret
             .iter()
             .map(glob::Pattern::as_str)
             .collect::<Vec<_>>()
     );

-    Ok(exclude_paths)
+    Ok(ret)
 }

 // This function constructs the effective field selector to use, based on
diff --git a/website/cue/reference/components/sources/base/kubernetes_logs.cue b/website/cue/reference/components/sources/base/kubernetes_logs.cue
index 1db54ec13bbfe..bed681ab70e3e 100644
--- a/website/cue/reference/components/sources/base/kubernetes_logs.cue
+++ b/website/cue/reference/components/sources/base/kubernetes_logs.cue
@@ -133,6 +133,16 @@ base: components: sources: kubernetes_logs: configuration: {
 			unit: "seconds"
 		}
 	}
+	include_paths_glob_patterns: {
+		description: "A list of glob patterns to include while reading the files."
+		required:    false
+		type: array: {
+			default: [
+				"**/*",
+			]
+			items: type: string: examples: ["**/include/**"]
+		}
+	}
 	ingestion_timestamp_field: {
 		description: """
 			Overrides the name of the log field used to add the ingestion timestamp to each event.
diff --git a/website/cue/reference/components/sources/kubernetes_logs.cue b/website/cue/reference/components/sources/kubernetes_logs.cue
index b1a3986f9435d..3c5d09f55b7a5 100644
--- a/website/cue/reference/components/sources/kubernetes_logs.cue
+++ b/website/cue/reference/components/sources/kubernetes_logs.cue
@@ -279,8 +279,12 @@ components: sources: kubernetes_logs: {
 				* Built-in [Pod](#pod-exclusion) and [Container](#container-exclusion)
 				  exclusion rules.
+				* The `include_paths_glob_patterns` option allows you to include
+				  Kubernetes log files by the file name and path.
 				* The `exclude_paths_glob_patterns` option allows you to exclude
 				  Kubernetes log files by the file name and path.
+				* The `include_paths_glob_patterns` option defaults to including all
+				  files (`**/*`) and is evaluated before the `exclude_paths_glob_patterns` option.
 				* The `extra_field_selector` option specifies the field selector to
 				  filter Pods with, to be used in addition to the built-in Node filter.
 				* The `extra_label_selector` option specifies the label selector to

From 9a0a5e4784bf80af8be7a7e8cfa8516a70d39704 Mon Sep 17 00:00:00 2001
From: Sebastian Tia <75666019+sebastiantia@users.noreply.github.com>
Date: Thu, 15 Feb 2024 18:16:40 -0500
Subject: [PATCH 0025/1491] enhancement(http sink): Update HttpRequest struct
 to pass additional metadata (#19780)

Updates the existing HttpService to permit passing arbitrary metadata to build in the HTTP request builder (ref). This allows for template-able headers to be configured with sinks that take advantage of the generic service implementation for HTTP stream sinks.
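For instance, here is a minimal sketch of what the new type parameter enables. The `TenantMetadata` type and `X-Tenant-Id` header are hypothetical; `HttpRequest<T>` and `HttpServiceRequestBuilder<T>` are the definitions this patch introduces in `src/sinks/util/http.rs` and are assumed to be in scope:

```rust
use bytes::Bytes;
use http::{Request, Uri};

// Hypothetical per-request metadata, e.g. a tenant ID rendered from a
// template when the batch is built.
#[derive(Clone)]
struct TenantMetadata {
    tenant_id: String,
}

struct TenantRequestBuilder {
    uri: Uri,
}

// Assumes the `HttpRequest<T>` / `HttpServiceRequestBuilder<T>` definitions
// from this patch are in scope.
impl HttpServiceRequestBuilder<TenantMetadata> for TenantRequestBuilder {
    fn build(&self, mut request: HttpRequest<TenantMetadata>) -> Request<Bytes> {
        // Read the per-request metadata and turn it into a header.
        let tenant_id = request.get_additional_metadata().tenant_id.clone();
        Request::post(&self.uri)
            .header("X-Tenant-Id", tenant_id)
            .body(request.take_payload())
            .expect("building HTTP request failed unexpectedly")
    }
}
```

Each `HttpRequest<T>` carries its metadata alongside the payload, so the one generic `HttpService` can build per-request headers without sink-specific service code.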
This PR includes refactoring the Clickhouse sink to use this updated HttpService as an example. --- Cargo.toml | 2 +- .../clickhouse_acknowledgements.breaking.md | 1 + src/sinks/clickhouse/config.rs | 56 ++++-- src/sinks/clickhouse/integration_tests.rs | 2 +- src/sinks/clickhouse/mod.rs | 1 + src/sinks/clickhouse/request_builder.rs | 58 ++++++ src/sinks/clickhouse/service.rs | 174 +++++------------- src/sinks/clickhouse/sink.rs | 140 +++++--------- .../gcp/stackdriver/logs/request_builder.rs | 4 +- src/sinks/gcp/stackdriver/logs/service.rs | 17 +- src/sinks/gcp/stackdriver/logs/sink.rs | 4 +- src/sinks/gcp/stackdriver/logs/tests.rs | 14 +- src/sinks/gcp/stackdriver/metrics/config.rs | 10 +- .../stackdriver/metrics/request_builder.rs | 4 +- src/sinks/gcp/stackdriver/metrics/sink.rs | 4 +- src/sinks/honeycomb/request_builder.rs | 4 +- src/sinks/honeycomb/service.rs | 12 +- src/sinks/honeycomb/sink.rs | 4 +- src/sinks/http/request_builder.rs | 4 +- src/sinks/http/service.rs | 11 +- src/sinks/http/sink.rs | 4 +- src/sinks/util/http.rs | 43 +++-- 22 files changed, 271 insertions(+), 302 deletions(-) create mode 100644 changelog.d/clickhouse_acknowledgements.breaking.md create mode 100644 src/sinks/clickhouse/request_builder.rs diff --git a/Cargo.toml b/Cargo.toml index 7e84f9cd3e5b4..6210c2b4d1947 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ default-run = "vector" autobenches = false # our benchmarks are not runnable on their own either way # Minimum supported rust version # See docs/DEVELOPING.md for policy -rust-version = "1.71.1" +rust-version = "1.74" [[bin]] name = "vector" diff --git a/changelog.d/clickhouse_acknowledgements.breaking.md b/changelog.d/clickhouse_acknowledgements.breaking.md new file mode 100644 index 0000000000000..5171179e20582 --- /dev/null +++ b/changelog.d/clickhouse_acknowledgements.breaking.md @@ -0,0 +1 @@ +When end-to-end acknowledgments are enabled, outgoing requests in the ClickHouse sink that encounter 500-level errors will now correctly report an errored (retriable) status, rather than a rejected (permanent) status, to Vector's clients. diff --git a/src/sinks/clickhouse/config.rs b/src/sinks/clickhouse/config.rs index 7330702fcf3a4..ed6763baa7935 100644 --- a/src/sinks/clickhouse/config.rs +++ b/src/sinks/clickhouse/config.rs @@ -1,18 +1,21 @@ -use http::{Request, StatusCode, Uri}; -use hyper::Body; -use std::fmt; +//! Configuration for the `Clickhouse` sink. use super::{ - service::{ClickhouseRetryLogic, ClickhouseService}, - sink::ClickhouseSink, + request_builder::ClickhouseRequestBuilder, + service::{ClickhouseRetryLogic, ClickhouseServiceRequestBuilder}, + sink::{ClickhouseSink, PartitionKey}, }; use crate::{ - http::{get_http_scheme_from_uri, Auth, HttpClient, MaybeAuth}, + http::{Auth, HttpClient, MaybeAuth}, sinks::{ prelude::*, - util::{RealtimeSizeBasedDefaultBatchSettings, UriSerde}, + util::{http::HttpService, RealtimeSizeBasedDefaultBatchSettings, UriSerde}, }, }; +use http::{Request, StatusCode, Uri}; +use hyper::Body; +use std::fmt; +use vector_lib::codecs::{encoding::Framer, JsonSerializerConfig, NewlineDelimitedEncoderConfig}; /// Data format. 
/// @@ -20,7 +23,7 @@ use crate::{ /// /// [formats]: https://clickhouse.com/docs/en/interfaces/formats #[configurable_component] -#[derive(Clone, Copy, Debug, Derivative, Eq, PartialEq)] +#[derive(Clone, Copy, Debug, Derivative, Eq, PartialEq, Hash)] #[serde(rename_all = "snake_case")] #[derivative(Default)] #[allow(clippy::enum_variant_names)] @@ -114,41 +117,56 @@ impl_generate_config_from_default!(ClickhouseConfig); impl SinkConfig for ClickhouseConfig { async fn build(&self, cx: SinkContext) -> crate::Result<(VectorSink, Healthcheck)> { let endpoint = self.endpoint.with_default_parts().uri; - let protocol = get_http_scheme_from_uri(&endpoint); let auth = self.auth.choose_one(&self.endpoint.auth)?; let tls_settings = TlsSettings::from_options(&self.tls)?; + let client = HttpClient::new(tls_settings, &cx.proxy)?; - let service = ClickhouseService::new( - client.clone(), - auth.clone(), - endpoint.clone(), - self.skip_unknown_fields, - self.date_time_best_effort, - ); + let clickhouse_service_request_builder = ClickhouseServiceRequestBuilder { + auth: auth.clone(), + endpoint: endpoint.clone(), + skip_unknown_fields: self.skip_unknown_fields, + date_time_best_effort: self.date_time_best_effort, + compression: self.compression, + }; + + let service: HttpService = + HttpService::new(client.clone(), clickhouse_service_request_builder); let request_limits = self.request.into_settings(); + let service = ServiceBuilder::new() .settings(request_limits, ClickhouseRetryLogic::default()) .service(service); let batch_settings = self.batch.into_batcher_settings()?; + let database = self.database.clone().unwrap_or_else(|| { "default" .try_into() .expect("'default' should be a valid template") }); + + let request_builder = ClickhouseRequestBuilder { + compression: self.compression, + encoding: ( + self.encoding.clone(), + Encoder::::new( + NewlineDelimitedEncoderConfig.build().into(), + JsonSerializerConfig::default().build().into(), + ), + ), + }; + let sink = ClickhouseSink::new( batch_settings, - self.compression, - self.encoding.clone(), service, - protocol, database, self.table.clone(), self.format, + request_builder, ); let healthcheck = Box::pin(healthcheck(client, endpoint, auth)); diff --git a/src/sinks/clickhouse/integration_tests.rs b/src/sinks/clickhouse/integration_tests.rs index 79a6bc9c1985f..703eb5fdc5b00 100644 --- a/src/sinks/clickhouse/integration_tests.rs +++ b/src/sinks/clickhouse/integration_tests.rs @@ -324,7 +324,7 @@ async fn no_retry_on_incorrect_data_warp() { .unwrap() .unwrap(); - assert_eq!(receiver.try_recv(), Ok(BatchStatus::Rejected)); + assert_eq!(receiver.try_recv(), Ok(BatchStatus::Errored)); } #[tokio::test] diff --git a/src/sinks/clickhouse/mod.rs b/src/sinks/clickhouse/mod.rs index b03eb89417691..b107b47356635 100644 --- a/src/sinks/clickhouse/mod.rs +++ b/src/sinks/clickhouse/mod.rs @@ -12,6 +12,7 @@ mod config; #[cfg(all(test, feature = "clickhouse-integration-tests"))] mod integration_tests; +mod request_builder; mod service; mod sink; pub use self::config::ClickhouseConfig; diff --git a/src/sinks/clickhouse/request_builder.rs b/src/sinks/clickhouse/request_builder.rs new file mode 100644 index 0000000000000..3b391d2bdaf2d --- /dev/null +++ b/src/sinks/clickhouse/request_builder.rs @@ -0,0 +1,58 @@ +//! `RequestBuilder` implementation for the `Clickhouse` sink. 
+ +use super::sink::PartitionKey; +use crate::sinks::{prelude::*, util::http::HttpRequest}; +use bytes::Bytes; +use vector_lib::codecs::encoding::Framer; + +pub(super) struct ClickhouseRequestBuilder { + pub(super) compression: Compression, + pub(super) encoding: (Transformer, Encoder), +} + +impl RequestBuilder<(PartitionKey, Vec)> for ClickhouseRequestBuilder { + type Metadata = (PartitionKey, EventFinalizers); + type Events = Vec; + type Encoder = (Transformer, Encoder); + type Payload = Bytes; + type Request = HttpRequest; + type Error = std::io::Error; + + fn compression(&self) -> Compression { + self.compression + } + + fn encoder(&self) -> &Self::Encoder { + &self.encoding + } + + fn split_input( + &self, + input: (PartitionKey, Vec), + ) -> (Self::Metadata, RequestMetadataBuilder, Self::Events) { + let (key, mut events) = input; + + let finalizers = events.take_finalizers(); + let builder = RequestMetadataBuilder::from_events(&events); + ((key, finalizers), builder, events) + } + + fn build_request( + &self, + metadata: Self::Metadata, + request_metadata: RequestMetadata, + payload: EncodeResult, + ) -> Self::Request { + let (key, finalizers) = metadata; + HttpRequest::new( + payload.into_payload(), + finalizers, + request_metadata, + PartitionKey { + database: key.database, + table: key.table, + format: key.format, + }, + ) + } +} diff --git a/src/sinks/clickhouse/service.rs b/src/sinks/clickhouse/service.rs index f740e64191082..66180dee0582a 100644 --- a/src/sinks/clickhouse/service.rs +++ b/src/sinks/clickhouse/service.rs @@ -1,72 +1,24 @@ -use bytes::Bytes; -use http::{ - header::{CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE}, - Request, Response, StatusCode, Uri, -}; -use hyper::{body, Body}; -use snafu::ResultExt; -use std::task::{Context, Poll}; -use tracing::Instrument; +//! Service implementation for the `Clickhouse` sink. 
+use super::sink::PartitionKey; use crate::{ - http::{Auth, HttpClient, HttpError}, + http::{Auth, HttpError}, sinks::{ clickhouse::config::Format, prelude::*, - util::{http::HttpRetryLogic, retries::RetryAction}, + util::{ + http::{HttpRequest, HttpResponse, HttpRetryLogic, HttpServiceRequestBuilder}, + retries::RetryAction, + }, UriParseSnafu, }, }; - -#[derive(Debug, Clone)] -pub struct ClickhouseRequest { - pub database: String, - pub table: String, - pub format: Format, - pub body: Bytes, - pub compression: Compression, - pub finalizers: EventFinalizers, - pub metadata: RequestMetadata, -} - -impl MetaDescriptive for ClickhouseRequest { - fn get_metadata(&self) -> &RequestMetadata { - &self.metadata - } - - fn metadata_mut(&mut self) -> &mut RequestMetadata { - &mut self.metadata - } -} - -impl Finalizable for ClickhouseRequest { - fn take_finalizers(&mut self) -> EventFinalizers { - self.finalizers.take_finalizers() - } -} - -pub struct ClickhouseResponse { - http_response: Response, - events_byte_size: GroupedCountByteSize, - raw_byte_size: usize, -} - -impl DriverResponse for ClickhouseResponse { - fn event_status(&self) -> EventStatus { - match self.http_response.status().is_success() { - true => EventStatus::Delivered, - false => EventStatus::Rejected, - } - } - - fn events_sent(&self) -> &GroupedCountByteSize { - &self.events_byte_size - } - - fn bytes_sent(&self) -> Option { - Some(self.raw_byte_size) - } -} +use bytes::Bytes; +use http::{ + header::{CONTENT_ENCODING, CONTENT_LENGTH, CONTENT_TYPE}, + Request, StatusCode, Uri, +}; +use snafu::ResultExt; #[derive(Debug, Default, Clone)] pub struct ClickhouseRetryLogic { @@ -75,7 +27,7 @@ pub struct ClickhouseRetryLogic { impl RetryLogic for ClickhouseRetryLogic { type Error = HttpError; - type Response = ClickhouseResponse; + type Response = HttpResponse; fn is_retriable_error(&self, error: &Self::Error) -> bool { self.inner.is_retriable_error(error) @@ -107,86 +59,46 @@ impl RetryLogic for ClickhouseRetryLogic { } } -/// `ClickhouseService` is a `Tower` service used to send logs to Clickhouse. #[derive(Debug, Clone)] -pub struct ClickhouseService { - client: HttpClient, - auth: Option, - endpoint: Uri, - skip_unknown_fields: bool, - date_time_best_effort: bool, -} - -impl ClickhouseService { - /// Creates a new `ClickhouseService`. - pub const fn new( - client: HttpClient, - auth: Option, - endpoint: Uri, - skip_unknown_fields: bool, - date_time_best_effort: bool, - ) -> Self { - Self { - client, - auth, - endpoint, - skip_unknown_fields, - date_time_best_effort, - } - } +pub(super) struct ClickhouseServiceRequestBuilder { + pub(super) auth: Option, + pub(super) endpoint: Uri, + pub(super) skip_unknown_fields: bool, + pub(super) date_time_best_effort: bool, + pub(super) compression: Compression, } -impl Service for ClickhouseService { - type Response = ClickhouseResponse; - type Error = crate::Error; - type Future = BoxFuture<'static, Result>; - - // Emission of Error internal event is handled upstream by the caller. - fn poll_ready(&mut self, _cx: &mut Context) -> Poll> { - Poll::Ready(Ok(())) - } - - // Emission of Error internal event is handled upstream by the caller. 
- fn call(&mut self, request: ClickhouseRequest) -> Self::Future { - let mut client = self.client.clone(); - let auth = self.auth.clone(); +impl HttpServiceRequestBuilder for ClickhouseServiceRequestBuilder { + fn build(&self, mut request: HttpRequest) -> Request { + let metadata = request.get_additional_metadata(); - // Build the URI outside of the boxed future to avoid unnecessary clones. let uri = set_uri_query( &self.endpoint, - &request.database, - &request.table, - request.format, + &metadata.database, + &metadata.table, + metadata.format, self.skip_unknown_fields, self.date_time_best_effort, - ); + ) + .expect("building uri failed unexpectedly"); - Box::pin(async move { - let mut builder = Request::post(&uri?) - .header(CONTENT_TYPE, "application/x-ndjson") - .header(CONTENT_LENGTH, request.body.len()); - if let Some(ce) = request.compression.content_encoding() { - builder = builder.header(CONTENT_ENCODING, ce); - } - if let Some(auth) = auth { - builder = auth.apply_builder(builder); - } + let auth: Option = self.auth.clone(); + + let payload = request.take_payload(); - let http_request = builder - .body(Body::from(request.body)) - .expect("building HTTP request failed unexpectedly"); + let mut builder = Request::post(&uri) + .header(CONTENT_TYPE, "application/x-ndjson") + .header(CONTENT_LENGTH, payload.len()); + if let Some(ce) = self.compression.content_encoding() { + builder = builder.header(CONTENT_ENCODING, ce); + } + if let Some(auth) = auth { + builder = auth.apply_builder(builder); + } - let response = client.call(http_request).in_current_span().await?; - let (parts, body) = response.into_parts(); - let body = body::to_bytes(body).await?; - Ok(ClickhouseResponse { - http_response: hyper::Response::from_parts(parts, body), - raw_byte_size: request.metadata.request_encoded_size(), - events_byte_size: request - .metadata - .into_events_estimated_json_encoded_byte_size(), - }) - }) + builder + .body(payload) + .expect("building HTTP request failed unexpectedly") } } diff --git a/src/sinks/clickhouse/sink.rs b/src/sinks/clickhouse/sink.rs index a34a86bdaee42..6e30172ba5a7d 100644 --- a/src/sinks/clickhouse/sink.rs +++ b/src/sinks/clickhouse/sink.rs @@ -1,49 +1,39 @@ -use bytes::Bytes; -use vector_lib::codecs::{encoding::Framer, JsonSerializerConfig, NewlineDelimitedEncoderConfig}; +//! Implementation of the `clickhouse` sink. 
-use super::service::{ClickhouseRequest, ClickhouseRetryLogic, ClickhouseService}; -use crate::sinks::prelude::*; +use super::{config::Format, request_builder::ClickhouseRequestBuilder}; +use crate::sinks::{prelude::*, util::http::HttpRequest}; -use crate::sinks::clickhouse::config::Format; - -pub struct ClickhouseSink { +pub struct ClickhouseSink { batch_settings: BatcherSettings, - compression: Compression, - encoding: (Transformer, Encoder), - service: Svc, - protocol: &'static str, + service: S, database: Template, table: Template, format: Format, + request_builder: ClickhouseRequestBuilder, } -impl ClickhouseSink { - #[allow(clippy::too_many_arguments)] - pub fn new( +impl ClickhouseSink +where + S: Service> + Send + 'static, + S::Future: Send + 'static, + S::Response: DriverResponse + Send + 'static, + S::Error: std::fmt::Debug + Into + Send, +{ + pub const fn new( batch_settings: BatcherSettings, - compression: Compression, - transformer: Transformer, - service: Svc, - protocol: &'static str, + service: S, database: Template, table: Template, format: Format, + request_builder: ClickhouseRequestBuilder, ) -> Self { Self { batch_settings, - compression, - encoding: ( - transformer, - Encoder::::new( - NewlineDelimitedEncoderConfig.build().into(), - JsonSerializerConfig::default().build().into(), - ), - ), service, - protocol, database, table, format, + request_builder, } } @@ -51,17 +41,14 @@ impl ClickhouseSink { let batch_settings = self.batch_settings; input - .batched_partitioned(KeyPartitioner::new(self.database, self.table), || { - batch_settings.as_byte_size_config() - }) + .batched_partitioned( + KeyPartitioner::new(self.database, self.table, self.format), + || batch_settings.as_byte_size_config(), + ) .filter_map(|(key, batch)| async move { key.map(move |k| (k, batch)) }) .request_builder( default_request_builder_concurrency_limit(), - ClickhouseRequestBuilder { - compression: self.compression, - encoding: self.encoding, - format: self.format, - }, + self.request_builder, ) .filter_map(|request| async { match request { @@ -73,14 +60,19 @@ impl ClickhouseSink { } }) .into_driver(self.service) - .protocol(self.protocol) .run() .await } } #[async_trait::async_trait] -impl StreamSink for ClickhouseSink { +impl StreamSink for ClickhouseSink +where + S: Service> + Send + 'static, + S::Future: Send + 'static, + S::Response: DriverResponse + Send + 'static, + S::Error: std::fmt::Debug + Into + Send, +{ async fn run( self: Box, input: futures_util::stream::BoxStream<'_, Event>, @@ -89,74 +81,28 @@ impl StreamSink for ClickhouseSink { } } -struct ClickhouseRequestBuilder { - compression: Compression, - encoding: (Transformer, Encoder), - format: Format, -} - -impl RequestBuilder<(PartitionKey, Vec)> for ClickhouseRequestBuilder { - type Metadata = (PartitionKey, EventFinalizers); - type Events = Vec; - type Encoder = (Transformer, Encoder); - type Payload = Bytes; - type Request = ClickhouseRequest; - type Error = std::io::Error; - - fn compression(&self) -> Compression { - self.compression - } - - fn encoder(&self) -> &Self::Encoder { - &self.encoding - } - - fn split_input( - &self, - input: (PartitionKey, Vec), - ) -> (Self::Metadata, RequestMetadataBuilder, Self::Events) { - let (key, mut events) = input; - - let finalizers = events.take_finalizers(); - let builder = RequestMetadataBuilder::from_events(&events); - ((key, finalizers), builder, events) - } - - fn build_request( - &self, - metadata: Self::Metadata, - request_metadata: RequestMetadata, - payload: EncodeResult, - ) -> 
Self::Request { - let (key, finalizers) = metadata; - ClickhouseRequest { - database: key.database, - table: key.table, - format: self.format, - body: payload.into_payload(), - compression: self.compression, - finalizers, - metadata: request_metadata, - } - } -} - /// PartitionKey used to partition events by (database, table) pair. #[derive(Hash, Eq, PartialEq, Clone, Debug)] -struct PartitionKey { - database: String, - table: String, +pub(super) struct PartitionKey { + pub database: String, + pub table: String, + pub format: Format, } /// KeyPartitioner that partitions events by (database, table) pair. struct KeyPartitioner { database: Template, table: Template, + format: Format, } impl KeyPartitioner { - const fn new(database: Template, table: Template) -> Self { - Self { database, table } + const fn new(database: Template, table: Template, format: Format) -> Self { + Self { + database, + table, + format, + } } fn render(template: &Template, item: &Event, field: &'static str) -> Option { @@ -180,6 +126,10 @@ impl Partitioner for KeyPartitioner { fn partition(&self, item: &Self::Item) -> Self::Key { let database = Self::render(&self.database, item, "database_key")?; let table = Self::render(&self.table, item, "table_key")?; - Some(PartitionKey { database, table }) + Some(PartitionKey { + database, + table, + format: self.format, + }) } } diff --git a/src/sinks/gcp/stackdriver/logs/request_builder.rs b/src/sinks/gcp/stackdriver/logs/request_builder.rs index 25de168cbe3ac..d46d43da3cd6d 100644 --- a/src/sinks/gcp/stackdriver/logs/request_builder.rs +++ b/src/sinks/gcp/stackdriver/logs/request_builder.rs @@ -16,7 +16,7 @@ impl RequestBuilder> for StackdriverLogsRequestBuilder { type Events = Vec; type Encoder = StackdriverLogsEncoder; type Payload = Bytes; - type Request = HttpRequest; + type Request = HttpRequest<()>; type Error = io::Error; fn compression(&self) -> Compression { @@ -42,6 +42,6 @@ impl RequestBuilder> for StackdriverLogsRequestBuilder { request_metadata: RequestMetadata, payload: EncodeResult, ) -> Self::Request { - HttpRequest::new(payload.into_payload(), metadata, request_metadata) + HttpRequest::new(payload.into_payload(), metadata, request_metadata, ()) } } diff --git a/src/sinks/gcp/stackdriver/logs/service.rs b/src/sinks/gcp/stackdriver/logs/service.rs index ee0a3d86e223f..45114d8dcd9a2 100644 --- a/src/sinks/gcp/stackdriver/logs/service.rs +++ b/src/sinks/gcp/stackdriver/logs/service.rs @@ -3,7 +3,10 @@ use bytes::Bytes; use http::{Request, Uri}; -use crate::{gcp::GcpAuthenticator, sinks::util::http::HttpServiceRequestBuilder}; +use crate::{ + gcp::GcpAuthenticator, + sinks::util::http::{HttpRequest, HttpServiceRequestBuilder}, +}; #[derive(Debug, Clone)] pub(super) struct StackdriverLogsServiceRequestBuilder { @@ -11,15 +14,15 @@ pub(super) struct StackdriverLogsServiceRequestBuilder { pub(super) auth: GcpAuthenticator, } -impl HttpServiceRequestBuilder for StackdriverLogsServiceRequestBuilder { - fn build(&self, body: Bytes) -> Request { - let mut request = Request::post(self.uri.clone()) +impl HttpServiceRequestBuilder<()> for StackdriverLogsServiceRequestBuilder { + fn build(&self, mut request: HttpRequest<()>) -> Request { + let mut builder = Request::post(self.uri.clone()) .header("Content-Type", "application/json") - .body(body) + .body(request.take_payload()) .unwrap(); - self.auth.apply(&mut request); + self.auth.apply(&mut builder); - request + builder } } diff --git a/src/sinks/gcp/stackdriver/logs/sink.rs b/src/sinks/gcp/stackdriver/logs/sink.rs index 
fbf3e2066d420..21d23037b620d 100644 --- a/src/sinks/gcp/stackdriver/logs/sink.rs +++ b/src/sinks/gcp/stackdriver/logs/sink.rs @@ -15,7 +15,7 @@ pub(super) struct StackdriverLogsSink { impl StackdriverLogsSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, @@ -63,7 +63,7 @@ where #[async_trait::async_trait] impl StreamSink for StackdriverLogsSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, diff --git a/src/sinks/gcp/stackdriver/logs/tests.rs b/src/sinks/gcp/stackdriver/logs/tests.rs index dee3db2dfd610..3e736baf54a84 100644 --- a/src/sinks/gcp/stackdriver/logs/tests.rs +++ b/src/sinks/gcp/stackdriver/logs/tests.rs @@ -19,7 +19,10 @@ use crate::{ service::StackdriverLogsServiceRequestBuilder, }, prelude::*, - util::{encoding::Encoder as _, http::HttpServiceRequestBuilder}, + util::{ + encoding::Encoder as _, + http::{HttpRequest, HttpServiceRequestBuilder}, + }, }, test_util::{ components::{run_and_assert_sink_compliance, HTTP_SINK_TAGS}, @@ -220,7 +223,14 @@ async fn correct_request() { auth: GcpAuthenticator::None, }; - let request = stackdriver_logs_service_request_builder.build(body); + let http_request = HttpRequest::new( + body, + EventFinalizers::default(), + RequestMetadata::default(), + (), + ); + + let request = stackdriver_logs_service_request_builder.build(http_request); let (parts, body) = request.into_parts(); let json: serde_json::Value = serde_json::from_slice(&body[..]).unwrap(); diff --git a/src/sinks/gcp/stackdriver/metrics/config.rs b/src/sinks/gcp/stackdriver/metrics/config.rs index 40f9a926b2098..28f7bf5862566 100644 --- a/src/sinks/gcp/stackdriver/metrics/config.rs +++ b/src/sinks/gcp/stackdriver/metrics/config.rs @@ -9,7 +9,9 @@ use crate::{ gcp, prelude::*, util::{ - http::{http_response_retry_logic, HttpService, HttpServiceRequestBuilder}, + http::{ + http_response_retry_logic, HttpRequest, HttpService, HttpServiceRequestBuilder, + }, service::TowerRequestConfigDefaults, }, }, @@ -156,11 +158,11 @@ pub(super) struct StackdriverMetricsServiceRequestBuilder { pub(super) auth: GcpAuthenticator, } -impl HttpServiceRequestBuilder for StackdriverMetricsServiceRequestBuilder { - fn build(&self, body: Bytes) -> Request { +impl HttpServiceRequestBuilder<()> for StackdriverMetricsServiceRequestBuilder { + fn build(&self, mut request: HttpRequest<()>) -> Request { let mut request = Request::post(self.uri.clone()) .header("Content-Type", "application/json") - .body(body) + .body(request.take_payload()) .unwrap(); self.auth.apply(&mut request); diff --git a/src/sinks/gcp/stackdriver/metrics/request_builder.rs b/src/sinks/gcp/stackdriver/metrics/request_builder.rs index ababa8c598cc5..c1db0e0319a5f 100644 --- a/src/sinks/gcp/stackdriver/metrics/request_builder.rs +++ b/src/sinks/gcp/stackdriver/metrics/request_builder.rs @@ -16,7 +16,7 @@ impl RequestBuilder> for StackdriverMetricsRequestBuilder { type Events = Vec; type Encoder = StackdriverMetricsEncoder; type Payload = Bytes; - type Request = HttpRequest; + type Request = HttpRequest<()>; type Error = io::Error; fn compression(&self) -> Compression { @@ -42,7 +42,7 @@ impl RequestBuilder> for StackdriverMetricsRequestBuilder { request_metadata: RequestMetadata, payload: EncodeResult, ) -> Self::Request { - HttpRequest::new(payload.into_payload(), metadata, 
request_metadata) + HttpRequest::new(payload.into_payload(), metadata, request_metadata, ()) } } diff --git a/src/sinks/gcp/stackdriver/metrics/sink.rs b/src/sinks/gcp/stackdriver/metrics/sink.rs index e87e69dcbe1b7..6eaf30898b4c3 100644 --- a/src/sinks/gcp/stackdriver/metrics/sink.rs +++ b/src/sinks/gcp/stackdriver/metrics/sink.rs @@ -32,7 +32,7 @@ pub(super) struct StackdriverMetricsSink { impl StackdriverMetricsSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, @@ -89,7 +89,7 @@ where #[async_trait::async_trait] impl StreamSink for StackdriverMetricsSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, diff --git a/src/sinks/honeycomb/request_builder.rs b/src/sinks/honeycomb/request_builder.rs index a84c9dec2ba4b..ba6a84f402039 100644 --- a/src/sinks/honeycomb/request_builder.rs +++ b/src/sinks/honeycomb/request_builder.rs @@ -16,7 +16,7 @@ impl RequestBuilder> for HoneycombRequestBuilder { type Events = Vec; type Encoder = HoneycombEncoder; type Payload = Bytes; - type Request = HttpRequest; + type Request = HttpRequest<()>; type Error = io::Error; fn compression(&self) -> Compression { @@ -42,6 +42,6 @@ impl RequestBuilder> for HoneycombRequestBuilder { request_metadata: RequestMetadata, payload: EncodeResult, ) -> Self::Request { - HttpRequest::new(payload.into_payload(), metadata, request_metadata) + HttpRequest::new(payload.into_payload(), metadata, request_metadata, ()) } } diff --git a/src/sinks/honeycomb/service.rs b/src/sinks/honeycomb/service.rs index 3e97c981382b6..8fce8e186e746 100644 --- a/src/sinks/honeycomb/service.rs +++ b/src/sinks/honeycomb/service.rs @@ -4,7 +4,7 @@ use bytes::Bytes; use http::{Request, Uri}; use vector_lib::sensitive_string::SensitiveString; -use crate::sinks::util::http::HttpServiceRequestBuilder; +use crate::sinks::util::http::{HttpRequest, HttpServiceRequestBuilder}; use super::config::HTTP_HEADER_HONEYCOMB; @@ -14,12 +14,12 @@ pub(super) struct HoneycombSvcRequestBuilder { pub(super) api_key: SensitiveString, } -impl HttpServiceRequestBuilder for HoneycombSvcRequestBuilder { - fn build(&self, body: Bytes) -> Request { - let request = Request::post(&self.uri).header(HTTP_HEADER_HONEYCOMB, self.api_key.inner()); +impl HttpServiceRequestBuilder<()> for HoneycombSvcRequestBuilder { + fn build(&self, mut request: HttpRequest<()>) -> Request { + let builder = Request::post(&self.uri).header(HTTP_HEADER_HONEYCOMB, self.api_key.inner()); - request - .body(body) + builder + .body(request.take_payload()) .expect("Failed to assign body to request- builder has errors") } } diff --git a/src/sinks/honeycomb/sink.rs b/src/sinks/honeycomb/sink.rs index fe2bf4cdfc598..2ccc6ac41323f 100644 --- a/src/sinks/honeycomb/sink.rs +++ b/src/sinks/honeycomb/sink.rs @@ -15,7 +15,7 @@ pub(super) struct HoneycombSink { impl HoneycombSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, @@ -63,7 +63,7 @@ where #[async_trait::async_trait] impl StreamSink for HoneycombSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, diff --git 
a/src/sinks/http/request_builder.rs b/src/sinks/http/request_builder.rs index 7c102a4dd9f9a..0b08ef7677c9b 100644 --- a/src/sinks/http/request_builder.rs +++ b/src/sinks/http/request_builder.rs @@ -17,7 +17,7 @@ impl RequestBuilder> for HttpRequestBuilder { type Events = Vec; type Encoder = HttpEncoder; type Payload = Bytes; - type Request = HttpRequest; + type Request = HttpRequest<()>; type Error = io::Error; fn compression(&self) -> Compression { @@ -43,6 +43,6 @@ impl RequestBuilder> for HttpRequestBuilder { request_metadata: RequestMetadata, payload: EncodeResult, ) -> Self::Request { - HttpRequest::new(payload.into_payload(), metadata, request_metadata) + HttpRequest::new(payload.into_payload(), metadata, request_metadata, ()) } } diff --git a/src/sinks/http/service.rs b/src/sinks/http/service.rs index 8a096ebe6f3f4..dffbbcac498e2 100644 --- a/src/sinks/http/service.rs +++ b/src/sinks/http/service.rs @@ -6,7 +6,10 @@ use indexmap::IndexMap; use crate::{ http::Auth, - sinks::util::{http::HttpServiceRequestBuilder, UriSerde}, + sinks::util::{ + http::{HttpRequest, HttpServiceRequestBuilder}, + UriSerde, + }, }; use super::config::HttpMethod; @@ -42,8 +45,8 @@ impl HttpSinkRequestBuilder { } } -impl HttpServiceRequestBuilder for HttpSinkRequestBuilder { - fn build(&self, body: Bytes) -> Request { +impl HttpServiceRequestBuilder<()> for HttpSinkRequestBuilder { + fn build(&self, mut request: HttpRequest<()>) -> Request { let method: Method = self.method.into(); let uri: Uri = self.uri.uri.clone(); let mut builder = Request::builder().method(method).uri(uri); @@ -67,7 +70,7 @@ impl HttpServiceRequestBuilder for HttpSinkRequestBuilder { // The request building should not have errors at this point let mut request = builder - .body(body) + .body(request.take_payload()) .expect("Failed to assign body to request- builder has errors"); if let Some(auth) = &self.auth { diff --git a/src/sinks/http/sink.rs b/src/sinks/http/sink.rs index 575e558e2d013..79cea97c0dc27 100644 --- a/src/sinks/http/sink.rs +++ b/src/sinks/http/sink.rs @@ -12,7 +12,7 @@ pub(super) struct HttpSink { impl HttpSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, @@ -62,7 +62,7 @@ where #[async_trait::async_trait] impl StreamSink for HttpSink where - S: Service + Send + 'static, + S: Service> + Send + 'static, S::Future: Send + 'static, S::Response: DriverResponse + Send + 'static, S::Error: std::fmt::Debug + Into + Send, diff --git a/src/sinks/util/http.rs b/src/sinks/util/http.rs index 168cc95d8151c..8ce74f6e98ebb 100644 --- a/src/sinks/util/http.rs +++ b/src/sinks/util/http.rs @@ -607,34 +607,45 @@ pub fn validate_headers( /// Request type for use in the `Service` implementation of HTTP stream sinks. #[derive(Clone)] -pub struct HttpRequest { +pub struct HttpRequest { payload: Bytes, finalizers: EventFinalizers, request_metadata: RequestMetadata, + additional_metadata: T, } -impl HttpRequest { +impl HttpRequest { /// Creates a new `HttpRequest`. 
From 448c9d19148c3707af54c7e2be90440de3a0316c Mon Sep 17 00:00:00 2001
From: Sebastian Tia <75666019+sebastiantia@users.noreply.github.com>
Date: Thu, 15 Feb 2024 14:32:58 -0500
Subject: [PATCH 0026/1491] chore(deps): Bump MSRV from 1.71.1 to 1.74 (#19884)

update

From 2b0f06eb5de6dc008bd4c98e49ce82a5f0837942 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 16 Feb 2024 14:52:33 +0000
Subject: [PATCH 0027/1491] chore(deps): Bump syn from 2.0.48 to 2.0.49 (#19890)

Bumps [syn](https://github.com/dtolnay/syn) from 2.0.48 to 2.0.49.
- [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.48...2.0.49) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1bbfb0508cd72..e1f521bdbf2bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strum 0.25.0", - "syn 2.0.48", + "syn 2.0.49", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -708,7 +708,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1466,7 +1466,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -1593,7 +1593,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", "syn_derive", ] @@ -2031,7 +2031,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2559,7 +2559,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2631,7 +2631,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2664,7 +2664,7 @@ checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ "darling_core 0.20.6", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -2769,7 +2769,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3050,7 +3050,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3062,7 +3062,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3082,7 +3082,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -3484,7 +3484,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5174,7 +5174,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5294,7 +5294,7 @@ dependencies = [ "proc-macro2 
1.0.78", "quote 1.0.35", "regex", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5743,7 +5743,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5755,7 +5755,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -5943,7 +5943,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6228,7 +6228,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6316,7 +6316,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6593,7 +6593,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.78", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -6796,7 +6796,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.48", + "syn 2.0.49", "tempfile", "which 4.4.2", ] @@ -6824,7 +6824,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -7613,7 +7613,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.48", + "syn 2.0.49", "unicode-ident", ] @@ -8008,7 +8008,7 @@ checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8019,7 +8019,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8081,7 +8081,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8154,7 +8154,7 @@ dependencies = [ "darling 0.20.6", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8433,7 +8433,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8617,7 +8617,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8630,7 +8630,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8673,9 +8673,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.48" +version = "2.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f3531638e407dfc0814761abb7c00a5b54992b849452a0646b7f65c9f770f3f" +checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", @@ -8691,7 +8691,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -8859,7 +8859,7 @@ checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -9007,7 +9007,7 @@ checksum = 
"5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -9243,7 +9243,7 @@ dependencies = [ "proc-macro2 1.0.78", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -9346,7 +9346,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -9580,7 +9580,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -9610,7 +9610,7 @@ checksum = "291db8a81af4840c10d636e047cac67664e343be44e24dfdbd1492df9a5d3390" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] @@ -10168,7 +10168,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.48", + "syn 2.0.49", "tracing 0.1.40", ] @@ -10181,7 +10181,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.48", + "syn 2.0.49", "vector-config", "vector-config-common", ] @@ -10589,7 +10589,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", "wasm-bindgen-shared", ] @@ -10623,7 +10623,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11068,7 +11068,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.48", + "syn 2.0.49", ] [[package]] From 8223dca26efd790ec4fdbf5eb7626f2cc32d99a2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Feb 2024 15:24:15 +0000 Subject: [PATCH 0028/1491] chore(deps): Bump roaring from 0.10.2 to 0.10.3 (#19889) * chore(deps): Bump roaring from 0.10.2 to 0.10.3 Bumps [roaring](https://github.com/RoaringBitmap/roaring-rs) from 0.10.2 to 0.10.3. - [Release notes](https://github.com/RoaringBitmap/roaring-rs/releases) - [Commits](https://github.com/RoaringBitmap/roaring-rs/compare/v0.10.2...v0.10.3) --- updated-dependencies: - dependency-name: roaring dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] * Update licenses Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 15 ++------------- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 2 -- 3 files changed, 3 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e1f521bdbf2bb..aad230852c509 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1668,12 +1668,6 @@ dependencies = [ "syn 1.0.109", ] -[[package]] -name = "bytemuck" -version = "1.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "374d28ec25809ee0e23827c2ab573d729e293f281dfe393500e7ad618baa61c6" - [[package]] name = "byteorder" version = "1.5.0" @@ -7553,14 +7547,9 @@ dependencies = [ [[package]] name = "roaring" -version = "0.10.2" +version = "0.10.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6106b5cf8587f5834158895e9715a3c6c9716c8aefab57f1f7680917191c7873" -dependencies = [ - "bytemuck", - "byteorder", - "retain_mut", -] +checksum = "a1c77081a55300e016cb86f2864415b7518741879db925b8d488a0ee0d2da6bf" [[package]] name = "roxmltree" diff --git a/Cargo.toml b/Cargo.toml index 6210c2b4d1947..3974c4c383458 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -312,7 +312,7 @@ rand_distr = { version = "0.4.3", default-features = false } rdkafka = { version = "0.35.0", default-features = false, features = ["tokio", "libz", "ssl", "zstd"], optional = true } redis = { version = "0.24.0", default-features = false, features = ["connection-manager", "tokio-comp", "tokio-native-tls-comp"], optional = true } regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } -roaring = { version = "0.10.2", default-features = false, optional = true } +roaring = { version = "0.10.3", default-features = false, optional = true } rumqttc = { version = "0.23.0", default-features = false, features = ["use-rustls"], optional = true } seahash = { version = "4.1.0", default-features = false } semver = { version = "1.0.21", default-features = false, features = ["serde", "std"], optional = true } diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 646bd38c122ad..98d18786c20b3 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -98,7 +98,6 @@ bson,https://github.com/mongodb/bson-rust,MIT,"Y. T. 
Chung , bstr,https://github.com/BurntSushi/bstr,MIT OR Apache-2.0,Andrew Gallant bumpalo,https://github.com/fitzgen/bumpalo,MIT OR Apache-2.0,Nick Fitzgerald bytecheck,https://github.com/djkoloski/bytecheck,MIT,David Koloski -bytemuck,https://github.com/Lokathor/bytemuck,Zlib OR Apache-2.0 OR MIT,Lokathor byteorder,https://github.com/BurntSushi/byteorder,Unlicense OR MIT,Andrew Gallant bytes,https://github.com/carllerche/bytes,MIT,Carl Lerche bytes,https://github.com/tokio-rs/bytes,MIT,"Carl Lerche , Sean McArthur " @@ -454,7 +453,6 @@ regex-syntax,https://github.com/rust-lang/regex/tree/master/regex-syntax,MIT OR rend,https://github.com/djkoloski/rend,MIT,David Koloski reqwest,https://github.com/seanmonstar/reqwest,MIT OR Apache-2.0,Sean McArthur resolv-conf,http://github.com/tailhook/resolv-conf,MIT OR Apache-2.0,paul@colomiets.name -retain_mut,https://github.com/upsuper/retain_mut,MIT,Xidorn Quan rfc6979,https://github.com/RustCrypto/signatures/tree/master/rfc6979,Apache-2.0 OR MIT,RustCrypto Developers ring,https://github.com/briansmith/ring,ISC AND Custom,Brian Smith rkyv,https://github.com/rkyv/rkyv,MIT,David Koloski From 788f0c30ee259d5e918be074d059085107bd69bc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 16 Feb 2024 15:46:52 +0000 Subject: [PATCH 0029/1491] chore(deps): Bump the aws group with 4 updates (#19888) Bumps the aws group with 4 updates: [aws-smithy-types](https://github.com/smithy-lang/smithy-rs), [aws-smithy-runtime-api](https://github.com/smithy-lang/smithy-rs), [aws-smithy-runtime](https://github.com/smithy-lang/smithy-rs) and [aws-smithy-async](https://github.com/smithy-lang/smithy-rs). Updates `aws-smithy-types` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-runtime-api` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-runtime` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-async` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-smithy-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-runtime-api dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-runtime dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-async dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 17 +++++++++-------- Cargo.toml | 8 ++++---- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index aad230852c509..9d325652205a9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1046,9 +1046,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "426a5bc369ca7c8d3686439e46edc727f397a47ab3696b13f3ae8c81b3b36132" +checksum = "8ec441341e019c441aa78472ed6d206cfe198026c495277a95ac5bebda520742" dependencies = [ "futures-util", "pin-project-lite", @@ -1129,9 +1129,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4cb6b3afa5fc9825a75675975dcc3e21764b5476bc91dbc63df4ea3d30a576e" +checksum = "3b36f1f98c8d7b6256b86d4a3c8c4abb120670267baa9712a485ba477eaac9e9" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1154,14 +1154,15 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23165433e80c04e8c09cee66d171292ae7234bae05fa9d5636e33095eae416b2" +checksum = "180898ed701a773fb3fadbd94b9e9559125cf88eeb1815ab99e35d4f5f34f7fb" dependencies = [ "aws-smithy-async", "aws-smithy-types", "bytes 1.5.0", "http 0.2.9", + "http 1.0.0", "pin-project-lite", "tokio", "tracing 0.1.40", @@ -1169,9 +1170,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c94a5bec34850b92c9a054dad57b95c1d47f25125f55973e19f6ad788f0381ff" +checksum = "897f1db4020ad91f2c2421945ec49b7e3eb81cc3fea99e8b5dd5be721e697fed" dependencies = [ "base64-simd", "bytes 1.5.0", diff --git a/Cargo.toml b/Cargo.toml index 3974c4c383458..7536407311683 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -186,9 +186,9 @@ aws-sigv4 = { version = "1.1.5", default-features = false, features = ["sign-htt aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } aws-credential-types = { version = "1.1.5", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } -aws-smithy-types = { version = "1.0.2", default-features = false, optional = true } -aws-smithy-runtime-api = { version = "1.1.3", default-features = false, optional = true } -aws-smithy-runtime = { version = "1.1.5", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } +aws-smithy-types = { version = "1.1.6", default-features = false, optional = true } +aws-smithy-runtime-api = { version = "1.1.6", default-features = false, optional = true } +aws-smithy-runtime = { version = "1.1.6", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } aws-smithy-async = { version = "1.0.2", default-features = false, features = ["rt-tokio"], optional = true } # Azure @@ -358,7 +358,7 @@ openssl-src = { version = "300", default-features = false, features = ["force-en [dev-dependencies] approx = "0.5.1" assert_cmd = { version = "2.0.13", default-features = false } 
-aws-smithy-runtime = { version = "1.1.5", default-features = false, features = ["tls-rustls"] } +aws-smithy-runtime = { version = "1.1.6", default-features = false, features = ["tls-rustls"] } azure_core = { version = "0.17", default-features = false, features = ["enable_reqwest", "azurite_workaround"] } azure_identity = { version = "0.17", default-features = false, features = ["enable_reqwest"] } azure_storage_blobs = { version = "0.17", default-features = false, features = ["azurite_workaround"] } From 5d8160d72743df1e02fff9f69a8d4e37e1f2577a Mon Sep 17 00:00:00 2001 From: Siavash Safi Date: Fri, 16 Feb 2024 17:32:26 +0100 Subject: [PATCH 0030/1491] enhancement(s3 sink): add express one zone storage class (#19893) enhancement(s3 sink): add express one zone class This change adds support for S3 Express One Zone Storage Class. Closes #19891 Signed-off-by: Siavash Safi --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- ..._s3_sink_add_express_onezone_storage_class.enhancement.md | 3 +++ src/sinks/s3_common/config.rs | 5 +++++ website/cue/reference/components/sinks/base/aws_s3.cue | 1 + 5 files changed, 12 insertions(+), 3 deletions(-) create mode 100644 changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md diff --git a/Cargo.lock b/Cargo.lock index 9d325652205a9..4a7abbbf93cb3 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -924,9 +924,9 @@ dependencies = [ [[package]] name = "aws-sdk-s3" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "859a207781360445504b89e790aebf682d80883280aa0d9b6e2e67740a733147" +checksum = "9dcafc2fe52cc30b2d56685e2fa6a879ba50d79704594852112337a472ddbd24" dependencies = [ "aws-credential-types", "aws-http", diff --git a/Cargo.toml b/Cargo.toml index 7536407311683..3c9a429e397ce 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -173,7 +173,7 @@ metrics = "0.21.1" metrics-tracing-context = { version = "0.14.0", default-features = false } # AWS - Official SDK -aws-sdk-s3 = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } +aws-sdk-s3 = { version = "1.4.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-sqs = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-sns = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-cloudwatch = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } diff --git a/changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md b/changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md new file mode 100644 index 0000000000000..f27511bf2abe7 --- /dev/null +++ b/changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md @@ -0,0 +1,3 @@ +A new `EXPRESS_ONEZONE` option was added to `storage_class` for `aws_s3` sink. + +authors: siavashs diff --git a/src/sinks/s3_common/config.rs b/src/sinks/s3_common/config.rs index dad9d8ddfca44..a8256d8654f39 100644 --- a/src/sinks/s3_common/config.rs +++ b/src/sinks/s3_common/config.rs @@ -158,6 +158,9 @@ pub enum S3StorageClass { /// Infrequently Accessed. StandardIa, + /// High Performance (single Availability zone). + ExpressOnezone, + /// Infrequently Accessed (single Availability zone). 
OnezoneIa,
 
@@ -175,6 +178,7 @@ impl From<S3StorageClass> for StorageClass {
             S3StorageClass::ReducedRedundancy => Self::ReducedRedundancy,
             S3StorageClass::IntelligentTiering => Self::IntelligentTiering,
             S3StorageClass::StandardIa => Self::StandardIa,
+            S3StorageClass::ExpressOnezone => Self::ExpressOnezone,
             S3StorageClass::OnezoneIa => Self::OnezoneIa,
             S3StorageClass::Glacier => Self::Glacier,
             S3StorageClass::DeepArchive => Self::DeepArchive,
@@ -379,6 +383,7 @@ mod tests {
             ("DEEP_ARCHIVE", S3StorageClass::DeepArchive),
             ("GLACIER", S3StorageClass::Glacier),
             ("INTELLIGENT_TIERING", S3StorageClass::IntelligentTiering),
+            ("EXPRESS_ONEZONE", S3StorageClass::ExpressOnezone),
             ("ONEZONE_IA", S3StorageClass::OnezoneIa),
             ("REDUCED_REDUNDANCY", S3StorageClass::ReducedRedundancy),
             ("STANDARD", S3StorageClass::Standard),
diff --git a/website/cue/reference/components/sinks/base/aws_s3.cue b/website/cue/reference/components/sinks/base/aws_s3.cue
index 8b0b152d8b997..f8b7e29a7a15d 100644
--- a/website/cue/reference/components/sinks/base/aws_s3.cue
+++ b/website/cue/reference/components/sinks/base/aws_s3.cue
@@ -940,6 +940,7 @@ base: components: sinks: aws_s3: configuration: {
 				default: "STANDARD"
 				enum: {
 					DEEP_ARCHIVE:        "Glacier Deep Archive."
+					EXPRESS_ONEZONE:     "High Performance (single Availability zone)."
 					GLACIER:             "Glacier Flexible Retrieval."
 					INTELLIGENT_TIERING: "Intelligent Tiering."
 					ONEZONE_IA:          "Infrequently Accessed (single Availability zone)."

From a798f681d392e761d3e1e185ca9d7e8075a892c5 Mon Sep 17 00:00:00 2001
From: Sebastian Tia <75666019+sebastiantia@users.noreply.github.com>
Date: Fri, 16 Feb 2024 16:39:40 -0500
Subject: [PATCH 0031/1491] enhancement(http sink): update HTTP request
 builder to return error (#19886)

* mvp

* formatting

* test fix

* update

* take ownership of payload

* changelog

* remove Option wrap

* changelog

* review

* test fix

* bump rust version

* update build func and usages

* merge

* fix template rendering error metric

* decided against sneaking in a fix, will put in separate PR

* use statics

---
 src/sinks/clickhouse/service.rs             | 13 +++++++----
 src/sinks/gcp/stackdriver/logs/service.rs   | 22 ++++++++++-------
 src/sinks/gcp/stackdriver/logs/tests.rs     |  5 +++-
 src/sinks/gcp/stackdriver/metrics/config.rs | 25 +++++++++++---------
 src/sinks/honeycomb/service.rs              | 11 ++++++---
 src/sinks/http/service.rs                   | 26 ++++++++++++++-------
 src/sinks/mod.rs                            |  2 ++
 src/sinks/util/http.rs                      |  4 ++--
 8 files changed, 69 insertions(+), 39 deletions(-)

diff --git a/src/sinks/clickhouse/service.rs b/src/sinks/clickhouse/service.rs
index 66180dee0582a..f707c6fab5706 100644
--- a/src/sinks/clickhouse/service.rs
+++ b/src/sinks/clickhouse/service.rs
@@ -10,7 +10,7 @@ use crate::{
         http::{HttpRequest, HttpResponse, HttpRetryLogic, HttpServiceRequestBuilder},
         retries::RetryAction,
     },
-    UriParseSnafu,
+    HTTPRequestBuilderSnafu, UriParseSnafu,
     },
 };
 use bytes::Bytes;
@@ -69,7 +69,10 @@ pub(super) struct ClickhouseServiceRequestBuilder {
 }
 
 impl HttpServiceRequestBuilder<PartitionKey> for ClickhouseServiceRequestBuilder {
-    fn build(&self, mut request: HttpRequest<PartitionKey>) -> Request<Bytes> {
+    fn build(
+        &self,
+        mut request: HttpRequest<PartitionKey>,
+    ) -> Result<Request<Bytes>, crate::Error> {
         let metadata = request.get_additional_metadata();
 
         let uri = set_uri_query(
@@ -79,8 +82,7 @@ impl HttpServiceRequestBuilder<PartitionKey> for ClickhouseServiceRequestBuilder
             metadata.format,
             self.skip_unknown_fields,
             self.date_time_best_effort,
-        )
-        .expect("building uri failed unexpectedly");
+        )?;
 
         let auth: Option<Auth> = self.auth.clone();
 
@@ -98,7 +100,8 @@ impl HttpServiceRequestBuilder<PartitionKey> for ClickhouseServiceRequestBuilder
 
         builder
            .body(payload)
-            .expect("building HTTP request failed unexpectedly")
+            .context(HTTPRequestBuilderSnafu)
+            .map_err(Into::into)
     }
 }
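The ClickHouse hunk above sets the template the rest of this patch follows: `build` now returns a `Result` and propagates URI and request-builder failures with `?` instead of panicking in `.expect()`. A compilable, self-contained sketch of that shape — the hand-rolled `BuildError` is an illustrative stand-in for the snafu-generated context type and Vector's boxed `crate::Error`:

```rust
// Sketch of the fallible builder shape; the real code reaches the same
// effect with `.context(HTTPRequestBuilderSnafu).map_err(Into::into)`.
use std::fmt;

#[derive(Debug)]
struct BuildError(String);

impl fmt::Display for BuildError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "HTTP request build error: {}", self.0)
    }
}

impl std::error::Error for BuildError {}

// Returns the (uri, body) pair on success instead of panicking on a bad
// uri, mirroring `build(...) -> Result<Request<Bytes>, crate::Error>`.
fn build(uri: &str, body: Vec<u8>) -> Result<(String, Vec<u8>), BuildError> {
    if !uri.starts_with("http://") && !uri.starts_with("https://") {
        return Err(BuildError(format!("invalid uri: {uri}")));
    }
    Ok((uri.to_owned(), body))
}

fn main() {
    assert!(build("https://example.com/insert", b"ok".to_vec()).is_ok());
    // The caller now sees an `Err` it can log or retry, not a panic.
    assert!(build("not-a-uri", Vec::new()).is_err());
}
```
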
diff --git a/src/sinks/gcp/stackdriver/logs/service.rs b/src/sinks/gcp/stackdriver/logs/service.rs
index 45114d8dcd9a2..9a08214fc1959 100644
--- a/src/sinks/gcp/stackdriver/logs/service.rs
+++ b/src/sinks/gcp/stackdriver/logs/service.rs
@@ -1,12 +1,16 @@
 //! Service implementation for the `gcp_stackdriver_logs` sink.
 
 use bytes::Bytes;
-use http::{Request, Uri};
+use http::{header::CONTENT_TYPE, Request, Uri};
 
 use crate::{
     gcp::GcpAuthenticator,
-    sinks::util::http::{HttpRequest, HttpServiceRequestBuilder},
+    sinks::{
+        util::http::{HttpRequest, HttpServiceRequestBuilder},
+        HTTPRequestBuilderSnafu,
+    },
 };
+use snafu::ResultExt;
 
 #[derive(Debug, Clone)]
 pub(super) struct StackdriverLogsServiceRequestBuilder {
@@ -15,14 +19,16 @@ pub(super) struct StackdriverLogsServiceRequestBuilder {
 }
 
 impl HttpServiceRequestBuilder<()> for StackdriverLogsServiceRequestBuilder {
-    fn build(&self, mut request: HttpRequest<()>) -> Request<Bytes> {
-        let mut builder = Request::post(self.uri.clone())
-            .header("Content-Type", "application/json")
+    fn build(&self, mut request: HttpRequest<()>) -> Result<Request<Bytes>, crate::Error> {
+        let builder = Request::post(self.uri.clone()).header(CONTENT_TYPE, "application/json");
+
+        let mut request = builder
             .body(request.take_payload())
-            .unwrap();
+            .context(HTTPRequestBuilderSnafu)
+            .map_err(Into::<crate::Error>::into)?;
 
-        self.auth.apply(&mut builder);
+        self.auth.apply(&mut request);
 
-        builder
+        Ok(request)
     }
 }
diff --git a/src/sinks/gcp/stackdriver/logs/tests.rs b/src/sinks/gcp/stackdriver/logs/tests.rs
index 3e736baf54a84..94b2b1f278158 100644
--- a/src/sinks/gcp/stackdriver/logs/tests.rs
+++ b/src/sinks/gcp/stackdriver/logs/tests.rs
@@ -230,7 +230,10 @@ async fn correct_request() {
         (),
     );
 
-    let request = stackdriver_logs_service_request_builder.build(http_request);
+    let request = stackdriver_logs_service_request_builder
+        .build(http_request)
+        .unwrap();
+
     let (parts, body) = request.into_parts();
 
     let json: serde_json::Value = serde_json::from_slice(&body[..]).unwrap();
diff --git a/src/sinks/gcp/stackdriver/metrics/config.rs b/src/sinks/gcp/stackdriver/metrics/config.rs
index 28f7bf5862566..d613f2806d102 100644
--- a/src/sinks/gcp/stackdriver/metrics/config.rs
+++ b/src/sinks/gcp/stackdriver/metrics/config.rs
@@ -1,7 +1,11 @@
 use bytes::Bytes;
 use goauth::scopes::Scope;
-use http::{Request, Uri};
+use http::{header::CONTENT_TYPE, Request, Uri};
 
+use super::{
+    request_builder::{StackdriverMetricsEncoder, StackdriverMetricsRequestBuilder},
+    sink::StackdriverMetricsSink,
+};
 use crate::{
     gcp::{GcpAuthConfig, GcpAuthenticator},
     http::HttpClient,
@@ -14,13 +18,10 @@ use crate::{
         },
         service::TowerRequestConfigDefaults,
     },
+        HTTPRequestBuilderSnafu,
     },
 };
-
-use super::{
-    request_builder::{StackdriverMetricsEncoder, StackdriverMetricsRequestBuilder},
-    sink::StackdriverMetricsSink,
-};
+use snafu::ResultExt;
 
 #[derive(Clone, Copy, Debug)]
 pub struct StackdriverMetricsTowerRequestConfigDefaults;
@@ -159,15 +160,17 @@ pub(super) struct StackdriverMetricsServiceRequestBuilder {
 }
 
 impl HttpServiceRequestBuilder<()> for StackdriverMetricsServiceRequestBuilder {
-    fn build(&self, mut request: HttpRequest<()>) -> Request<Bytes> {
-        let mut request = Request::post(self.uri.clone())
-            .header("Content-Type", "application/json")
+    fn build(&self, mut request: HttpRequest<()>) -> Result<Request<Bytes>, crate::Error> {
+        let builder = Request::post(self.uri.clone()).header(CONTENT_TYPE, "application/json");
+
+        let mut request = builder
             .body(request.take_payload())
-            .unwrap();
+            .context(HTTPRequestBuilderSnafu)
+            .map_err(Into::<crate::Error>::into)?;
 
         self.auth.apply(&mut request);
 
-        request
+        Ok(request)
     }
 }
diff --git a/src/sinks/honeycomb/service.rs b/src/sinks/honeycomb/service.rs
index 8fce8e186e746..296bb6ef94121 100644
--- a/src/sinks/honeycomb/service.rs
+++ b/src/sinks/honeycomb/service.rs
@@ -4,7 +4,11 @@ use bytes::Bytes;
 use http::{Request, Uri};
 use vector_lib::sensitive_string::SensitiveString;
 
-use crate::sinks::util::http::{HttpRequest, HttpServiceRequestBuilder};
+use crate::sinks::{
+    util::http::{HttpRequest, HttpServiceRequestBuilder},
+    HTTPRequestBuilderSnafu,
+};
+use snafu::ResultExt;
 
 use super::config::HTTP_HEADER_HONEYCOMB;
 
@@ -15,11 +19,12 @@ pub(super) struct HoneycombSvcRequestBuilder {
 }
 
 impl HttpServiceRequestBuilder<()> for HoneycombSvcRequestBuilder {
-    fn build(&self, mut request: HttpRequest<()>) -> Request<Bytes> {
+    fn build(&self, mut request: HttpRequest<()>) -> Result<Request<Bytes>, crate::Error> {
         let builder = Request::post(&self.uri).header(HTTP_HEADER_HONEYCOMB, self.api_key.inner());
 
         builder
             .body(request.take_payload())
-            .expect("Failed to assign body to request- builder has errors")
+            .context(HTTPRequestBuilderSnafu)
+            .map_err(Into::into)
     }
 }
diff --git a/src/sinks/http/service.rs b/src/sinks/http/service.rs
index dffbbcac498e2..460482e6b6800 100644
--- a/src/sinks/http/service.rs
+++ b/src/sinks/http/service.rs
@@ -1,16 +1,23 @@
 //! Service implementation for the `http` sink.
 
 use bytes::Bytes;
-use http::{HeaderName, HeaderValue, Method, Request, Uri};
+use http::{
+    header::{CONTENT_ENCODING, CONTENT_TYPE},
+    HeaderName, HeaderValue, Method, Request, Uri,
+};
 use indexmap::IndexMap;
 
 use crate::{
     http::Auth,
-    sinks::util::{http::HttpServiceRequestBuilder, UriSerde},
+    sinks::{
+        util::{
+            http::{HttpRequest, HttpServiceRequestBuilder},
+            UriSerde,
+        },
+        HTTPRequestBuilderSnafu,
+    },
 };
+use snafu::ResultExt;
 
 use super::config::HttpMethod;
 
@@ -46,17 +53,17 @@ impl HttpSinkRequestBuilder {
 }
 
 impl HttpServiceRequestBuilder<()> for HttpSinkRequestBuilder {
-    fn build(&self, mut request: HttpRequest<()>) -> Request<Bytes> {
+    fn build(&self, mut request: HttpRequest<()>) -> Result<Request<Bytes>, crate::Error> {
         let method: Method = self.method.into();
         let uri: Uri = self.uri.uri.clone();
         let mut builder = Request::builder().method(method).uri(uri);
 
         if let Some(content_type) = &self.content_type {
-            builder = builder.header("Content-Type", content_type);
+            builder = builder.header(CONTENT_TYPE, content_type);
         }
 
         if let Some(content_encoding) = &self.content_encoding {
-            builder = builder.header("Content-Encoding", content_encoding);
+            builder = builder.header(CONTENT_ENCODING, content_encoding);
         }
 
         let headers = builder
UriParseError { source: ::http::uri::InvalidUri },
+    #[snafu(display("HTTP request build error: {}", source))]
+    HTTPRequestBuilderError { source: ::http::Error },
 }
 
 /// Common healthcheck errors
diff --git a/src/sinks/util/http.rs b/src/sinks/util/http.rs
index 8ce74f6e98ebb..879f8b59ca5cf 100644
--- a/src/sinks/util/http.rs
+++ b/src/sinks/util/http.rs
@@ -708,7 +708,7 @@ impl ItemBatchSize<Event> for HttpJsonBatchSizer {
 
 /// HTTP request builder for HTTP stream sinks using the generic `HttpService`
 pub trait HttpServiceRequestBuilder<T> {
-    fn build(&self, request: HttpRequest<T>) -> Request<Bytes>;
+    fn build(&self, request: HttpRequest<T>) -> Result<Request<Bytes>, crate::Error>;
 }
 
 /// Generic 'Service' implementation for HTTP stream sinks.
@@ -730,7 +730,7 @@ where
             let request_builder = Arc::clone(&http_request_builder);
 
             let fut: BoxFuture<'static, Result<Request<Bytes>, crate::Error>> =
-                Box::pin(async move { Ok(request_builder.build(req)) });
+                Box::pin(async move { request_builder.build(req) });
 
             fut
         });

From 50a0c9bc118ee282144b14b3ed49f84cb5ce7c93 Mon Sep 17 00:00:00 2001
From: Jesse Szwedko
Date: Fri, 16 Feb 2024 13:41:48 -0800
Subject: [PATCH 0032/1491] chore(ci): Add a timeout to all CI jobs (#19895)

* chore(ci): Add a timeout to all CI jobs

To override the default of 6 hours. This'll let us bump up the merge
queue timeout.

Signed-off-by: Jesse Szwedko

* Missed that e2e tests already had a timeout

Signed-off-by: Jesse Szwedko

---------

Signed-off-by: Jesse Szwedko
---
 .github/workflows/changes.yml                 | 3 +++
 .github/workflows/cli.yml                     | 1 +
 .github/workflows/comment-trigger.yml         | 1 +
 .github/workflows/create_preview_sites.yml    | 1 +
 .github/workflows/cross.yml                   | 2 ++
 .github/workflows/deny.yml                    | 1 +
 .github/workflows/e2e.yml                     | 3 ++-
 .github/workflows/environment.yml             | 1 +
 .github/workflows/gardener_issue_comment.yml  | 1 +
 .github/workflows/gardener_open_issue.yml     | 1 +
 .github/workflows/gardener_open_pr.yml        | 2 ++
 .../gardener_remove_waiting_author.yml        | 1 +
 .github/workflows/install-sh.yml              | 2 ++
 .github/workflows/integration-comment.yml     | 4 ++++
 .github/workflows/integration.yml             | 1 +
 .github/workflows/k8s_e2e.yml                 | 4 ++++
 .github/workflows/labeler.yml                 | 1 +
 .github/workflows/master_merge_queue.yml      | 1 +
 .github/workflows/misc.yml                    | 1 +
 .github/workflows/msrv.yml                    | 1 +
 .github/workflows/preview_site_trigger.yml    | 1 +
 .github/workflows/protobuf.yml                | 1 +
 .github/workflows/publish.yml                 | 17 +++++++++++++++++
 .github/workflows/regression.yml              | 11 +++++++++++
 .github/workflows/spelling.yml                | 1 +
 .github/workflows/test.yml                    | 2 ++
 .github/workflows/unit_mac.yml                | 1 +
 .github/workflows/unit_windows.yml            | 1 +
 .github/workflows/workload_checks.yml         | 2 ++
 29 files changed, 69 insertions(+), 1 deletion(-)

diff --git a/.github/workflows/changes.yml b/.github/workflows/changes.yml
index 7dec7b4a0162e..a497ad2dd16fd 100644
--- a/.github/workflows/changes.yml
+++ b/.github/workflows/changes.yml
@@ -128,6 +128,7 @@ jobs:
   # Detects changes that are not specific to integration tests
   source:
     runs-on: ubuntu-20.04
+    timeout-minutes: 5
     if: ${{ inputs.source }}
     outputs:
       source: ${{ steps.filter.outputs.source }}
@@ -195,6 +196,7 @@ jobs:
   # Detects changes that are specific to integration tests
   int_tests:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     if: ${{ inputs.int_tests }}
     outputs:
       all-tests: ${{ steps.filter.outputs.all-tests}}
@@ -252,6 +254,7 @@ jobs:
   # Detects changes that are specific to e2e tests
   e2e_tests:
     runs-on: ubuntu-latest
+    timeout-minutes: 5
     if: ${{ inputs.e2e_tests }}
     outputs:
       all-tests: ${{ steps.filter.outputs.all-tests}}
diff --git
a/.github/workflows/cli.yml b/.github/workflows/cli.yml index 460bfdaa01cf9..a75bce8a84d03 100644 --- a/.github/workflows/cli.yml +++ b/.github/workflows/cli.yml @@ -6,6 +6,7 @@ on: jobs: test-cli: runs-on: ubuntu-latest + timeout-minutes: 15 env: CARGO_INCREMENTAL: 0 steps: diff --git a/.github/workflows/comment-trigger.yml b/.github/workflows/comment-trigger.yml index 1a6dba864219b..cde8982661374 100644 --- a/.github/workflows/comment-trigger.yml +++ b/.github/workflows/comment-trigger.yml @@ -48,6 +48,7 @@ jobs: validate: name: Validate comment runs-on: ubuntu-latest + timeout-minutes: 5 if: | github.event.issue.pull_request && ( contains(github.event.comment.body, '/ci-run-all') || contains(github.event.comment.body, '/ci-run-cli') diff --git a/.github/workflows/create_preview_sites.yml b/.github/workflows/create_preview_sites.yml index bb6b7aa8e0982..ecb2023c425a1 100644 --- a/.github/workflows/create_preview_sites.yml +++ b/.github/workflows/create_preview_sites.yml @@ -25,6 +25,7 @@ on: jobs: create_preview_site: runs-on: ubuntu-latest + timeout-minutes: 5 steps: # Get the artifacts with the PR number and branch name diff --git a/.github/workflows/cross.yml b/.github/workflows/cross.yml index cc139a6efad55..cbac1658e3d7b 100644 --- a/.github/workflows/cross.yml +++ b/.github/workflows/cross.yml @@ -7,6 +7,7 @@ jobs: cross-linux: name: Cross - ${{ matrix.target }} runs-on: ubuntu-latest + timeout-minutes: 30 env: CARGO_INCREMENTAL: 0 strategy: @@ -79,6 +80,7 @@ jobs: update-pr-status: name: (PR comment) Signal result to PR runs-on: ubuntu-20.04 + timeout-minutes: 5 needs: cross-linux if: needs.cross-linux.result == 'success' && github.event_name == 'issue_comment' steps: diff --git a/.github/workflows/deny.yml b/.github/workflows/deny.yml index e3e7036bb8668..e42d8bedba666 100644 --- a/.github/workflows/deny.yml +++ b/.github/workflows/deny.yml @@ -19,6 +19,7 @@ on: jobs: test-deny: runs-on: ubuntu-latest + timeout-minutes: 15 env: CARGO_INCREMENTAL: 0 steps: diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 7f1d6fde4a9b5..55ed97c8f125d 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -50,6 +50,7 @@ jobs: e2e-tests: name: E2E Tests runs-on: [linux, ubuntu-20.04-8core] + timeout-minutes: 45 needs: changes if: always() && ( github.event_name == 'schedule' || ( @@ -58,7 +59,6 @@ jobs: || needs.changes.outputs.e2e-datadog-metrics == 'true' ) ) - timeout-minutes: 45 steps: - uses: actions/checkout@v3 with: @@ -101,6 +101,7 @@ jobs: e2e-test-suite: name: E2E Test Suite runs-on: ubuntu-latest + timeout-minutes: 5 if: always() needs: e2e-tests env: diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml index 923e5f36f3a9f..671e11e7538d7 100644 --- a/.github/workflows/environment.yml +++ b/.github/workflows/environment.yml @@ -14,6 +14,7 @@ env: jobs: publish-new-environment: runs-on: ubuntu-20.04 + timeout-minutes: 15 steps: - name: (PR comment) Get PR branch if: ${{ github.event_name == 'issue_comment' }} diff --git a/.github/workflows/gardener_issue_comment.yml b/.github/workflows/gardener_issue_comment.yml index 73625863650c7..91afeff7b3c0c 100644 --- a/.github/workflows/gardener_issue_comment.yml +++ b/.github/workflows/gardener_issue_comment.yml @@ -14,6 +14,7 @@ jobs: move-to-backlog: name: Move issues back to Gardener project board Triage runs-on: ubuntu-latest + timeout-minutes: 5 if: ${{ !github.event.issue.pull_request }} steps: - name: Generate authentication token diff --git 
a/.github/workflows/gardener_open_issue.yml b/.github/workflows/gardener_open_issue.yml index 58e21be64f30d..3c215513f950a 100644 --- a/.github/workflows/gardener_open_issue.yml +++ b/.github/workflows/gardener_open_issue.yml @@ -10,6 +10,7 @@ jobs: add-to-project: name: Add issue to Gardener project board runs-on: ubuntu-latest + timeout-minutes: 5 steps: - uses: actions/add-to-project@v0.5.0 with: diff --git a/.github/workflows/gardener_open_pr.yml b/.github/workflows/gardener_open_pr.yml index c047b196330d8..55bccc2429435 100644 --- a/.github/workflows/gardener_open_pr.yml +++ b/.github/workflows/gardener_open_pr.yml @@ -11,6 +11,7 @@ jobs: add-contributor-to-project: name: Add contributor PR to Gardener project board runs-on: ubuntu-latest + timeout-minutes: 5 if: ${{ github.actor != 'dependabot[bot]' }} steps: - name: Generate authentication token @@ -33,6 +34,7 @@ jobs: add-dependabot-to-project: name: Add dependabot PR to Gardener project board runs-on: ubuntu-latest + timeout-minutes: 5 if: ${{ github.actor == 'dependabot[bot]' }} steps: - uses: actions/add-to-project@v0.5.0 diff --git a/.github/workflows/gardener_remove_waiting_author.yml b/.github/workflows/gardener_remove_waiting_author.yml index 9fe063e50b40d..0317d27454358 100644 --- a/.github/workflows/gardener_remove_waiting_author.yml +++ b/.github/workflows/gardener_remove_waiting_author.yml @@ -7,6 +7,7 @@ on: jobs: remove_label: runs-on: ubuntu-latest + timeout-minutes: 5 steps: - uses: actions/checkout@v3 - uses: actions-ecosystem/action-remove-labels@v1 diff --git a/.github/workflows/install-sh.yml b/.github/workflows/install-sh.yml index effc2c46bf95d..9b36e527bc530 100644 --- a/.github/workflows/install-sh.yml +++ b/.github/workflows/install-sh.yml @@ -8,6 +8,7 @@ jobs: sync-install: runs-on: ubuntu-20.04 + timeout-minutes: 10 steps: - name: (PR comment) Get PR branch if: ${{ github.event_name == 'issue_comment' }} @@ -51,6 +52,7 @@ jobs: test-install: needs: sync-install runs-on: ubuntu-20.04 + timeout-minutes: 5 steps: - run: sudo apt-get install --yes curl bc - run: curl --proto '=https' --tlsv1.2 -sSfL https://sh.vector.dev | bash -s -- -y diff --git a/.github/workflows/integration-comment.yml b/.github/workflows/integration-comment.yml index fc755e8b3256a..8cad0cc96049e 100644 --- a/.github/workflows/integration-comment.yml +++ b/.github/workflows/integration-comment.yml @@ -48,6 +48,7 @@ jobs: prep-pr: name: (PR comment) Signal pending to PR runs-on: ubuntu-latest + timeout-minutes: 5 if: contains(github.event.comment.body, '/ci-run-integration') || contains(github.event.comment.body, '/ci-run-all') steps: - name: Generate authentication token @@ -82,6 +83,7 @@ jobs: integration-tests: needs: prep-pr runs-on: [linux, ubuntu-20.04-4core] + timeout-minutes: 90 steps: - uses: actions/checkout@v3 with: @@ -459,6 +461,7 @@ jobs: e2e-tests: needs: prep-pr runs-on: [linux, ubuntu-20.04-8core] + timeout-minutes: 30 steps: - uses: actions/checkout@v3 with: @@ -490,6 +493,7 @@ jobs: update-pr-status: name: Signal result to PR runs-on: ubuntu-latest + timeout-minutes: 5 needs: - integration-tests - e2e-tests diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index 91c876bbf59a1..e425f53650aa3 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -414,6 +414,7 @@ jobs: integration-test-suite: name: Integration Test Suite runs-on: ubuntu-latest + timeout-minutes: 5 if: always() needs: - changes diff --git a/.github/workflows/k8s_e2e.yml 
b/.github/workflows/k8s_e2e.yml index d8d2d3ea55e06..fd60fb6d74ffa 100644 --- a/.github/workflows/k8s_e2e.yml +++ b/.github/workflows/k8s_e2e.yml @@ -60,6 +60,7 @@ jobs: build-x86_64-unknown-linux-gnu: name: Build - x86_64-unknown-linux-gnu runs-on: [linux, ubuntu-20.04-4core] + timeout-minutes: 45 needs: changes # Run this job even if `changes` job is skipped (non- pull request trigger) if: ${{ !failure() && !cancelled() && (github.event_name != 'pull_request' || needs.changes.outputs.k8s == 'true') }} @@ -126,6 +127,7 @@ jobs: compute-k8s-test-plan: name: Compute K8s test plan runs-on: ubuntu-latest + timeout-minutes: 5 needs: changes # Run this job even if `changes` job is skipped if: ${{ !failure() && !cancelled() && (github.event_name != 'pull_request' || needs.changes.outputs.k8s == 'true') }} @@ -180,6 +182,7 @@ jobs: test-e2e-kubernetes: name: K8s ${{ matrix.kubernetes_version.version }} / ${{ matrix.container_runtime }} (${{ matrix.kubernetes_version.role }}) runs-on: [linux, ubuntu-20.04-4core] + timeout-minutes: 45 needs: - build-x86_64-unknown-linux-gnu - compute-k8s-test-plan @@ -233,6 +236,7 @@ jobs: final-result: name: K8s E2E Suite runs-on: ubuntu-latest + timeout-minutes: 5 needs: - changes - build-x86_64-unknown-linux-gnu diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index 013496b77b275..68baa1a388104 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -5,6 +5,7 @@ on: jobs: label: runs-on: ubuntu-20.04 + timeout-minutes: 5 permissions: contents: read pull-requests: write diff --git a/.github/workflows/master_merge_queue.yml b/.github/workflows/master_merge_queue.yml index 49dd0706b24b5..c255596c9aef9 100644 --- a/.github/workflows/master_merge_queue.yml +++ b/.github/workflows/master_merge_queue.yml @@ -105,6 +105,7 @@ jobs: # Always run this so that pull_request triggers are marked as success. if: always() runs-on: ubuntu-20.04 + timeout-minutes: 5 needs: - changes - test-cli diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml index 014e8afd3ee09..92aa816baacd3 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/misc.yml @@ -6,6 +6,7 @@ on: jobs: test-misc: runs-on: [linux, ubuntu-20.04-4core] + timeout-minutes: 45 env: CARGO_INCREMENTAL: 0 steps: diff --git a/.github/workflows/msrv.yml b/.github/workflows/msrv.yml index 9a82d1ddecbd0..b485698dd9ca4 100644 --- a/.github/workflows/msrv.yml +++ b/.github/workflows/msrv.yml @@ -14,6 +14,7 @@ env: jobs: check-msrv: runs-on: ubuntu-latest + timeout-minutes: 20 steps: - uses: actions/checkout@v3 - run: sudo -E bash scripts/environment/bootstrap-ubuntu-20.04.sh diff --git a/.github/workflows/preview_site_trigger.yml b/.github/workflows/preview_site_trigger.yml index 8002913a37902..f3835b4790407 100644 --- a/.github/workflows/preview_site_trigger.yml +++ b/.github/workflows/preview_site_trigger.yml @@ -7,6 +7,7 @@ on: jobs: approval_check: runs-on: ubuntu-latest + timeout-minutes: 5 if: ${{ ! contains(github.head_ref, 'dependabot*') && ! 
contains(github.head_ref, 'gh-readonly-queue*') }} steps: - name: Echo approval diff --git a/.github/workflows/protobuf.yml b/.github/workflows/protobuf.yml index 01add48f69ff6..4bc925d25e003 100644 --- a/.github/workflows/protobuf.yml +++ b/.github/workflows/protobuf.yml @@ -16,6 +16,7 @@ concurrency: jobs: validate-protos: runs-on: ubuntu-latest + timeout-minutes: 5 steps: # Run `git checkout` - uses: actions/checkout@v3 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 0c21848bb90b9..73d3af2b6067b 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -29,6 +29,7 @@ jobs: generate-publish-metadata: name: Generate Publish-related Metadata runs-on: ubuntu-20.04 + timeout-minutes: 5 outputs: vector_version: ${{ steps.generate-publish-metadata.outputs.vector_version }} vector_build_desc: ${{ steps.generate-publish-metadata.outputs.vector_build_desc }} @@ -45,6 +46,7 @@ jobs: build-x86_64-unknown-linux-musl-packages: name: Build Vector for x86_64-unknown-linux-musl (.tar.gz) runs-on: [linux, release-builder] + timeout-minutes: 60 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -71,6 +73,7 @@ jobs: name: Build Vector for x86_64-unknown-linux-gnu (.tar.gz, DEB, RPM) runs-on: [linux, release-builder] needs: generate-publish-metadata + timeout-minutes: 60 env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} VECTOR_BUILD_DESC: ${{ needs.generate-publish-metadata.outputs.vector_build_desc }} @@ -95,6 +98,7 @@ jobs: build-aarch64-unknown-linux-musl-packages: name: Build Vector for aarch64-unknown-linux-musl (.tar.gz) runs-on: [linux, release-builder] + timeout-minutes: 60 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -122,6 +126,7 @@ jobs: build-aarch64-unknown-linux-gnu-packages: name: Build Vector for aarch64-unknown-linux-gnu (.tar.gz) runs-on: [linux, release-builder] + timeout-minutes: 60 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -149,6 +154,7 @@ jobs: build-armv7-unknown-linux-gnueabihf-packages: name: Build Vector for armv7-unknown-linux-gnueabihf (.tar.gz) runs-on: [linux, release-builder] + timeout-minutes: 60 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -176,6 +182,7 @@ jobs: build-armv7-unknown-linux-musleabihf-packages: name: Build Vector for armv7-unknown-linux-musleabihf (.tar.gz) runs-on: [linux, release-builder] + timeout-minutes: 60 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -203,6 +210,7 @@ jobs: build-x86_64-apple-darwin-packages: name: Build Vector for x86_64-apple-darwin (.tar.gz) runs-on: macos-latest-xl + timeout-minutes: 90 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -231,6 +239,7 @@ jobs: build-x86_64-pc-windows-msvc-packages: name: Build Vector for x86_64-pc-windows-msvc (.zip) runs-on: [windows, release-builder] + timeout-minutes: 90 needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -277,6 +286,7 @@ jobs: deb-verify: name: Verify DEB Packages runs-on: ubuntu-20.04 + timeout-minutes: 5 needs: - generate-publish-metadata - build-x86_64-unknown-linux-gnu-packages @@ -322,6 +332,7 @@ 
jobs: rpm-verify: name: Verify RPM Packages runs-on: ubuntu-20.04 + timeout-minutes: 5 needs: - generate-publish-metadata - build-x86_64-unknown-linux-gnu-packages @@ -371,6 +382,7 @@ jobs: macos-verify: name: Verify macOS Package runs-on: macos-12 + timeout-minutes: 5 needs: - generate-publish-metadata - build-x86_64-apple-darwin-packages @@ -393,6 +405,7 @@ jobs: publish-docker: name: Publish to Docker runs-on: ubuntu-20.04 + timeout-minutes: 15 needs: - generate-publish-metadata - build-aarch64-unknown-linux-gnu-packages @@ -465,6 +478,7 @@ jobs: publish-s3: name: Publish to S3 runs-on: ubuntu-20.04 + timeout-minutes: 10 needs: - generate-publish-metadata - build-x86_64-unknown-linux-gnu-packages @@ -537,6 +551,7 @@ jobs: # We only publish to GitHub for versioned releases, not nightlies. if: inputs.channel == 'release' runs-on: ubuntu-20.04 + timeout-minutes: 10 needs: - generate-publish-metadata - build-x86_64-unknown-linux-gnu-packages @@ -613,6 +628,7 @@ jobs: # We only publish to Homebrew for versioned releases, not nightlies. if: inputs.channel == 'release' runs-on: ubuntu-20.04 + timeout-minutes: 10 needs: - generate-publish-metadata - publish-s3 @@ -631,6 +647,7 @@ jobs: generate-sha256sum: name: Generate SHA256 checksums runs-on: ubuntu-20.04 + timeout-minutes: 5 needs: - generate-publish-metadata - build-x86_64-unknown-linux-gnu-packages diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 65a164a8a63ab..a6156c0d09df4 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -47,6 +47,7 @@ jobs: # Only run this workflow if files changed in areas that could possibly introduce a regression should-run: runs-on: ubuntu-latest + timeout-minutes: 5 if: github.event_name != 'pull_request' outputs: source_changed: ${{ steps.filter.outputs.SOURCE_CHANGED }} @@ -110,6 +111,7 @@ jobs: compute-metadata: name: Compute metadata runs-on: ubuntu-22.04 + timeout-minutes: 5 needs: should-run if: github.event_name != 'merge_group' || needs.should-run.outputs.source_changed == 'true' outputs: @@ -286,6 +288,7 @@ jobs: build-baseline: name: Build baseline Vector container runs-on: [linux, ubuntu-20.04-4core] + timeout-minutes: 30 needs: - compute-metadata steps: @@ -323,6 +326,7 @@ jobs: build-comparison: name: Build comparison Vector container runs-on: [linux, ubuntu-20.04-4core] + timeout-minutes: 30 needs: - compute-metadata steps: @@ -360,6 +364,7 @@ jobs: confirm-valid-credentials: name: Confirm AWS credentials are minimally valid runs-on: ubuntu-22.04 + timeout-minutes: 5 needs: - compute-metadata steps: @@ -381,6 +386,7 @@ jobs: upload-baseline-image-to-ecr: name: Upload baseline images to ECR runs-on: ubuntu-22.04 + timeout-minutes: 5 needs: - compute-metadata - confirm-valid-credentials @@ -419,6 +425,7 @@ jobs: upload-comparison-image-to-ecr: name: Upload comparison images to ECR runs-on: ubuntu-22.04 + timeout-minutes: 5 needs: - compute-metadata - confirm-valid-credentials @@ -457,6 +464,7 @@ jobs: submit-job: name: Submit regression job runs-on: ubuntu-22.04 + timeout-minutes: 45 needs: - compute-metadata - upload-baseline-image-to-ecr @@ -593,6 +601,7 @@ jobs: detect-regression: name: Determine regression status runs-on: ubuntu-22.04 + timeout-minutes: 5 needs: - submit-job - compute-metadata @@ -669,6 +678,7 @@ jobs: analyze-experiment: name: Download regression analysis & upload report runs-on: ubuntu-22.04 + timeout-minutes: 5 needs: - submit-job - compute-metadata @@ -782,6 +792,7 @@ jobs: regression-detection-suite: 
name: Regression Detection Suite runs-on: ubuntu-latest + timeout-minutes: 5 if: always() needs: - compute-metadata diff --git a/.github/workflows/spelling.yml b/.github/workflows/spelling.yml index 6bc9e822ba966..73de422266c44 100644 --- a/.github/workflows/spelling.yml +++ b/.github/workflows/spelling.yml @@ -78,6 +78,7 @@ jobs: outputs: followup: ${{ steps.spelling.outputs.followup }} runs-on: ubuntu-latest + timeout-minutes: 5 if: "contains(github.event_name, 'pull_request') || github.event_name == 'push'" concurrency: group: spelling-${{ github.event.pull_request.number || github.ref }} diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 6270fdbde79c9..3780e8e94b3ac 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -36,6 +36,7 @@ jobs: checks: name: Checks runs-on: [linux, ubuntu-20.04-8core] + timeout-minutes: 45 needs: changes env: CARGO_INCREMENTAL: 0 @@ -122,6 +123,7 @@ jobs: all-checks: name: Test Suite runs-on: ubuntu-20.04 + timeout-minutes: 5 if: always() needs: [changes, checks] env: diff --git a/.github/workflows/unit_mac.yml b/.github/workflows/unit_mac.yml index 9d32e78441dac..3b42fe12e7c45 100644 --- a/.github/workflows/unit_mac.yml +++ b/.github/workflows/unit_mac.yml @@ -6,6 +6,7 @@ on: jobs: unit-mac: runs-on: macos-13 + timeout-minutes: 90 env: CARGO_INCREMENTAL: 0 steps: diff --git a/.github/workflows/unit_windows.yml b/.github/workflows/unit_windows.yml index 34cf634b467fc..a06bccb1743d9 100644 --- a/.github/workflows/unit_windows.yml +++ b/.github/workflows/unit_windows.yml @@ -7,6 +7,7 @@ jobs: test-windows: runs-on: [windows, windows-2019-8core] + timeout-minutes: 60 steps: - name: (PR comment) Get PR branch if: ${{ github.event_name == 'issue_comment' }} diff --git a/.github/workflows/workload_checks.yml b/.github/workflows/workload_checks.yml index c10bdbf4ff6b0..b64f810ce652a 100644 --- a/.github/workflows/workload_checks.yml +++ b/.github/workflows/workload_checks.yml @@ -30,6 +30,7 @@ jobs: compute-metadata: name: Compute metadata runs-on: ubuntu-latest + timeout-minutes: 5 outputs: replicas: ${{ steps.experimental-meta.outputs.REPLICAS }} warmup-seconds: ${{ steps.experimental-meta.outputs.WARMUP_SECONDS }} @@ -81,6 +82,7 @@ jobs: submit-job: name: Submit workload checks job runs-on: ubuntu-latest + timeout-minutes: 90 needs: - compute-metadata steps: From 881077e26145c853d7680993c588e4c260346deb Mon Sep 17 00:00:00 2001 From: hdhoang Date: Sat, 17 Feb 2024 04:44:07 +0700 Subject: [PATCH 0033/1491] chore(releases website): 0.36 changelog fixes (#19875) * chore(website): remove changelog cliffhanger * chore(changelog): use more stable v0.36 source link --- .../content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md | 2 +- website/cue/reference/releases/0.36.0.cue | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md b/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md index 216e86a9da7e7..a02fc3245c0ff 100644 --- a/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md +++ b/website/content/en/highlights/2024-02-13-0-36-0-upgrade-guide.md @@ -9,4 +9,4 @@ badges: type: breaking change --- -Vector's 0.36.0 release includes no **breaking changes** or **deprecations**: +Vector's 0.36.0 release includes no **breaking changes** or **deprecations**. 
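The `timeout-minutes` lines added across the CI workflow diffs earlier in this series all use GitHub Actions' standard job-level key. A minimal sketch of the resulting shape, using the `rpm-verify` job from above (the checkout and make steps are illustrative, not taken from the actual workflow):

```yaml
jobs:
  rpm-verify:
    name: Verify RPM Packages
    runs-on: ubuntu-20.04
    # Cancel the job automatically if it runs longer than five minutes,
    # instead of holding a runner until GitHub's default limit expires.
    timeout-minutes: 5
    steps:
      - uses: actions/checkout@v4
      - run: make verify-rpm # hypothetical verification step
```

Without an explicit timeout, GitHub Actions lets a job run for up to six hours, so short per-job timeouts surface hung jobs quickly rather than tying up runners.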
diff --git a/website/cue/reference/releases/0.36.0.cue b/website/cue/reference/releases/0.36.0.cue index d4df4ecc78368..ad72d305c32d8 100644 --- a/website/cue/reference/releases/0.36.0.cue +++ b/website/cue/reference/releases/0.36.0.cue @@ -28,7 +28,7 @@ releases: "0.36.0": { description: """ Vector can now emulate a [Prometheus Pushgateway](https://github.com/prometheus/pushgateway) through the new `prometheus_pushgateway` source. Counters and histograms can optionally be aggregated across pushes to support use-cases like cron jobs. - There are some caveats, which are listed [here](https://github.com/Sinjo/vector/blob/0d4fc20091ddae7f3562bfdf07c9095c0c7223e0/src/sources/prometheus/pushgateway.rs#L8-L12). + There are some caveats, which are listed [in the implementation](https://github.com/vectordotdev/vector/tree/v0.36/src/sources/prometheus/pushgateway.rs#L8-L12). """ contributors: ["Sinjo"] From 1470f1ada2bbf71cfbe8fe9da683315e5472bebf Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 20 Feb 2024 13:49:55 -0800 Subject: [PATCH 0034/1491] docs(remap transform): Fix `drop_on_abort` docs (#19918) The docs assumed the default was `false` when it is actually `true`. I slimmed them down so that they don't refer to the default. Fixes: #19916 Signed-off-by: Jesse Szwedko --- src/transforms/remap.rs | 8 +++----- .../cue/reference/components/transforms/base/remap.cue | 8 +++----- 2 files changed, 6 insertions(+), 10 deletions(-) diff --git a/src/transforms/remap.rs b/src/transforms/remap.rs index 3e172b9aa970b..d0fe0bbccd49f 100644 --- a/src/transforms/remap.rs +++ b/src/transforms/remap.rs @@ -105,11 +105,9 @@ pub struct RemapConfig { /// Drops any event that is manually aborted during processing. /// - /// Normally, if a VRL program is manually aborted (using [`abort`][vrl_docs_abort]) when - /// processing an event, the original, unmodified event is sent downstream. In some cases, - /// you may not wish to send the event any further, such as if certain transformation or - /// enrichment is strictly required. Setting `drop_on_abort` to `true` allows you to ensure - /// these events do not get processed any further. + /// If a VRL program is manually aborted (using [`abort`][vrl_docs_abort]) when + /// processing an event, this option controls whether the original, unmodified event is sent + /// downstream without any modifications or if it is dropped. /// /// Additionally, dropped events can potentially be diverted to a specially-named output for /// further logging and analysis by setting `reroute_dropped`. diff --git a/website/cue/reference/components/transforms/base/remap.cue b/website/cue/reference/components/transforms/base/remap.cue index c7fd8648187a4..fbbbd14f84420 100644 --- a/website/cue/reference/components/transforms/base/remap.cue +++ b/website/cue/reference/components/transforms/base/remap.cue @@ -5,11 +5,9 @@ base: components: transforms: remap: configuration: { description: """ Drops any event that is manually aborted during processing. - Normally, if a VRL program is manually aborted (using [`abort`][vrl_docs_abort]) when - processing an event, the original, unmodified event is sent downstream. In some cases, - you may not wish to send the event any further, such as if certain transformation or - enrichment is strictly required. Setting `drop_on_abort` to `true` allows you to ensure - these events do not get processed any further. 
+ If a VRL program is manually aborted (using [`abort`][vrl_docs_abort]) when + processing an event, this option controls whether the original, unmodified event is sent + downstream without any modifications or if it is dropped. Additionally, dropped events can potentially be diverted to a specially-named output for further logging and analysis by setting `reroute_dropped`. From 78f0e31c8445355203fb5295224af7da1de19e1b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 06:53:03 -0800 Subject: [PATCH 0035/1491] chore(deps): Bump the aws group with 1 update (#19919) Bumps the aws group with 1 update: [aws-credential-types](https://github.com/smithy-lang/smithy-rs). Updates `aws-credential-types` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-credential-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4a7abbbf93cb3..7d4e5fc3d0e99 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -762,9 +762,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2d56f287a9e65e4914bfedb5b22c056b65e4c232fca512d5509a9df36386759f" +checksum = "e5635d8707f265c773282a22abe1ecd4fbe96a8eb2f0f14c0796f8016f11a41a" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", diff --git a/Cargo.toml b/Cargo.toml index 3c9a429e397ce..6f4aa8dc10d25 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -184,7 +184,7 @@ aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["be aws-types = { version = "1.1.5", default-features = false, optional = true } aws-sigv4 = { version = "1.1.5", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } -aws-credential-types = { version = "1.1.5", default-features = false, features = ["hardcoded-credentials"], optional = true } +aws-credential-types = { version = "1.1.6", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } aws-smithy-types = { version = "1.1.6", default-features = false, optional = true } aws-smithy-runtime-api = { version = "1.1.6", default-features = false, optional = true } From 6a76be2173ad5a3d919e20e0661a7f3fc543427d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:55:56 +0000 Subject: [PATCH 0036/1491] chore(deps): Bump mock_instant from 0.3.1 to 0.3.2 (#19900) Bumps [mock_instant](https://github.com/museun/mock_instant) from 0.3.1 to 0.3.2. - [Commits](https://github.com/museun/mock_instant/commits/v0.3.2) --- updated-dependencies: - dependency-name: mock_instant dependency-type: direct:production update-type: version-update:semver-patch ... 
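To make the corrected `drop_on_abort` semantics from #19918 above concrete, here is a hedged sketch of a `remap` transform in Vector's YAML configuration; the component names and the VRL program are illustrative, not from the docs:

```yaml
transforms:
  parse_json_logs:
    type: remap
    inputs:
      - app_logs # hypothetical upstream source
    # true (the default): events that hit `abort` below are dropped.
    # false: the original, unmodified event is sent downstream instead.
    drop_on_abort: true
    # Optionally divert dropped events to a `parse_json_logs.dropped` output.
    reroute_dropped: true
    source: |
      # Abort processing for events missing a required field.
      if !exists(.request_id) { # hypothetical required field
        abort
      }
```

With `reroute_dropped: true`, a downstream sink can consume the `parse_json_logs.dropped` output to log and analyze the aborted events.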
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7d4e5fc3d0e99..ddede8520afa8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5294,9 +5294,9 @@ dependencies = [ [[package]] name = "mock_instant" -version = "0.3.1" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c1a54de846c4006b88b1516731cc1f6026eb5dc4bcb186aa071ef66d40524ec" +checksum = "9366861eb2a2c436c20b12c8dbec5f798cea6b47ad99216be0282942e2c81ea0" [[package]] name = "mongodb" From 837c64cffd3624e32178a1e5078ed5ed3e6ebc8a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:56:27 +0000 Subject: [PATCH 0037/1491] chore(deps): Bump serde_yaml from 0.9.31 to 0.9.32 (#19907) Bumps [serde_yaml](https://github.com/dtolnay/serde-yaml) from 0.9.31 to 0.9.32. - [Release notes](https://github.com/dtolnay/serde-yaml/releases) - [Commits](https://github.com/dtolnay/serde-yaml/compare/0.9.31...0.9.32) --- updated-dependencies: - dependency-name: serde_yaml dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 14 +++++++------- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ddede8520afa8..cef7375110e5f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4723,7 +4723,7 @@ dependencies = [ "secrecy", "serde", "serde_json", - "serde_yaml 0.9.31", + "serde_yaml 0.9.32", "thiserror", "tokio", "tokio-util", @@ -8161,9 +8161,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.31" +version = "0.9.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "adf8a49373e98a4c5f0ceb5d05aa7c648d75f63774981ed95b7c7443bbd50c6e" +checksum = "8fd075d994154d4a774f95b51fb96bdc2832b0ea48425c92546073816cda1f2f" dependencies = [ "indexmap 2.2.3", "itoa", @@ -9821,7 +9821,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "serde_yaml 0.9.31", + "serde_yaml 0.9.32", "sha2", "tempfile", "toml", @@ -9976,7 +9976,7 @@ dependencies = [ "serde_bytes", "serde_json", "serde_with 3.6.1", - "serde_yaml 0.9.31", + "serde_yaml 0.9.32", "sha2", "similar-asserts", "smallvec", @@ -10073,7 +10073,7 @@ dependencies = [ "rand 0.8.5", "rkyv", "serde", - "serde_yaml 0.9.31", + "serde_yaml 0.9.32", "snafu 0.7.5", "temp-dir", "tokio", @@ -10230,7 +10230,7 @@ dependencies = [ "serde", "serde_json", "serde_with 3.6.1", - "serde_yaml 0.9.31", + "serde_yaml 0.9.32", "similar-asserts", "smallvec", "snafu 0.7.5", diff --git a/Cargo.toml b/Cargo.toml index 6f4aa8dc10d25..22ae5caeecb46 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -209,7 +209,7 @@ serde-toml-merge = { version = "0.3.4", default-features = false } serde_bytes = { version = "0.11.14", default-features = false, features = ["std"], optional = true } serde_json.workspace = true serde_with = { version = "3.6.1", default-features = false, features = ["macros", "std"] } -serde_yaml = { version = "0.9.31", default-features = false } +serde_yaml = { version = "0.9.32", default-features = false } # Messagepack rmp-serde = { version = "1.1.2", default-features = false, optional = true } diff --git a/lib/vector-core/Cargo.toml 
b/lib/vector-core/Cargo.toml index 2d75ee12b0a5c..7540c44a8efd7 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -89,7 +89,7 @@ ndarray-stats = "0.5.1" noisy_float = "0.2.0" rand = "0.8.5" rand_distr = "0.4.3" -serde_yaml = { version = "0.9.31", default-features = false } +serde_yaml = { version = "0.9.32", default-features = false } tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt", "ansi", "registry"] } vector-common = { path = "../vector-common", default-features = false, features = ["test"] } diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index bc80fad4524ea..7bf31944a8623 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -33,7 +33,7 @@ regex = { version = "1.10.3", default-features = false, features = ["std", "perf reqwest = { version = "0.11", features = ["json", "blocking"] } serde.workspace = true serde_json.workspace = true -serde_yaml = "0.9.31" +serde_yaml = "0.9.32" sha2 = "0.10.8" tempfile = "3.10.0" toml.workspace = true From bb4190b028f24c51fa6296830aa6036f68c5596b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:58:13 +0000 Subject: [PATCH 0038/1491] chore(deps): Bump assert_cmd from 2.0.13 to 2.0.14 (#19908) Bumps [assert_cmd](https://github.com/assert-rs/assert_cmd) from 2.0.13 to 2.0.14. - [Changelog](https://github.com/assert-rs/assert_cmd/blob/master/CHANGELOG.md) - [Commits](https://github.com/assert-rs/assert_cmd/compare/v2.0.13...v2.0.14) --- updated-dependencies: - dependency-name: assert_cmd dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cef7375110e5f..6850032ad5123 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -333,9 +333,9 @@ dependencies = [ [[package]] name = "assert_cmd" -version = "2.0.13" +version = "2.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00ad3f3a942eee60335ab4342358c161ee296829e0d16ff42fc1d6cb07815467" +checksum = "ed72493ac66d5804837f480ab3766c72bdfab91a65e565fc54fa9e42db0073a8" dependencies = [ "anstyle", "bstr 1.9.0", diff --git a/Cargo.toml b/Cargo.toml index 22ae5caeecb46..050b825f47401 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -357,7 +357,7 @@ openssl-src = { version = "300", default-features = false, features = ["force-en [dev-dependencies] approx = "0.5.1" -assert_cmd = { version = "2.0.13", default-features = false } +assert_cmd = { version = "2.0.14", default-features = false } aws-smithy-runtime = { version = "1.1.6", default-features = false, features = ["tls-rustls"] } azure_core = { version = "0.17", default-features = false, features = ["enable_reqwest", "azurite_workaround"] } azure_identity = { version = "0.17", default-features = false, features = ["enable_reqwest"] } From 7311c0aaa01cac20d4cdc71c21c516de7326405c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:58:35 +0000 Subject: [PATCH 0039/1491] chore(deps): Bump serde from 1.0.196 to 1.0.197 (#19910) Bumps [serde](https://github.com/serde-rs/serde) from 1.0.196 to 1.0.197. 
- [Release notes](https://github.com/serde-rs/serde/releases) - [Commits](https://github.com/serde-rs/serde/compare/v1.0.196...v1.0.197) --- updated-dependencies: - dependency-name: serde dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6850032ad5123..98723bacaf6f0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7944,9 +7944,9 @@ checksum = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3" [[package]] name = "serde" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "870026e60fa08c69f064aa766c10f10b1d62db9ccd4d0abb206472bee0ce3b32" +checksum = "3fb1c873e1b9b056a4dc4c0c198b24c3ffa059243875552b2bd0933b1aee4ce2" dependencies = [ "serde_derive", ] @@ -7992,9 +7992,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.196" +version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33c85360c95e7d137454dc81d9a4ed2b8efd8fbe19cee57357b32b9771fccb67" +checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", diff --git a/Cargo.toml b/Cargo.toml index 050b825f47401..8316c2967f3fd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -133,7 +133,7 @@ pin-project = { version = "1.1.4", default-features = false } proptest = "1.4" proptest-derive = "0.4.0" serde_json = { version = "1.0.112", default-features = false, features = ["raw_value", "std"] } -serde = { version = "1.0.196", default-features = false, features = ["alloc", "derive", "rc"] } +serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } toml = { version = "0.8.10", default-features = false, features = ["display", "parse"] } vrl = { version = "0.11.0", features = ["arbitrary", "cli", "test", "test_framework"] } From b8d89a03459a32f9c227b6fab21b5081c75d934f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:58:49 +0000 Subject: [PATCH 0040/1491] chore(deps): Bump semver from 1.0.21 to 1.0.22 (#19911) Bumps [semver](https://github.com/dtolnay/semver) from 1.0.21 to 1.0.22. - [Release notes](https://github.com/dtolnay/semver/releases) - [Commits](https://github.com/dtolnay/semver/compare/1.0.21...1.0.22) --- updated-dependencies: - dependency-name: semver dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 98723bacaf6f0..fca39c7902278 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7668,7 +7668,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.21", + "semver 1.0.22", ] [[package]] @@ -7929,9 +7929,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.21" +version = "1.0.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97ed7a9823b74f99c7742f5336af7be5ecd3eeafcb1507d1fa93347b1d589b0" +checksum = "92d43fe69e652f3df9bdc2b85b2854a0825b86e4fb76bc44d945137d053639ca" dependencies = [ "serde", ] @@ -9970,7 +9970,7 @@ dependencies = [ "rstest", "rumqttc", "seahash", - "semver 1.0.21", + "semver 1.0.22", "serde", "serde-toml-merge", "serde_bytes", diff --git a/Cargo.toml b/Cargo.toml index 8316c2967f3fd..46f7d7100f84d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -315,7 +315,7 @@ regex = { version = "1.10.3", default-features = false, features = ["std", "perf roaring = { version = "0.10.3", default-features = false, optional = true } rumqttc = { version = "0.23.0", default-features = false, features = ["use-rustls"], optional = true } seahash = { version = "4.1.0", default-features = false } -semver = { version = "1.0.21", default-features = false, features = ["serde", "std"], optional = true } +semver = { version = "1.0.22", default-features = false, features = ["serde", "std"], optional = true } smallvec = { version = "1", default-features = false, features = ["union", "serde"] } snafu = { version = "0.7.5", default-features = false, features = ["futures"] } snap = { version = "1.1.1", default-features = false } From 1d91742e70a3c5ef4ae3a86c26a6d89846e35157 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:59:00 +0000 Subject: [PATCH 0041/1491] chore(deps): Bump ryu from 1.0.16 to 1.0.17 (#19912) Bumps [ryu](https://github.com/dtolnay/ryu) from 1.0.16 to 1.0.17. - [Release notes](https://github.com/dtolnay/ryu/releases) - [Commits](https://github.com/dtolnay/ryu/compare/1.0.16...1.0.17) --- updated-dependencies: - dependency-name: ryu dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fca39c7902278..58fc7cdeb93bb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7790,9 +7790,9 @@ dependencies = [ [[package]] name = "ryu" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f98d2aa92eebf49b69786be48e4477826b256916e84a57ff2a4f21923b48eb4c" +checksum = "e86697c916019a8588c99b5fac3cead74ec0b4b819707a682fd4d23fa0ce1ba1" [[package]] name = "salsa20" From 7fb4513424aa9c3d19fa0e43c7be2360d2ac412d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 14:59:18 +0000 Subject: [PATCH 0042/1491] chore(deps): Bump anyhow from 1.0.79 to 1.0.80 (#19914) Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.79 to 1.0.80. 
- [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.79...1.0.80) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/docs-renderer/Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 58fc7cdeb93bb..8d57004f7511a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -227,9 +227,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.79" +version = "1.0.80" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "080e9890a082662b09c1ad45f567faeeb47f22b5fb23895fbe1e651e718e25ca" +checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" [[package]] name = "apache-avro" diff --git a/lib/docs-renderer/Cargo.toml b/lib/docs-renderer/Cargo.toml index 3edf3d598940b..eca94c8ef139b 100644 --- a/lib/docs-renderer/Cargo.toml +++ b/lib/docs-renderer/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -anyhow = { version = "1.0.79", default-features = false, features = ["std"] } +anyhow = { version = "1.0.80", default-features = false, features = ["std"] } serde.workspace = true serde_json.workspace = true snafu = { version = "0.7.5", default-features = false } diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 0696a53fdfaf6..72be949d2e691 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -13,7 +13,7 @@ serde.workspace = true serde_json.workspace = true # Error handling -anyhow = { version = "1.0.79", default-features = false, features = ["std"] } +anyhow = { version = "1.0.80", default-features = false, features = ["std"] } # Tokio / Futures async-trait = { version = "0.1", default-features = false } diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index 7bf31944a8623..df2b3bf461abd 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -8,7 +8,7 @@ readme = "README.md" publish = false [dependencies] -anyhow = "1.0.79" +anyhow = "1.0.80" cached = "0.48.0" chrono.workspace = true clap.workspace = true From 282a58d410a05f2bf0def7cfcca98e84342134ff Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 21 Feb 2024 15:22:44 -0800 Subject: [PATCH 0043/1491] chore(dev): Update release instructions for deploying vector.dev (#19925) We've switched to having a long-running branch representing what to deploy to vector.dev to reduce coordination. Signed-off-by: Jesse Szwedko --- .github/ISSUE_TEMPLATE/minor-release.md | 4 ++-- .github/ISSUE_TEMPLATE/patch-release.md | 2 ++ 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/minor-release.md b/.github/ISSUE_TEMPLATE/minor-release.md index a9d49abfafdfb..8c82e25067c71 100644 --- a/.github/ISSUE_TEMPLATE/minor-release.md +++ b/.github/ISSUE_TEMPLATE/minor-release.md @@ -49,6 +49,6 @@ On the day of release: - [ ] Add docker images to [https://github.com/DataDog/images](https://github.com/DataDog/images/tree/master/vector) to have them available internally. 
- [ ] Cherry-pick any release commits from the release branch that are not on `master`, to `master` - [ ] Bump the release number in the `Cargo.toml` on master to the next major release -- [ ] Drop a note in the #websites Slack channel to request an update of the branch deployed - at https://vector.dev to the new release branch. +- [ ] Reset the `website` branch to the `HEAD` of the release branch to update https://vector.dev + - [ ] `git checkout website && git reset --hard origin/v0. && git push` - [ ] Kick-off post-mortems for any regressions resolved by the release diff --git a/.github/ISSUE_TEMPLATE/patch-release.md b/.github/ISSUE_TEMPLATE/patch-release.md index 903c7d5befbad..ac6dd224ec6a2 100644 --- a/.github/ISSUE_TEMPLATE/patch-release.md +++ b/.github/ISSUE_TEMPLATE/patch-release.md @@ -51,4 +51,6 @@ On the day of release: - [ ] Add docker images to [https://github.com/DataDog/images](https://github.com/DataDog/images/tree/master/vector) to have them available internally. - Follow the [instructions at the top of the mirror.yaml file](https://github.com/DataDog/images/blob/fbf12868e90d52e513ebca0389610dea8a3c7e1a/mirror.yaml#L33-L49). - [ ] Cherry-pick any release commits from the release branch that are not on `master`, to `master` +- [ ] Reset the `website` branch to the `HEAD` of the release branch to update https://vector.dev + - [ ] `git checkout website && git reset --hard origin/v0.. && git push` - [ ] Kick-off post-mortems for any regressions resolved by the release From 3cb92727ab948c26d8df792eea1c237c0a44bc70 Mon Sep 17 00:00:00 2001 From: Hugo Hromic Date: Wed, 21 Feb 2024 23:48:21 +0000 Subject: [PATCH 0044/1491] chore(website): bump openssl version used for links in docs (#19880) Signed-off-by: Hugo Hromic --- website/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/config.toml b/website/config.toml index 06c37f106cde5..a188ef60da331 100644 --- a/website/config.toml +++ b/website/config.toml @@ -33,7 +33,7 @@ undertagline = "Collect, transform, and route all your observability data with o subtagline = "Vector is deployed over 1,000,000 times per month by Fortune 500 companies and startups" alpine_js_version = "2.8.2" ionicons_version = "5.4.0" -openssl_version = "3.1" +openssl_version = "3.2" site_logo = "img/vector-open-graph.png" display_banner = true # Whether to display the top banner in layouts/partials/banner.html favicon = "favicon.ico" From a32895ec096c5c55c449c8d3ad6bed658d69b71b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 23:48:57 +0000 Subject: [PATCH 0045/1491] chore(deps): Bump the clap group with 3 updates (#19899) * chore(deps): Bump the clap group with 3 updates Bumps the clap group with 3 updates: [clap](https://github.com/clap-rs/clap), [clap-verbosity-flag](https://github.com/clap-rs/clap-verbosity-flag) and [clap_complete](https://github.com/clap-rs/clap). 
Updates `clap` from 4.4.18 to 4.5.1 - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/v4.4.18...clap_complete-v4.5.1) Updates `clap-verbosity-flag` from 2.1.2 to 2.2.0 - [Changelog](https://github.com/clap-rs/clap-verbosity-flag/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap-verbosity-flag/compare/v2.1.2...v2.2.0) Updates `clap_complete` from 4.4.10 to 4.5.1 - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/clap_complete-v4.4.10...clap_complete-v4.5.1) --- updated-dependencies: - dependency-name: clap dependency-type: direct:production update-type: version-update:semver-minor dependency-group: clap - dependency-name: clap-verbosity-flag dependency-type: direct:production update-type: version-update:semver-minor dependency-group: clap - dependency-name: clap_complete dependency-type: direct:production update-type: version-update:semver-minor dependency-group: clap ... Signed-off-by: dependabot[bot] * Update licenses --------- Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Bruce Guenter --- Cargo.lock | 52 ++++++++++++++++++++++++-------------------- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 1 + vdev/Cargo.toml | 4 ++-- 4 files changed, 33 insertions(+), 26 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8d57004f7511a..d87802012a979 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1977,9 +1977,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.18" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e578d6ec4194633722ccf9544794b71b1385c3c027efe0c55db226fc880865c" +checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da" dependencies = [ "clap_builder", "clap_derive", @@ -1987,41 +1987,41 @@ dependencies = [ [[package]] name = "clap-verbosity-flag" -version = "2.1.2" +version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b57f73ca21b17a0352944b9bb61803b6007bd911b6cccfef7153f7f0600ac495" +checksum = "bb9b20c0dd58e4c2e991c8d203bbeb76c11304d1011659686b5b644bc29aa478" dependencies = [ - "clap 4.4.18", + "clap 4.5.1", "log", ] [[package]] name = "clap_builder" -version = "4.4.18" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4df4df40ec50c46000231c914968278b1eb05098cf8f1b3a518a95030e71d1c7" +checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb" dependencies = [ "anstream", "anstyle", "clap_lex", - "strsim 0.10.0", + "strsim 0.11.0", "terminal_size", ] [[package]] name = "clap_complete" -version = "4.4.10" +version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "abb745187d7f4d76267b37485a65e0149edd0e91a4cfcdd3f27524ad86cee9f3" +checksum = "885e4d7d5af40bfb99ae6f9433e292feac98d452dcb3ec3d25dfe7552b77da8c" dependencies = [ - "clap 4.4.18", + "clap 4.5.1", ] [[package]] name = "clap_derive" -version = "4.4.7" +version = "4.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" +checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", @@ 
-2031,9 +2031,9 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.6.0" +version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" +checksum = "98cc8fbded0c607b7ba9dd60cd98df59af97e84d24e49c8557331cfc26d301ce" [[package]] name = "clipboard-win" @@ -2351,7 +2351,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.4.18", + "clap 4.5.1", "criterion-plot", "futures 0.3.30", "is-terminal", @@ -8555,6 +8555,12 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "73473c0e59e6d5812c5dfe2a064a6444949f089e20eec9a2e5506596494e4623" +[[package]] +name = "strsim" +version = "0.11.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ee073c9e4cd00e28217186dbe12796d692868f432bf2e97ee73bed0c56dfa01" + [[package]] name = "structopt" version = "0.3.26" @@ -9801,7 +9807,7 @@ dependencies = [ "anyhow", "cached", "chrono", - "clap 4.4.18", + "clap 4.5.1", "clap-verbosity-flag", "clap_complete", "confy", @@ -9878,7 +9884,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.4.18", + "clap 4.5.1", "colored", "console-subscriber", "criterion", @@ -10028,7 +10034,7 @@ dependencies = [ "anyhow", "async-trait", "chrono", - "clap 4.4.18", + "clap 4.5.1", "futures 0.3.30", "graphql_client", "indoc", @@ -10051,7 +10057,7 @@ dependencies = [ "async-trait", "bytecheck", "bytes 1.5.0", - "clap 4.4.18", + "clap 4.5.1", "crc32fast", "criterion", "crossbeam-queue", @@ -10310,7 +10316,7 @@ dependencies = [ name = "vector-vrl-cli" version = "0.1.0" dependencies = [ - "clap 4.4.18", + "clap 4.5.1", "vector-vrl-functions", "vrl", ] @@ -10329,7 +10335,7 @@ dependencies = [ "ansi_term", "chrono", "chrono-tz", - "clap 4.4.18", + "clap 4.5.1", "enrichment", "glob", "prettydiff", @@ -10389,7 +10395,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.4.18", + "clap 4.5.1", "codespan-reporting", "community-id", "crypto_secretbox", diff --git a/Cargo.toml b/Cargo.toml index 46f7d7100f84d..4476ecab632a9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -127,7 +127,7 @@ members = [ [workspace.dependencies] chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } -clap = { version = "4.4.18", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } +clap = { version = "4.5.1", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } indexmap = { version = "2.2.3", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.4", default-features = false } proptest = "1.4" diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 98d18786c20b3..0a45a8d7b8024 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -539,6 +539,7 @@ stringprep,https://github.com/sfackler/rust-stringprep,MIT OR Apache-2.0,Steven strip-ansi-escapes,https://github.com/luser/strip-ansi-escapes,Apache-2.0 OR MIT,Ted Mielczarek strsim,https://github.com/dguo/strsim-rs,MIT,Danny Guo strsim,https://github.com/dguo/strsim-rs,MIT,Danny Guo +strsim,https://github.com/rapidfuzz/strsim-rs,MIT,"Danny Guo , maxbachmann " structopt,https://github.com/TeXitoi/structopt,Apache-2.0 OR MIT,"Guillaume Pinot , others" structopt-derive,https://github.com/TeXitoi/structopt,Apache-2.0 OR MIT,Guillaume Pinot 
strum,https://github.com/Peternator7/strum,MIT,Peter Glotfelty diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index df2b3bf461abd..d1717412016e8 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -12,8 +12,8 @@ anyhow = "1.0.80" cached = "0.48.0" chrono.workspace = true clap.workspace = true -clap-verbosity-flag = "2.1.2" -clap_complete = "4.4.10" +clap-verbosity-flag = "2.2.0" +clap_complete = "4.5.1" confy = "0.6.0" directories = "5.0.1" # remove this when stabilized https://doc.rust-lang.org/stable/std/path/fn.absolute.html From 4cd4b6a26de5f70a687b934df7193aa9ba2d46f7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 23:49:05 +0000 Subject: [PATCH 0046/1491] chore(deps): Bump serde_json from 1.0.113 to 1.0.114 (#19909) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.113 to 1.0.114. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.113...v1.0.114) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d87802012a979..1e97558cac813 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8014,9 +8014,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.113" +version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69801b70b1c3dac963ecb03a364ba0ceda9cf60c71cfe475e99864759c8b8a79" +checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ "indexmap 2.2.3", "itoa", diff --git a/Cargo.toml b/Cargo.toml index 4476ecab632a9..e441d35a1a794 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -132,7 +132,7 @@ indexmap = { version = "2.2.3", default-features = false, features = ["serde", " pin-project = { version = "1.1.4", default-features = false } proptest = "1.4" proptest-derive = "0.4.0" -serde_json = { version = "1.0.112", default-features = false, features = ["raw_value", "std"] } +serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } toml = { version = "0.8.10", default-features = false, features = ["display", "parse"] } vrl = { version = "0.11.0", features = ["arbitrary", "cli", "test", "test_framework"] } From c9e24003095f3a6271aa9a3d50c83c3b6f857014 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 23:49:08 +0000 Subject: [PATCH 0047/1491] chore(deps): Bump syn from 2.0.49 to 2.0.50 (#19913) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.49 to 2.0.50. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.49...2.0.50) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... 
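Several of the dependency bumps in this series land as grouped updates (the `aws` and `clap` groups above). Grouping is driven by Dependabot's `groups` setting in `.github/dependabot.yml`; a sketch of what such a stanza could look like, where the schedule and patterns are assumptions rather than the repository's actual file:

```yaml
version: 2
updates:
  - package-ecosystem: "cargo"
    directory: "/"
    schedule:
      interval: "daily"
    groups:
      aws:
        # Crates matching these patterns are bumped together in a single PR.
        patterns:
          - "aws-*"
      clap:
        patterns:
          - "clap"
          - "clap-verbosity-flag"
          - "clap_complete"
```

Grouping keeps related crates (for example, the smithy-rs AWS crates) moving in lockstep instead of generating one pull request per crate.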
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1e97558cac813..3e5b1e60997b8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strum 0.25.0", - "syn 2.0.49", + "syn 2.0.50", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -708,7 +708,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -1467,7 +1467,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -1594,7 +1594,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", "syn_derive", ] @@ -2026,7 +2026,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -2554,7 +2554,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -2626,7 +2626,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -2659,7 +2659,7 @@ checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" dependencies = [ "darling_core 0.20.6", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -2764,7 +2764,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -3045,7 +3045,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -3057,7 +3057,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -3077,7 +3077,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -3479,7 +3479,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -5169,7 +5169,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -5289,7 +5289,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "regex", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -5738,7 +5738,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -5750,7 +5750,7 @@ 
dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -5938,7 +5938,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -6223,7 +6223,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -6311,7 +6311,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -6588,7 +6588,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.78", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -6791,7 +6791,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.49", + "syn 2.0.50", "tempfile", "which 4.4.2", ] @@ -6819,7 +6819,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -7603,7 +7603,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.49", + "syn 2.0.50", "unicode-ident", ] @@ -7998,7 +7998,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8009,7 +8009,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8071,7 +8071,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8144,7 +8144,7 @@ dependencies = [ "darling 0.20.6", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8423,7 +8423,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8613,7 +8613,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8626,7 +8626,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8669,9 +8669,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.49" +version = "2.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "915aea9e586f80826ee59f8453c1101f9d1c4b3964cd2460185ee8e299ada496" +checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", @@ -8687,7 +8687,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -8855,7 +8855,7 @@ checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -9003,7 +9003,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -9239,7 +9239,7 @@ dependencies = [ "proc-macro2 1.0.78", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.49", + "syn 
2.0.50", ] [[package]] @@ -9342,7 +9342,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -9576,7 +9576,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -9606,7 +9606,7 @@ checksum = "291db8a81af4840c10d636e047cac67664e343be44e24dfdbd1492df9a5d3390" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] @@ -10164,7 +10164,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.49", + "syn 2.0.50", "tracing 0.1.40", ] @@ -10177,7 +10177,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.49", + "syn 2.0.50", "vector-config", "vector-config-common", ] @@ -10585,7 +10585,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", "wasm-bindgen-shared", ] @@ -10619,7 +10619,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11064,7 +11064,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.49", + "syn 2.0.50", ] [[package]] From 23ffe8812cd7df603cf3cf310773ee356c96c002 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 21 Feb 2024 23:49:13 +0000 Subject: [PATCH 0048/1491] chore(ci): Bump myrotvorets/set-commit-status-action from 2.0.0 to 2.0.1 (#19924) Bumps [myrotvorets/set-commit-status-action](https://github.com/myrotvorets/set-commit-status-action) from 2.0.0 to 2.0.1. - [Release notes](https://github.com/myrotvorets/set-commit-status-action/releases) - [Commits](https://github.com/myrotvorets/set-commit-status-action/compare/v2.0.0...v2.0.1) --- updated-dependencies: - dependency-name: myrotvorets/set-commit-status-action dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/cli.yml | 4 ++-- .github/workflows/component_features.yml | 4 ++-- .github/workflows/cross.yml | 6 +++--- .github/workflows/deny.yml | 4 ++-- .github/workflows/environment.yml | 4 ++-- .github/workflows/install-sh.yml | 6 +++--- .github/workflows/integration-comment.yml | 4 ++-- .github/workflows/k8s_e2e.yml | 8 ++++---- .github/workflows/misc.yml | 4 ++-- .github/workflows/regression.yml | 6 +++--- .github/workflows/unit_mac.yml | 4 ++-- .github/workflows/unit_windows.yml | 4 ++-- 12 files changed, 29 insertions(+), 29 deletions(-) diff --git a/.github/workflows/cli.yml b/.github/workflows/cli.yml index a75bce8a84d03..0ac6fb3070e02 100644 --- a/.github/workflows/cli.yml +++ b/.github/workflows/cli.yml @@ -17,7 +17,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -55,7 +55,7 @@ jobs: if: always() - name: (PR comment) Set latest commit status as ${{ job.status }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: always() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} diff --git a/.github/workflows/component_features.yml b/.github/workflows/component_features.yml index c441d4bf374d6..41ff4cff96025 100644 --- a/.github/workflows/component_features.yml +++ b/.github/workflows/component_features.yml @@ -29,7 +29,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: github.event_name == 'issue_comment' - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -53,7 +53,7 @@ jobs: - name: (PR comment) Set latest commit status as ${{ job.status }} if: always() && github.event_name == 'issue_comment' - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/cross.yml b/.github/workflows/cross.yml index cbac1658e3d7b..20878c8c7d9f9 100644 --- a/.github/workflows/cross.yml +++ b/.github/workflows/cross.yml @@ -28,7 +28,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -69,7 +69,7 @@ jobs: path: "./target/${{ matrix.target }}/debug/vector" - name: (PR comment) Set latest commit status as failed - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: failure() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} @@ -89,7 +89,7 @@ jobs: id: comment-branch - name: (PR comment) Submit PR result as success - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} diff --git 
a/.github/workflows/deny.yml b/.github/workflows/deny.yml index e42d8bedba666..d3cbc15304e20 100644 --- a/.github/workflows/deny.yml +++ b/.github/workflows/deny.yml @@ -30,7 +30,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -66,7 +66,7 @@ jobs: run: make check-deny - name: (PR comment) Set latest commit status as ${{ job.status }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: always() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml index 671e11e7538d7..20d1a7941724a 100644 --- a/.github/workflows/environment.yml +++ b/.github/workflows/environment.yml @@ -23,7 +23,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -73,7 +73,7 @@ jobs: labels: ${{ steps.meta.outputs.labels }} - name: (PR comment) Set latest commit status as ${{ job.status }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: always() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} diff --git a/.github/workflows/install-sh.yml b/.github/workflows/install-sh.yml index 9b36e527bc530..edc14cd22012b 100644 --- a/.github/workflows/install-sh.yml +++ b/.github/workflows/install-sh.yml @@ -17,7 +17,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -41,7 +41,7 @@ jobs: run: make sync-install - name: (PR comment) Set latest commit status as failed - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: failure() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} @@ -65,7 +65,7 @@ jobs: - name: (PR comment) Set latest commit status as ${{ job.status }} if: github.event_name == 'issue_comment' - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/integration-comment.yml b/.github/workflows/integration-comment.yml index 8cad0cc96049e..f5d0324837f28 100644 --- a/.github/workflows/integration-comment.yml +++ b/.github/workflows/integration-comment.yml @@ -74,7 +74,7 @@ jobs: id: comment-branch - name: (PR comment) Set latest commit status as pending - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -523,7 +523,7 @@ jobs: - name: (PR comment) Submit PR result as success if: github.event_name == 'issue_comment' && env.FAILED != 'true' - uses: 
myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/k8s_e2e.yml b/.github/workflows/k8s_e2e.yml index fd60fb6d74ffa..6eb6624268e12 100644 --- a/.github/workflows/k8s_e2e.yml +++ b/.github/workflows/k8s_e2e.yml @@ -77,7 +77,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -111,7 +111,7 @@ jobs: path: target/artifacts/* - name: (PR comment) Set latest commit status as 'failure' - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: failure() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} @@ -226,7 +226,7 @@ jobs: CARGO_INCREMENTAL: 0 - name: (PR comment) Set latest commit status as failure - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: failure() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} @@ -253,7 +253,7 @@ jobs: - name: (PR comment) Submit PR result as success if: github.event_name == 'issue_comment' && env.FAILED != 'true' - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml index 92aa816baacd3..887c1895df18d 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/misc.yml @@ -17,7 +17,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -54,7 +54,7 @@ jobs: - run: make test-docs - name: (PR comment) Set latest commit status as ${{ job.status }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: always() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index a6156c0d09df4..86c5c148a1b76 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -275,7 +275,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.pr-metadata-comment.outputs.COMPARISON_SHA }} token: ${{ secrets.GITHUB_TOKEN }} @@ -814,7 +814,7 @@ jobs: - name: (PR comment) Submit PR result as failed if: github.event_name == 'issue_comment' && env.FAILED == 'true' - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -823,7 +823,7 @@ jobs: - name: (PR comment) Submit PR result as success if: github.event_name == 'issue_comment' && env.FAILED != 'true' - uses: 
myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/unit_mac.yml b/.github/workflows/unit_mac.yml index 3b42fe12e7c45..0b4281b22cc7a 100644 --- a/.github/workflows/unit_mac.yml +++ b/.github/workflows/unit_mac.yml @@ -18,7 +18,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -54,7 +54,7 @@ jobs: - run: make test-behavior - name: (PR comment) Set latest commit status as ${{ job.status }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: always() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} diff --git a/.github/workflows/unit_windows.yml b/.github/workflows/unit_windows.yml index a06bccb1743d9..9bf6ac453aeec 100644 --- a/.github/workflows/unit_windows.yml +++ b/.github/workflows/unit_windows.yml @@ -16,7 +16,7 @@ jobs: - name: (PR comment) Set latest commit status as pending if: ${{ github.event_name == 'issue_comment' }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 with: sha: ${{ steps.comment-branch.outputs.head_sha }} token: ${{ secrets.GITHUB_TOKEN }} @@ -37,7 +37,7 @@ jobs: - run: make test - name: (PR comment) Set latest commit status as ${{ job.status }} - uses: myrotvorets/set-commit-status-action@v2.0.0 + uses: myrotvorets/set-commit-status-action@v2.0.1 if: always() && github.event_name == 'issue_comment' with: sha: ${{ steps.comment-branch.outputs.head_sha }} From a68a0b5c6a1ddd33682b578163727403dd9ef296 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 21 Feb 2024 16:16:12 -0800 Subject: [PATCH 0049/1491] =?UTF-8?q?chore(dev):=20Update=20CONTRIBUTING.m?= =?UTF-8?q?d=20docs=20regarding=20how=20to=20have=20website=E2=80=A6=20(#1?= =?UTF-8?q?9926)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit chore(dev): Update CONTRIBUTING.md docs regarding how to have website preview created To reduce the number of branch deploys since we have a hard cap on them in Amplify, restrict preview builds to only branches with `website` in the name. Because Amplify automatically picks up created branches, the only mechanism we seem to have to be able to filter them out at the moment is the name. Signed-off-by: Jesse Szwedko --- CONTRIBUTING.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 1815eef06c44b..5e6ad4ef00406 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -102,7 +102,11 @@ outputs to reference the filter, and finally update the outputs of `workflow_cal ### Git Branches _All_ changes must be made in a branch and submitted as [pull requests](#github-pull-requests). -Vector does not adopt any type of branch naming style, but please use something + +If you want your branch to have a website preview build created, include the word `website` in the +branch. + +Otherwise, Vector does not adopt any type of branch naming style, but please use something descriptive of your changes. 
### Git Commits From 3f59886a39321570e459ba65469d933a968876f2 Mon Sep 17 00:00:00 2001 From: Harold Dost Date: Thu, 22 Feb 2024 11:21:04 +0800 Subject: [PATCH 0050/1491] docs: Add pre-requisite for vdev (#19668) While there's no direct dependency shown, it will fail when you attempt to run the initial tests. This will be more direct since tests currently will complain about nextest missing. Co-authored-by: Jesse Szwedko --- vdev/README.md | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/vdev/README.md b/vdev/README.md index 88dc6c9bed54c..befaefaa6f556 100644 --- a/vdev/README.md +++ b/vdev/README.md @@ -13,6 +13,12 @@ Table of Contents: - [CLI](#cli) - [Running Tests](#running-tests) +## Pre-requisites + +This assumes that you have the following tools installed: + +- cargo-nextest - https://nexte.st/ + ## Installation Run the following command from the root of the Vector repository: From 695f847d1711923261acdec0ad029185c7826521 Mon Sep 17 00:00:00 2001 From: Sebastian Tia <75666019+sebastiantia@users.noreply.github.com> Date: Thu, 22 Feb 2024 10:52:44 -0500 Subject: [PATCH 0051/1491] chore(tests): expose test utilities (#19894) * annotate needed funcs * spelling mistake * update doc and include additional helper func * helper func * replace with original unwrap --- src/sinks/http/tests.rs | 42 +++++++++---------------------- src/sinks/util/mod.rs | 2 +- src/sinks/util/test.rs | 50 ++++++++++++++++++++++++++++++++++--- src/test_util/components.rs | 9 +++++++ 4 files changed, 68 insertions(+), 35 deletions(-) diff --git a/src/sinks/http/tests.rs b/src/sinks/http/tests.rs index 790cb9b602d27..363877380c308 100644 --- a/src/sinks/http/tests.rs +++ b/src/sinks/http/tests.rs @@ -1,15 +1,11 @@ //! Unit tests for the `http` sink. -use std::{ - io::{BufRead, BufReader}, - sync::{atomic, Arc}, -}; +use std::sync::{atomic, Arc}; use bytes::{Buf, Bytes}; use flate2::{read::MultiGzDecoder, read::ZlibDecoder}; -use futures::{channel::mpsc, stream}; +use futures::stream; use headers::{Authorization, HeaderMapExt}; -use http::request::Parts; use hyper::{Body, Method, Response, StatusCode}; use serde::{de, Deserialize}; use vector_lib::codecs::{ @@ -27,12 +23,14 @@ use crate::{ util::{ encoding::Encoder as _, http::HeaderValidationError, - test::{build_test_server, build_test_server_generic, build_test_server_status}, + test::{ + build_test_server, build_test_server_generic, build_test_server_status, + get_received_gzip, + }, }, }, test_util::{ - components, - components::{COMPONENT_ERROR_TAGS, HTTP_SINK_TAGS}, + components::{self, COMPONENT_ERROR_TAGS, HTTP_SINK_TAGS}, next_addr, random_lines_with_stream, }, }; @@ -290,7 +288,7 @@ async fn retries_on_no_connection() { assert_eq!(receiver.try_recv(), Ok(BatchStatus::Delivered)); - let output_lines = get_received(rx, |parts| { + let output_lines = get_received_gzip(rx, |parts| { assert_eq!(Method::POST, parts.method); assert_eq!("/frames", parts.uri.path()); }) @@ -336,7 +334,7 @@ async fn retries_on_temporary_error() { assert_eq!(receiver.try_recv(), Ok(BatchStatus::Delivered)); - let output_lines = get_received(rx, |parts| { + let output_lines = get_received_gzip(rx, |parts| { assert_eq!(Method::POST, parts.method); assert_eq!("/frames", parts.uri.path()); }) @@ -370,7 +368,8 @@ async fn fails_on_permanent_error() { assert_eq!(receiver.try_recv(), Ok(BatchStatus::Rejected)); - let output_lines = get_received(rx, |_| unreachable!("There should be no lines")).await; + let output_lines = + get_received_gzip(rx, |_| unreachable!("There should be no 
lines")).await; assert!(output_lines.is_empty()); }) .await; @@ -541,23 +540,6 @@ where } } -async fn get_received( - rx: mpsc::Receiver<(Parts, Bytes)>, - assert_parts: impl Fn(Parts), -) -> Vec { - rx.flat_map(|(parts, body)| { - assert_parts(parts); - stream::iter(BufReader::new(MultiGzDecoder::new(body.reader())).lines()) - }) - .map(Result::unwrap) - .map(|line| { - let val: serde_json::Value = serde_json::from_str(&line).unwrap(); - val.get("message").unwrap().as_str().unwrap().to_owned() - }) - .collect::>() - .await -} - async fn run_sink(extra_config: &str, assert_parts: impl Fn(http::request::Parts)) { let num_lines = 1000; @@ -573,7 +555,7 @@ async fn run_sink(extra_config: &str, assert_parts: impl Fn(http::request::Parts assert_eq!(receiver.try_recv(), Ok(BatchStatus::Delivered)); - let output_lines = get_received(rx, assert_parts).await; + let output_lines = get_received_gzip(rx, assert_parts).await; assert_eq!(num_lines, output_lines.len()); assert_eq!(input_lines, output_lines); diff --git a/src/sinks/util/mod.rs b/src/sinks/util/mod.rs index 464401d38fdcd..c71b49163f0d2 100644 --- a/src/sinks/util/mod.rs +++ b/src/sinks/util/mod.rs @@ -20,7 +20,7 @@ pub mod snappy; pub mod socket_bytes_sink; pub mod statistic; pub mod tcp; -#[cfg(test)] +#[cfg(any(test, feature = "test-utils"))] pub mod test; pub mod udp; #[cfg(all(any(feature = "sinks-socket", feature = "sinks-statsd"), unix))] diff --git a/src/sinks/util/test.rs b/src/sinks/util/test.rs index ed8a4bbd5adca..c029ce8d7481a 100644 --- a/src/sinks/util/test.rs +++ b/src/sinks/util/test.rs @@ -1,13 +1,18 @@ -use std::net::SocketAddr; - -use bytes::Bytes; -use futures::{channel::mpsc, FutureExt, SinkExt, TryFutureExt}; +use bytes::{Buf, Bytes}; +use flate2::read::{MultiGzDecoder, ZlibDecoder}; +use futures::{channel::mpsc, stream, FutureExt, SinkExt, TryFutureExt}; +use futures_util::StreamExt; +use http::request::Parts; use hyper::{ body::HttpBody, service::{make_service_fn, service_fn}, Body, Request, Response, Server, StatusCode, }; use serde::Deserialize; +use std::{ + io::{BufRead, BufReader}, + net::SocketAddr, +}; use stream_cancel::{Trigger, Tripwire}; use crate::{ @@ -106,3 +111,40 @@ where (rx, trigger, server) } + +pub async fn get_received_gzip( + rx: mpsc::Receiver<(Parts, Bytes)>, + assert_parts: impl Fn(Parts), +) -> Vec { + get_received(rx, assert_parts, |body| MultiGzDecoder::new(body.reader())).await +} + +pub async fn get_received_zlib( + rx: mpsc::Receiver<(Parts, Bytes)>, + assert_parts: impl Fn(Parts), +) -> Vec { + get_received(rx, assert_parts, |body| ZlibDecoder::new(body.reader())).await +} + +async fn get_received( + rx: mpsc::Receiver<(Parts, Bytes)>, + assert_parts: impl Fn(Parts), + decoder_maker: impl Fn(Bytes) -> D, +) -> Vec +where + D: std::io::Read, +{ + rx.flat_map(|(parts, body)| { + assert_parts(parts); + let decoder = decoder_maker(body); + let reader = BufReader::new(decoder); + stream::iter(reader.lines()) + }) + .map(Result::unwrap) + .map(|line| { + let val: serde_json::Value = serde_json::from_str(&line).unwrap(); + val.get("message").unwrap().as_str().unwrap().to_owned() + }) + .collect::>() + .await +} diff --git a/src/test_util/components.rs b/src/test_util/components.rs index 8b4cdf46158a6..7fff28c272c3b 100644 --- a/src/test_util/components.rs +++ b/src/test_util/components.rs @@ -53,6 +53,7 @@ pub const FILE_SOURCE_TAGS: [&str; 1] = ["file"]; /// The most basic set of tags for sinks, regardless of whether or not they push data or have it pulled out. 
pub const SINK_TAGS: [&str; 1] = ["protocol"]; +/// The set of tags for sinks measuring data volume with source and service identification. pub const DATA_VOLUME_SINK_TAGS: [&str; 2] = ["source", "service"]; /// The standard set of tags for all sinks that write a file. @@ -117,6 +118,7 @@ pub static SINK_TESTS: Lazy = Lazy::new(|| { } }); +/// The component test specification for sinks with source and service identification. pub static DATA_VOLUME_SINK_TESTS: Lazy = Lazy::new(|| { ComponentTests { events: &["BytesSent", "EventsSent"], // EventsReceived is emitted in the topology @@ -339,6 +341,7 @@ where run_and_assert_source_advanced(source, setup, timeout, event_count, &SOURCE_TESTS, tags).await } +/// Runs and asserts source test specifications with configurations. pub async fn run_and_assert_source_advanced( source: SC, setup: impl FnOnce(&mut SourceContext), @@ -407,6 +410,7 @@ where .await } +/// Runs and asserts compliance for transforms. pub async fn assert_transform_compliance(f: impl Future) -> T { init_test(); @@ -428,6 +432,7 @@ pub async fn assert_sink_compliance(tags: &[&str], f: impl Future result } +/// Runs and asserts sink compliance. pub async fn run_and_assert_sink_compliance(sink: VectorSink, events: S, tags: &[&str]) where S: Stream + Send, @@ -451,6 +456,7 @@ pub async fn assert_data_volume_sink_compliance(tags: &[&str], f: impl Future result } +/// Runs and asserts compliance for data volume sink tests. pub async fn run_and_assert_data_volume_sink_compliance( sink: VectorSink, events: S, @@ -466,6 +472,7 @@ pub async fn run_and_assert_data_volume_sink_compliance( .await; } +/// Asserts compliance for nonsending sink tests. pub async fn assert_nonsending_sink_compliance(tags: &[&str], f: impl Future) -> T { init_test(); @@ -476,6 +483,7 @@ pub async fn assert_nonsending_sink_compliance(tags: &[&str], f: impl Future< result } +/// Runs and asserts compliance for nonsending sink tests. pub async fn run_and_assert_nonsending_sink_compliance( sink: VectorSink, events: S, @@ -502,6 +510,7 @@ pub async fn assert_sink_error(tags: &[&str], f: impl Future) -> result } +/// Runs and asserts sink error compliance. 
pub async fn run_and_assert_sink_error(sink: VectorSink, events: S, tags: &[&str]) where S: Stream + Send, From a6da1d8f4357513161520ae4c9fac96859d7de24 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 22 Feb 2024 09:47:41 -0700 Subject: [PATCH 0052/1491] feat(component validation): add sink error path validation + multi config (#18062) * add fix and small refactor * fix compilation errors * 3 ticks * dont compute expected metrics in validator * cleanup * cleanup * clippy * feedback tz: sent_eventssssss * feedback tz: fix telemetry shutdown finishing logic * 3 ticks * small reorg to add sinks * mini refactor of the component spec validators * attempt to set expected values from the resource * feedback tz- from not try_from * back to 3 ticks * fix incorrect expected values * Even more reduction * clippy * add the discarded events total check * workaround the new sync issues * multi config support * cleanup * check events * partial feedback * thought i removed that * use ref * feedback: dont introduce PassThroughFail variant * feedback: adjust enum variant names for clarity * feedback: no idea what I was thinking with `input_codec` * spell check * fr * feedback- update docs --- src/components/validation/mod.rs | 100 ++++++++++++++---- src/components/validation/resources/event.rs | 73 +++++++++---- src/components/validation/runner/config.rs | 16 ++- src/components/validation/runner/mod.rs | 42 +++++--- src/components/validation/test_case.rs | 1 + .../validators/component_spec/mod.rs | 1 + src/sinks/http/config.rs | 98 ++++++++++++----- src/sources/http_client/client.rs | 9 +- src/sources/http_server.rs | 9 +- tests/validation/components/sinks/http.yaml | 8 ++ .../components/sources/http_client.yaml | 3 +- .../components/sources/http_server.yaml | 3 +- 12 files changed, 277 insertions(+), 86 deletions(-) diff --git a/src/components/validation/mod.rs b/src/components/validation/mod.rs index f1ef68609e59c..d195b48ea99b0 100644 --- a/src/components/validation/mod.rs +++ b/src/components/validation/mod.rs @@ -57,6 +57,51 @@ pub enum ComponentConfiguration { Sink(BoxedSink), } +/// Component configuration for a test case. +#[derive(Clone)] +pub struct ComponentTestCaseConfig { + config: ComponentConfiguration, + /// If specified, this name must match the `config_name` field of at least one of the test case events. + test_case: Option, + external_resource: Option, +} + +impl ComponentTestCaseConfig { + pub fn from_source>( + config: C, + test_case: Option, + external_resource: Option, + ) -> Self { + Self { + config: ComponentConfiguration::Source(config.into()), + test_case, + external_resource, + } + } + pub fn from_transform>( + config: C, + test_case: Option, + external_resource: Option, + ) -> Self { + Self { + config: ComponentConfiguration::Transform(config.into()), + test_case, + external_resource, + } + } + pub fn from_sink>( + config: C, + test_case: Option, + external_resource: Option, + ) -> Self { + Self { + config: ComponentConfiguration::Sink(config.into()), + test_case, + external_resource, + } + } +} + /// Configuration for validating a component. 
/// /// This type encompasses all of the required information for configuring and validating a @@ -66,46 +111,45 @@ pub enum ComponentConfiguration { pub struct ValidationConfiguration { component_name: &'static str, component_type: ComponentType, - component_configuration: ComponentConfiguration, - external_resource: Option, + /// There may be only one `ComponentTestCaseConfig` necessary to execute all test cases, but some cases + /// require more advanced configuration in order to hit the code path desired. + component_configurations: Vec, } impl ValidationConfiguration { /// Creates a new `ValidationConfiguration` for a source. - pub fn from_source>( + pub fn from_source( component_name: &'static str, - config: C, - external_resource: Option, + component_configurations: Vec, ) -> Self { Self { component_name, component_type: ComponentType::Source, - component_configuration: ComponentConfiguration::Source(config.into()), - external_resource, + component_configurations, } } /// Creates a new `ValidationConfiguration` for a transform. - pub fn from_transform(component_name: &'static str, config: impl Into) -> Self { + pub fn from_transform( + component_name: &'static str, + component_configurations: Vec, + ) -> Self { Self { component_name, component_type: ComponentType::Transform, - component_configuration: ComponentConfiguration::Transform(config.into()), - external_resource: None, + component_configurations, } } /// Creates a new `ValidationConfiguration` for a sink. - pub fn from_sink>( + pub fn from_sink( component_name: &'static str, - config: C, - external_resource: Option, + component_configurations: Vec, ) -> Self { Self { component_name, component_type: ComponentType::Sink, - component_configuration: ComponentConfiguration::Sink(config.into()), - external_resource, + component_configurations, } } @@ -120,13 +164,31 @@ impl ValidationConfiguration { } /// Gets the configuration of the component. - pub fn component_configuration(&self) -> ComponentConfiguration { - self.component_configuration.clone() + pub fn component_configurations(&self) -> Vec { + self.component_configurations.clone() + } + + fn get_comp_test_case(&self, test_case: Option<&String>) -> Option { + let empty = String::from(""); + let test_case = test_case.unwrap_or(&empty); + self.component_configurations + .clone() + .into_iter() + .find(|c| c.test_case.as_ref().unwrap_or(&String::from("")) == test_case) + } + + /// Gets the configuration of the component. + pub fn component_configuration_for_test_case( + &self, + test_case: Option<&String>, + ) -> Option { + self.get_comp_test_case(test_case).map(|c| c.config) } /// Gets the external resource definition for validating the component, if any. 
- pub fn external_resource(&self) -> Option { - self.external_resource.clone() + pub fn external_resource(&self, test_case: Option<&String>) -> Option { + self.get_comp_test_case(test_case) + .and_then(|c| c.external_resource) } } diff --git a/src/components/validation/resources/event.rs b/src/components/validation/resources/event.rs index 96870f6b773af..343466ee9e51c 100644 --- a/src/components/validation/resources/event.rs +++ b/src/components/validation/resources/event.rs @@ -1,5 +1,6 @@ use bytes::BytesMut; use serde::Deserialize; +use serde_json::Value; use snafu::Snafu; use tokio_util::codec::Encoder as _; @@ -27,7 +28,19 @@ pub enum RawTestEvent { /// /// For transforms and sinks, generally, the only way to cause an error is if the event itself /// is malformed in some way, which can be achieved without this test event variant. - Modified { modified: bool, event: EventData }, + AlternateEncoder { fail_encoding_of: EventData }, + + /// The event is created, and the specified field is added to it. + /// + /// This allows the ability to hit code paths where some codecs require specific fields to be of specific + /// types, thus allowing us to encode into the input runner without error, but encoding in the component + /// under test can be set up to fail. + WithField { + event: EventData, + name: String, + value: Value, + fail: Option, + }, } #[derive(Clone, Debug, Deserialize)] @@ -52,6 +65,9 @@ impl EventData { /// metrics collection is based on the same event. Namely, one issue that can arise from creating the event /// from the event data twice (once for the expected and once for actual), it can result in a timestamp in /// the event which may or may not have the same millisecond precision as it's counterpart. +/// +/// For transforms and sinks, generally, the only way to cause an error is if the event itself +/// is malformed in some way, which can be achieved without this test event variant. #[derive(Clone, Debug, Deserialize)] #[serde(from = "RawTestEvent")] #[serde(untagged)] @@ -59,16 +75,16 @@ pub enum TestEvent { /// The event is used, as-is, without modification. Passthrough(Event), - /// The event is potentially modified by the external resource. - /// - /// The modification made is dependent on the external resource, but this mode is made available - /// for when a test case wants to exercise the failure path, but cannot cause a failure simply - /// by constructing the event in a certain way i.e. adding an invalid field, or removing a - /// required field, or using an invalid field value, and so on. + /// The event is encoded using an encoding that differs from the component's + /// configured encoding, which should cause an error when the event is decoded. + FailWithAlternateEncoder(Event), + + /// The event has an additional field injected prior to encoding, which should cause + /// an error when the event is decoded. /// - /// For transforms and sinks, generally, the only way to cause an error is if the event itself - /// is malformed in some way, which can be achieved without this test event variant. - Modified { modified: bool, event: Event }, + /// This is useful for testing encodings that have strict schemas and cannot + /// handle arbitrary fields or differing data types for certain fields. + FailWithInjectedField(Event), } impl TestEvent { @@ -76,21 +92,25 @@ impl TestEvent { pub fn into_event(self) -> Event { match self { Self::Passthrough(event) => event, - Self::Modified { event, .. 
} => event, + Self::FailWithAlternateEncoder(event) => event, + Self::FailWithInjectedField(event) => event, } } pub fn get_event(&mut self) -> &mut Event { match self { Self::Passthrough(event) => event, - Self::Modified { event, .. } => event, + Self::FailWithAlternateEncoder(event) => event, + Self::FailWithInjectedField(event) => event, } } + /// (should_fail, event) pub fn get(self) -> (bool, Event) { match self { Self::Passthrough(event) => (false, event), - Self::Modified { modified, event } => (modified, event), + Self::FailWithAlternateEncoder(event) => (true, event), + Self::FailWithInjectedField(event) => (true, event), } } } @@ -104,10 +124,25 @@ impl From for TestEvent { RawTestEvent::Passthrough(event_data) => { TestEvent::Passthrough(event_data.into_event()) } - RawTestEvent::Modified { modified, event } => TestEvent::Modified { - modified, - event: event.into_event(), - }, + RawTestEvent::AlternateEncoder { + fail_encoding_of: event_data, + } => TestEvent::FailWithAlternateEncoder(event_data.into_event()), + RawTestEvent::WithField { + event, + name, + value, + fail, + } => { + let mut event = event.into_event(); + let log_event = event.as_mut_log(); + log_event.insert(name.as_str(), value); + + if fail.unwrap_or_default() { + TestEvent::FailWithInjectedField(event) + } else { + TestEvent::Passthrough(event) + } + } } } } @@ -118,13 +153,13 @@ pub fn encode_test_event( event: TestEvent, ) { match event { - TestEvent::Passthrough(event) => { + TestEvent::Passthrough(event) | TestEvent::FailWithInjectedField(event) => { // Encode the event normally. encoder .encode(event, buf) .expect("should not fail to encode input event"); } - TestEvent::Modified { event, .. } => { + TestEvent::FailWithAlternateEncoder(event) => { // This is a little fragile, but we check what serializer this encoder uses, and based // on `Serializer::supports_json`, we choose an opposing codec. For example, if the // encoder supports JSON, we'll use a serializer that doesn't support JSON, and vise diff --git a/src/components/validation/runner/config.rs b/src/components/validation/runner/config.rs index ed721fea929f0..f2adb90765ce1 100644 --- a/src/components/validation/runner/config.rs +++ b/src/components/validation/runner/config.rs @@ -24,9 +24,17 @@ pub struct TopologyBuilder { impl TopologyBuilder { /// Creates a component topology for the given component configuration. - pub fn from_configuration(configuration: &ValidationConfiguration) -> Self { - let component_configuration = configuration.component_configuration(); - match component_configuration { + pub fn from_configuration( + configuration: &ValidationConfiguration, + config_name: Option<&String>, + ) -> Result { + let component_configuration = configuration + .component_configuration_for_test_case(config_name) + .ok_or(format!( + "No test case name defined for configuration {:?}.", + config_name + ))?; + Ok(match component_configuration { ComponentConfiguration::Source(source) => { debug_assert_eq!(configuration.component_type(), ComponentType::Source); Self::from_source(source) @@ -39,7 +47,7 @@ impl TopologyBuilder { debug_assert_eq!(configuration.component_type(), ComponentType::Sink); Self::from_sink(sink) } - } + }) } /// Creates a component topology for validating a source. 
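To make the shape of the new multi-configuration API concrete before the runner changes that follow, here is a condensed sketch of the registration pattern that the `http` sink hunk later in this patch adopts. The `happy_config`, `sad_config`, and `*_external_resource` bindings are placeholders for whatever the component under test needs; only the `ValidationConfiguration::from_sink` and `ComponentTestCaseConfig::from_sink` calls come from the API added above.

```rust
// Sketch only: register a default configuration (config_name `None`) plus a
// named failure-path configuration. A YAML test case opts into the named one
// with `config_name: encoding_error`; test cases that set no `config_name`
// resolve to the default entry.
ValidationConfiguration::from_sink(
    Self::NAME,
    vec![
        // Used by every test case that does not name a config.
        ComponentTestCaseConfig::from_sink(happy_config, None, Some(happy_external_resource)),
        // Used only by test cases declaring `config_name: encoding_error`.
        ComponentTestCaseConfig::from_sink(
            sad_config,
            Some("encoding_error".to_owned()),
            Some(sad_external_resource),
        ),
    ],
)
```

The rationale for keying selection on an optional name rather than duplicating whole test suites is the one given in the doc comment above: a single `ComponentTestCaseConfig` usually suffices, but some error paths are only reachable with a deliberately misconfigured component, and the name lets one YAML file mix both kinds of case.
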
diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs index 5014edc5aaa40..91048a1e4438e 100644 --- a/src/components/validation/runner/mod.rs +++ b/src/components/validation/runner/mod.rs @@ -225,7 +225,10 @@ impl Runner { // We then finalize the topology builder to get our actual `ConfigBuilder`, as well as // any controlled edges (channel sender/receiver to the aforementioned filler // components) and a telemetry client for collecting internal telemetry. - let topology_builder = TopologyBuilder::from_configuration(&self.configuration); + let topology_builder = TopologyBuilder::from_configuration( + &self.configuration, + test_case.config_name.as_ref(), + )?; let (config_builder, controlled_edges, telemetry_collector) = topology_builder .finalize( &input_task_coordinator, @@ -252,6 +255,7 @@ impl Runner { // controlled output edge, which means we then need a server task listening for the // events sent by that sink. let (runner_input, runner_output, maybe_runner_encoder) = build_external_resource( + test_case.config_name.as_ref(), &self.configuration, &input_task_coordinator, &output_task_coordinator, @@ -308,14 +312,14 @@ impl Runner { input_tx, &runner_metrics, maybe_runner_encoder.as_ref().cloned(), - self.configuration.component_type == ComponentType::Source, + self.configuration.component_type, ); let output_driver = spawn_output_driver( output_rx, &runner_metrics, maybe_runner_encoder.as_ref().cloned(), - self.configuration.component_type == ComponentType::Sink, + self.configuration.component_type, ); // At this point, the component topology is running, and all input/output/telemetry @@ -364,6 +368,7 @@ impl Runner { name: test_name, expectation, events: input_events, + .. } = test_case; let telemetry_events = telemetry_collector.collect().await; @@ -432,16 +437,21 @@ fn load_component_test_cases(test_case_data_path: PathBuf) -> Result, configuration: &ValidationConfiguration, input_task_coordinator: &TaskCoordinator, output_task_coordinator: &TaskCoordinator, runner_metrics: &Arc>, ) -> Result<(RunnerInput, RunnerOutput, Option>), vector_lib::Error> { let component_type = configuration.component_type(); - let maybe_external_resource = configuration.external_resource(); - let maybe_encoder = maybe_external_resource + let maybe_external_resource = configuration.external_resource(test_case); + + let resource_codec = maybe_external_resource .as_ref() - .map(|resource| resource.codec.into_encoder()); + .map(|resource| resource.codec.clone()); + + let maybe_encoder = resource_codec.as_ref().map(|codec| codec.into_encoder()); + match component_type { ComponentType::Source => { // As an external resource for a source, we create a channel that the validation runner @@ -535,7 +545,7 @@ fn spawn_input_driver( input_tx: Sender, runner_metrics: &Arc>, mut maybe_encoder: Option>, - is_source: bool, + component_type: ComponentType, ) -> JoinHandle<()> { let input_runner_metrics = Arc::clone(runner_metrics); @@ -556,13 +566,13 @@ fn spawn_input_driver( // the controlled edge (vector source) adds metadata to the event when it is received. // thus we need to add it here so the expected values for the comparisons on transforms // and sinks are accurate. 
- if !is_source { + if component_type != ComponentType::Source { if let Event::Log(ref mut log) = input_event.get_event() { log_namespace.insert_standard_vector_source_metadata(log, "vector", now); } } - let (modified, event) = input_event.clone().get(); + let (failure_case, event) = input_event.clone().get(); if let Some(encoder) = maybe_encoder.as_mut() { let mut buffer = BytesMut::new(); @@ -572,9 +582,15 @@ fn spawn_input_driver( } // account for failure case - if modified { + if failure_case { input_runner_metrics.errors_total += 1; - } else { + // TODO: this assumption may need to be made configurable at some point + if component_type == ComponentType::Sink { + input_runner_metrics.discarded_events_total += 1; + } + } + + if !failure_case || component_type == ComponentType::Sink { input_runner_metrics.sent_events_total += 1; // The event is wrapped in a Vec to match the actual event storage in @@ -591,7 +607,7 @@ fn spawn_output_driver( mut output_rx: Receiver>, runner_metrics: &Arc>, maybe_encoder: Option>, - is_sink: bool, + component_type: ComponentType, ) -> JoinHandle> { let output_runner_metrics = Arc::clone(runner_metrics); @@ -605,7 +621,7 @@ fn spawn_output_driver( let mut output_runner_metrics = output_runner_metrics.lock().await; for output_event in events { - if !is_sink { + if component_type != ComponentType::Sink { // The event is wrapped in a Vec to match the actual event storage in // the real topology output_runner_metrics.received_event_bytes_total += diff --git a/src/components/validation/test_case.rs b/src/components/validation/test_case.rs index 360c6513161b0..a281f6e5883f0 100644 --- a/src/components/validation/test_case.rs +++ b/src/components/validation/test_case.rs @@ -26,6 +26,7 @@ pub enum TestCaseExpectation { #[derive(Deserialize)] pub struct TestCase { pub name: String, + pub config_name: Option, pub expectation: TestCaseExpectation, pub events: Vec, } diff --git a/src/components/validation/validators/component_spec/mod.rs b/src/components/validation/validators/component_spec/mod.rs index 799f27a394de3..3d055ae6993ce 100644 --- a/src/components/validation/validators/component_spec/mod.rs +++ b/src/components/validation/validators/component_spec/mod.rs @@ -204,6 +204,7 @@ fn filter_events_by_metric_and_component<'a>( }) .filter(|&m| { if m.name() == metric.to_string() { + info!("{}", m); if let Some(tags) = m.tags() { if tags.get("component_id").unwrap_or("") == component_id { return true; diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index cf6d7264fbe32..62ee534ef1e6f 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -5,11 +5,12 @@ use hyper::Body; use indexmap::IndexMap; use vector_lib::codecs::{ encoding::{Framer, Serializer}, - CharacterDelimitedEncoder, + CharacterDelimitedEncoder, GelfSerializerConfig, }; use crate::{ codecs::{EncodingConfigWithFraming, SinkType}, + components::validation::ComponentTestCaseConfig, http::{Auth, HttpClient, MaybeAuth}, sinks::{ prelude::*, @@ -311,33 +312,80 @@ impl ValidatableComponent for HttpSinkConfig { use std::str::FromStr; use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; - let config = Self { - uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") - .expect("should never fail to parse"), - method: HttpMethod::Post, - encoding: EncodingConfigWithFraming::new( - None, - JsonSerializerConfig::new(MetricTagValues::Full).into(), - Transformer::default(), - ), - auth: None, - headers: None, - compression: Compression::default(), - batch: 
BatchConfig::default(), - request: RequestConfig::default(), - tls: None, - acknowledgements: AcknowledgementsConfig::default(), - payload_prefix: String::new(), - payload_suffix: String::new(), - }; + let happy_encoder = EncodingConfigWithFraming::new( + None, + JsonSerializerConfig::new(MetricTagValues::Full).into(), + Transformer::default(), + ); + + fn get_config(encoding: EncodingConfigWithFraming) -> HttpSinkConfig { + HttpSinkConfig { + uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") + .expect("should never fail to parse"), + method: HttpMethod::Post, + encoding, + auth: None, + headers: None, + compression: Compression::default(), + batch: BatchConfig::default(), + request: RequestConfig::default(), + tls: None, + acknowledgements: AcknowledgementsConfig::default(), + payload_prefix: String::new(), + payload_suffix: String::new(), + } + } - let external_resource = ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts(config.uri.uri.clone(), Some(config.method.into())), - config.encoding.clone(), + fn get_external_resource( + config: &HttpSinkConfig, + encoding: Option, + ) -> ExternalResource { + ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts(config.uri.uri.clone(), Some(config.method.into())), + if let Some(encoding) = encoding { + encoding + } else { + config.encoding.clone() + }, + ) + } + + let happy_config = get_config(happy_encoder.clone()); + + let happy_external_resource = get_external_resource(&happy_config, None); + + // this config uses the Gelf serializer, which requires the "level" field to + // be an integer + let sad_config = get_config(EncodingConfigWithFraming::new( + None, + GelfSerializerConfig::new().into(), + Transformer::default(), + )); + + let sad_external_resource = get_external_resource( + &happy_config, + // the external resource needs to use an encoder that actually works, in order to + // get the event into the topology successfully + Some(happy_encoder), ); - ValidationConfiguration::from_sink(Self::NAME, config, Some(external_resource)) + ValidationConfiguration::from_sink( + Self::NAME, + vec![ + ComponentTestCaseConfig::from_sink( + happy_config, + None, + Some(happy_external_resource), + ), + // this config only runs with the test case "encoding_error" in the yaml file. 
+ ComponentTestCaseConfig::from_sink( + sad_config, + Some("encoding_error".to_owned()), + Some(sad_external_resource), + ), + ], + ) } } diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs index 68304e40ca6f6..3093847904101 100644 --- a/src/sources/http_client/client.rs +++ b/src/sources/http_client/client.rs @@ -253,7 +253,14 @@ impl ValidatableComponent for HttpClientConfig { config.get_decoding_config(None), ); - ValidationConfiguration::from_source(Self::NAME, config, Some(external_resource)) + ValidationConfiguration::from_source( + Self::NAME, + vec![ComponentTestCaseConfig::from_source( + config, + None, + Some(external_resource), + )], + ) } } diff --git a/src/sources/http_server.rs b/src/sources/http_server.rs index a752358ad968c..255c4231d7bfd 100644 --- a/src/sources/http_server.rs +++ b/src/sources/http_server.rs @@ -292,7 +292,14 @@ impl ValidatableComponent for SimpleHttpConfig { .expect("should not fail to get decoding config"), ); - ValidationConfiguration::from_source(Self::NAME, config, Some(external_resource)) + ValidationConfiguration::from_source( + Self::NAME, + vec![ComponentTestCaseConfig::from_source( + config, + None, + Some(external_resource), + )], + ) } } diff --git a/tests/validation/components/sinks/http.yaml b/tests/validation/components/sinks/http.yaml index 7f7c18db35ad4..3d3525766d74f 100644 --- a/tests/validation/components/sinks/http.yaml +++ b/tests/validation/components/sinks/http.yaml @@ -4,3 +4,11 @@ - simple message 1 - simple message 2 - simple message 3 +- name: sad path + config_name: encoding_error + expectation: failure + events: + - event: simple message with the invalid data type for encoder + name: level + value: "1" + fail: true diff --git a/tests/validation/components/sources/http_client.yaml b/tests/validation/components/sources/http_client.yaml index 08424a9cde089..437a7d680b566 100644 --- a/tests/validation/components/sources/http_client.yaml +++ b/tests/validation/components/sources/http_client.yaml @@ -9,5 +9,4 @@ events: - simple message 1 - simple message 2 - - modified: true - event: simple message with the wrong encoding + - fail_encoding_of: simple message with the wrong encoding diff --git a/tests/validation/components/sources/http_server.yaml b/tests/validation/components/sources/http_server.yaml index 08424a9cde089..437a7d680b566 100644 --- a/tests/validation/components/sources/http_server.yaml +++ b/tests/validation/components/sources/http_server.yaml @@ -9,5 +9,4 @@ events: - simple message 1 - simple message 2 - - modified: true - event: simple message with the wrong encoding + - fail_encoding_of: simple message with the wrong encoding From bb1b8571070f38f7eee385dad92807249236d063 Mon Sep 17 00:00:00 2001 From: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> Date: Fri, 23 Feb 2024 22:46:24 +0800 Subject: [PATCH 0053/1491] feat(sources): Initial pulsar source (#18475) * feat(pulsar): initial Pulsar source implementation - WORK IN PROGRESS! - add initial Pulsar source config - add initial Pulsar consumer implementation Tested: - Tested locally simple scenario with receiving a message from a topic - it works! 
* feat(pulsar): Pulsar source hardening - implement message acknowledgment - implement proper shutdown - make consumer_name and subscription_name optional in config Tested: - Local run * fix: refactor pulsar events - move Pulsar events to the dedicated file Tested: - Local build * feat(pulsar): more features - enable compression algorithms in pulsar-rs crate - now pulsar source is able to decompress payloads - add batch size configuration Tested: - Local build * feat(pulsar): another bunch of features - add priority_level support - add dead letter queue policy support - add auth support Tested: - Local build * feat: add pulsar test - add pulsar source test (similar to pulsar sink test) Tested: - Local run * docs: add initial Pulsar source docs - add initial version of Pulsar source documentation. I am almost sure it's incomplete * fix: cue fmt * fix: clippy warns * feat: continue work on Pulsar source * feat: add new internal events * fix(pulsar-source): build errors Signed-off-by: tianyue * docs(pulsar): add example value for configure option Signed-off-by: tianyue * chore(pulsar source): move create_consumer into PulsarSourceConfig Signed-off-by: tianyue * docs(pulsar source): enriching the doc of codec option Signed-off-by: tianyue * chore(pulsar source): refactor with parse_message fuction Signed-off-by: tianyue * chore(pulsar source): add more metadata Signed-off-by: tianyue * chore(pulsar source): introduce PulsarEventsReceived Signed-off-by: tianyue * chore(pulsar source): optimize pulsar-integration-tests Signed-off-by: tianyue * chore(pulsar): Update .github/semantic.yml Signed-off-by: tianyue * chore(pulsar source): formatting the code Signed-off-by: tianyue * docs(pulsar source): fix typos Signed-off-by: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> * docs(pulsar source): convert whitespace to tab Signed-off-by: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> * docs(pulsar source): fix docs build error Signed-off-by: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> * chore(pulsar source): replace PulsarEventsReceived with EventsReceived Signed-off-by: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> * chore(pulsar source): replace error metrics with registered metrics Signed-off-by: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> * Applied feedback Signed-off-by: Stephen Wakely * Add pulsar to datadog service Signed-off-by: Stephen Wakely * Slight formatting Signed-off-by: Stephen Wakely * Clippy Signed-off-by: Stephen Wakely * cue fmt Signed-off-by: Jesse Szwedko * Feedback from Doug Signed-off-by: Stephen Wakely * Remove encode_errors_total Signed-off-by: Stephen Wakely * Update src/sources/pulsar.rs * Added to changelog Signed-off-by: Stephen Wakely * Spelling Signed-off-by: Stephen Wakely * Component docs Signed-off-by: Stephen Wakely --------- Signed-off-by: tianyue Signed-off-by: WarmSnowy <17583220+WarmSnowy@users.noreply.github.com> Signed-off-by: Stephen Wakely Signed-off-by: Jesse Szwedko Co-authored-by: Alexander Zaitsev Co-authored-by: Stephen Wakely Co-authored-by: Jesse Szwedko Co-authored-by: Doug Smith --- .github/semantic.yml | 1 + Cargo.toml | 4 +- changelog.d/pulsar_source.feature.md | 3 + src/internal_events/mod.rs | 4 +- src/internal_events/pulsar.rs | 84 ++- src/sinks/datadog/metrics/encoder.rs | 5 +- src/sources/mod.rs | 2 + src/sources/pulsar.rs | 609 ++++++++++++++++++ .../reference/configuration/sources/pulsar.md | 14 + .../components/sources/base/pulsar.cue | 443 +++++++++++++ 
.../reference/components/sources/pulsar.cue | 167 +++++ website/data/redirects.yaml | 1 + 12 files changed, 1328 insertions(+), 9 deletions(-) create mode 100644 changelog.d/pulsar_source.feature.md create mode 100644 src/sources/pulsar.rs create mode 100644 website/content/en/docs/reference/configuration/sources/pulsar.md create mode 100644 website/cue/reference/components/sources/base/pulsar.cue create mode 100644 website/cue/reference/components/sources/pulsar.cue diff --git a/.github/semantic.yml b/.github/semantic.yml index 84340954ea7ba..f25e763d2ec6e 100644 --- a/.github/semantic.yml +++ b/.github/semantic.yml @@ -158,6 +158,7 @@ scopes: - postgresql_metrics source # Anything `postgresql_metrics` source related - prometheus_remote_write source # Anything `prometheus_remote_write` source related - prometheus_scrape source # Anything `prometheus_scrape` source related + - pulsar source # Anything `pulsar` source related - redis source # Anything `redis` source related - socket source # Anything `socket` source related - splunk_hec source # Anything `splunk_hec` source related diff --git a/Cargo.toml b/Cargo.toml index e441d35a1a794..3281145fb04b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -502,6 +502,7 @@ sources-logs = [ "sources-logstash", "sources-nats", "sources-opentelemetry", + "sources-pulsar", "sources-file-descriptor", "sources-redis", "sources-socket", @@ -559,6 +560,7 @@ sources-prometheus = ["sources-prometheus-scrape", "sources-prometheus-remote-wr sources-prometheus-scrape = ["sinks-prometheus", "sources-utils-http-client", "vector-lib/prometheus"] sources-prometheus-remote-write = ["sinks-prometheus", "sources-utils-http", "vector-lib/prometheus"] sources-prometheus-pushgateway = ["sinks-prometheus", "sources-utils-http", "vector-lib/prometheus"] +sources-pulsar = ["dep:apache-avro", "dep:pulsar"] sources-redis= ["dep:redis"] sources-socket = ["sources-utils-net", "tokio-util/net"] sources-splunk_hec = ["dep:roaring"] @@ -852,7 +854,7 @@ nginx-integration-tests = ["sources-nginx_metrics"] opentelemetry-integration-tests = ["sources-opentelemetry"] postgresql_metrics-integration-tests = ["sources-postgresql_metrics"] prometheus-integration-tests = ["sinks-prometheus", "sources-prometheus", "sinks-influxdb"] -pulsar-integration-tests = ["sinks-pulsar"] +pulsar-integration-tests = ["sinks-pulsar", "sources-pulsar"] redis-integration-tests = ["sinks-redis", "sources-redis"] splunk-integration-tests = ["sinks-splunk_hec"] dnstap-integration-tests = ["sources-dnstap", "dep:bollard"] diff --git a/changelog.d/pulsar_source.feature.md b/changelog.d/pulsar_source.feature.md new file mode 100644 index 0000000000000..f0697dc184460 --- /dev/null +++ b/changelog.d/pulsar_source.feature.md @@ -0,0 +1,3 @@ +A new source has been added that can receive logs from Apache Pulsar. 
+ +authors: zamazan4ik WarmSnowy diff --git a/src/internal_events/mod.rs b/src/internal_events/mod.rs index 5bf373a87d2c6..1ca57638f20f4 100644 --- a/src/internal_events/mod.rs +++ b/src/internal_events/mod.rs @@ -102,7 +102,7 @@ mod process; feature = "sinks-prometheus" ))] mod prometheus; -#[cfg(feature = "sinks-pulsar")] +#[cfg(any(feature = "sinks-pulsar", feature = "sources-pulsar"))] mod pulsar; #[cfg(feature = "sources-redis")] mod redis; @@ -237,7 +237,7 @@ pub(crate) use self::postgresql_metrics::*; feature = "sinks-prometheus" ))] pub(crate) use self::prometheus::*; -#[cfg(feature = "sinks-pulsar")] +#[cfg(any(feature = "sinks-pulsar", feature = "sources-pulsar"))] pub(crate) use self::pulsar::*; #[cfg(feature = "sources-redis")] pub(crate) use self::redis::*; diff --git a/src/internal_events/pulsar.rs b/src/internal_events/pulsar.rs index 46c45356e7f2c..1e7b83bc71080 100644 --- a/src/internal_events/pulsar.rs +++ b/src/internal_events/pulsar.rs @@ -1,6 +1,7 @@ -use metrics::counter; -use vector_lib::internal_event::InternalEvent; -use vector_lib::internal_event::{error_stage, error_type, ComponentEventsDropped, UNINTENTIONAL}; +use metrics::{counter, register_counter, Counter}; +use vector_lib::internal_event::{ + error_stage, error_type, ComponentEventsDropped, InternalEvent, UNINTENTIONAL, +}; #[derive(Debug)] pub struct PulsarSendingError { @@ -52,3 +53,80 @@ impl InternalEvent for PulsarPropertyExtractionError { ); } } + +pub enum PulsarErrorEventType { + Read, + Ack, + NAck, +} + +pub struct PulsarErrorEventData { + pub msg: String, + pub error_type: PulsarErrorEventType, +} + +registered_event!( + PulsarErrorEvent => { + ack_errors: Counter = register_counter!( + "component_errors_total", + "error_code" => "acknowledge_message", + "error_type" => error_type::ACKNOWLEDGMENT_FAILED, + "stage" => error_stage::RECEIVING, + ), + + nack_errors: Counter = register_counter!( + "component_errors_total", + "error_code" => "negative_acknowledge_message", + "error_type" => error_type::ACKNOWLEDGMENT_FAILED, + "stage" => error_stage::RECEIVING, + ), + + read_errors: Counter = register_counter!( + "component_errors_total", + "error_code" => "reading_message", + "error_type" => error_type::READER_FAILED, + "stage" => error_stage::RECEIVING, + ), + } + + fn emit(&self,error:PulsarErrorEventData) { + match error.error_type{ + PulsarErrorEventType::Read => { + error!( + message = "Failed to read message.", + error = error.msg, + error_code = "reading_message", + error_type = error_type::READER_FAILED, + stage = error_stage::RECEIVING, + internal_log_rate_limit = true, + ); + + self.read_errors.increment(1_u64); + } + PulsarErrorEventType::Ack => { + error!( + message = "Failed to acknowledge message.", + error = error.msg, + error_code = "acknowledge_message", + error_type = error_type::ACKNOWLEDGMENT_FAILED, + stage = error_stage::RECEIVING, + internal_log_rate_limit = true, + ); + + self.ack_errors.increment(1_u64); + } + PulsarErrorEventType::NAck => { + error!( + message = "Failed to negatively acknowledge message.", + error = error.msg, + error_code = "negative_acknowledge_message", + error_type = error_type::ACKNOWLEDGMENT_FAILED, + stage = error_stage::RECEIVING, + internal_log_rate_limit = true, + ); + + self.nack_errors.increment(1_u64); + } + } + } +); diff --git a/src/sinks/datadog/metrics/encoder.rs b/src/sinks/datadog/metrics/encoder.rs index edb6d4725efc8..faf3b72ddc5c3 100644 --- a/src/sinks/datadog/metrics/encoder.rs +++ b/src/sinks/datadog/metrics/encoder.rs @@ -735,9 
+735,8 @@ fn source_type_to_service(source_type: &str) -> Option { // Generally that means the Origin Metadata will have been set as a pass through. // However, if the upstream Vector instance did not set Origin Metadata (for example if it is an // older version version), we will at least set the OriginProduct and OriginCategory. - "kafka" | "nats" | "redis" | "gcp_pubsub" | "http_client" | "http_server" | "vector" => { - Some(0) - } + "kafka" | "nats" | "redis" | "gcp_pubsub" | "http_client" | "http_server" | "vector" + | "pulsar" => Some(0), // This scenario should not occur- if it does it means we added a source that deals with metrics, // and did not update this function. diff --git a/src/sources/mod.rs b/src/sources/mod.rs index e7f7619ee7e20..6526666d827b8 100644 --- a/src/sources/mod.rs +++ b/src/sources/mod.rs @@ -72,6 +72,8 @@ pub mod postgresql_metrics; feature = "sources-prometheus-pushgateway" ))] pub mod prometheus; +#[cfg(feature = "sources-pulsar")] +pub mod pulsar; #[cfg(feature = "sources-redis")] pub mod redis; #[cfg(feature = "sources-socket")] diff --git a/src/sources/pulsar.rs b/src/sources/pulsar.rs new file mode 100644 index 0000000000000..e52cc447a8d17 --- /dev/null +++ b/src/sources/pulsar.rs @@ -0,0 +1,609 @@ +//! `Pulsar` source. +//! Accepts log events streamed from [`Apache Pulsar`][pulsar]. +//! +//! [pulsar]: https://pulsar.apache.org/ +use chrono::TimeZone; +use futures_util::StreamExt; +use pulsar::{ + authentication::oauth2::{OAuth2Authentication, OAuth2Params}, + consumer::Message, + message::proto::MessageIdData, + Authentication, Consumer, Pulsar, SubType, TokioExecutor, +}; +use tokio_util::codec::FramedRead; + +use vector_lib::{ + codecs::{ + decoding::{DeserializerConfig, FramingConfig}, + StreamDecodingError, + }, + config::{LegacyKey, LogNamespace, SourceAcknowledgementsConfig, SourceOutput}, + configurable::configurable_component, + event::Event, + finalization::BatchStatus, + finalizer::OrderedFinalizer, + internal_event::{ + ByteSize, BytesReceived, CountByteSize, EventsReceived, InternalEventHandle, Protocol, + Registered, + }, + sensitive_string::SensitiveString, + shutdown::ShutdownSignal, + EstimatedJsonEncodedSizeOf, +}; +use vrl::{owned_value_path, path, value::Kind}; + +use crate::{ + codecs::{Decoder, DecodingConfig}, + config::{SourceConfig, SourceContext}, + event::BatchNotifier, + internal_events::{ + PulsarErrorEvent, PulsarErrorEventData, PulsarErrorEventType, StreamClosedError, + }, + serde::{bool_or_struct, default_decoding, default_framing_message_based}, + SourceSender, +}; + +/// Configuration for the `pulsar` source. +#[configurable_component(source("pulsar", "Collect logs from Apache Pulsar."))] +#[derive(Clone, Debug, Derivative)] +#[derivative(Default)] +#[serde(deny_unknown_fields)] +pub struct PulsarSourceConfig { + /// The endpoint to which the Pulsar client should connect to. + #[configurable(metadata(docs::examples = "pulsar://127.0.0.1:6650"))] + #[serde(alias = "address")] + endpoint: String, + + /// The Pulsar topic names to read events from. + #[configurable(metadata(docs::examples = "[persistent://public/default/my-topic]"))] + topics: Vec, + + /// The Pulsar consumer name. + #[configurable(metadata(docs::examples = "consumer-name"))] + consumer_name: Option, + + /// The Pulsar subscription name. + #[configurable(metadata(docs::examples = "subscription_name"))] + subscription_name: Option, + + /// The consumer's priority level. + /// + /// The broker follows descending priorities. 
For example, 0=max-priority, 1, 2,... + /// + /// In Shared subscription type, the broker first dispatches messages to the max priority level consumers if they have permits. Otherwise, the broker considers next priority level consumers. + priority_level: Option, + + /// Max count of messages in a batch. + batch_size: Option, + + #[configurable(derived)] + auth: Option, + + #[configurable(derived)] + dead_letter_queue_policy: Option, + + #[configurable(derived)] + #[serde(default = "default_framing_message_based")] + #[derivative(Default(value = "default_framing_message_based()"))] + framing: FramingConfig, + + #[configurable(derived)] + #[serde(default = "default_decoding")] + #[derivative(Default(value = "default_decoding()"))] + decoding: DeserializerConfig, + + #[configurable(derived)] + #[serde(default, deserialize_with = "bool_or_struct")] + acknowledgements: SourceAcknowledgementsConfig, + + /// The namespace to use for logs. This overrides the global setting. + #[configurable(metadata(docs::hidden))] + #[serde(default)] + log_namespace: Option, +} + +/// Authentication configuration. +#[configurable_component] +#[derive(Clone, Debug)] +#[serde(deny_unknown_fields, untagged)] +enum AuthConfig { + /// Basic authentication. + Basic { + /// Basic authentication name/username. + /// + /// This can be used either for basic authentication (username/password) or JWT authentication. + /// When used for JWT, the value should be `token`. + #[configurable(metadata(docs::examples = "${PULSAR_NAME}"))] + #[configurable(metadata(docs::examples = "name123"))] + name: String, + + /// Basic authentication password/token. + /// + /// This can be used either for basic authentication (username/password) or JWT authentication. + /// When used for JWT, the value should be the signed JWT, in the compact representation. + #[configurable(metadata(docs::examples = "${PULSAR_TOKEN}"))] + #[configurable(metadata(docs::examples = "123456789"))] + token: SensitiveString, + }, + + /// OAuth authentication. + OAuth { + #[configurable(derived)] + oauth2: OAuth2Config, + }, +} + +/// OAuth2-specific authentication configuration. +#[configurable_component] +#[derive(Clone, Debug)] +pub struct OAuth2Config { + /// The issuer URL. + #[configurable(metadata(docs::examples = "${OAUTH2_ISSUER_URL}"))] + #[configurable(metadata(docs::examples = "https://oauth2.issuer"))] + issuer_url: String, + + /// The credentials URL. + /// + /// A data URL is also supported. + #[configurable(metadata(docs::examples = "${OAUTH2_CREDENTIALS_URL}"))] + #[configurable(metadata(docs::examples = "file:///oauth2_credentials"))] + #[configurable(metadata(docs::examples = "data:application/json;base64,cHVsc2FyCg=="))] + credentials_url: String, + + /// The OAuth2 audience. + #[configurable(metadata(docs::examples = "${OAUTH2_AUDIENCE}"))] + #[configurable(metadata(docs::examples = "pulsar"))] + audience: Option, + + /// The OAuth2 scope. + #[configurable(metadata(docs::examples = "${OAUTH2_SCOPE}"))] + #[configurable(metadata(docs::examples = "admin"))] + scope: Option, +} + +/// Dead Letter Queue policy configuration. +#[configurable_component] +#[derive(Clone, Debug)] +struct DeadLetterQueuePolicy { + /// Maximum number of times that a message will be redelivered before being sent to the dead letter queue. + pub max_redeliver_count: usize, + + /// Name of the dead letter topic where the failing messages will be sent. 
+ pub dead_letter_topic: String, +} + +#[derive(Debug)] +struct FinalizerEntry { + topic: String, + message_id: MessageIdData, +} + +impl_generate_config_from_default!(PulsarSourceConfig); + +#[async_trait::async_trait] +#[typetag::serde(name = "pulsar")] +impl SourceConfig for PulsarSourceConfig { + async fn build(&self, cx: SourceContext) -> crate::Result { + let log_namespace = cx.log_namespace(self.log_namespace); + + let consumer = self.create_consumer().await?; + let decoder = + DecodingConfig::new(self.framing.clone(), self.decoding.clone(), log_namespace) + .build()?; + let acknowledgements = cx.do_acknowledgements(self.acknowledgements); + + Ok(Box::pin(pulsar_source( + consumer, + decoder, + cx.shutdown, + cx.out, + acknowledgements, + log_namespace, + ))) + } + + fn outputs(&self, global_log_namespace: LogNamespace) -> Vec { + let log_namespace = global_log_namespace.merge(self.log_namespace); + + let schema_definition = self + .decoding + .schema_definition(log_namespace) + .with_standard_vector_source_metadata() + .with_source_metadata( + Self::NAME, + Some(LegacyKey::InsertIfEmpty(owned_value_path!("publish_time"))), + &owned_value_path!("publish_time"), + Kind::timestamp(), + Some("publish_time"), + ) + .with_source_metadata( + Self::NAME, + Some(LegacyKey::InsertIfEmpty(owned_value_path!("topic"))), + &owned_value_path!("topic"), + Kind::bytes(), + Some("topic"), + ) + .with_source_metadata( + Self::NAME, + Some(LegacyKey::InsertIfEmpty(owned_value_path!("producer_name"))), + &owned_value_path!("producer_name"), + Kind::bytes(), + Some("producer_name"), + ); + vec![SourceOutput::new_logs( + self.decoding.output_type(), + schema_definition, + )] + } + + fn can_acknowledge(&self) -> bool { + true + } +} + +impl PulsarSourceConfig { + async fn create_consumer( + &self, + ) -> crate::Result> { + let mut builder = Pulsar::builder(&self.endpoint, TokioExecutor); + + if let Some(auth) = &self.auth { + builder = match auth { + AuthConfig::Basic { name, token } => builder.with_auth(Authentication { + name: name.clone(), + data: token.inner().as_bytes().to_vec(), + }), + AuthConfig::OAuth { oauth2 } => builder.with_auth_provider( + OAuth2Authentication::client_credentials(OAuth2Params { + issuer_url: oauth2.issuer_url.clone(), + credentials_url: oauth2.credentials_url.clone(), + audience: oauth2.audience.clone(), + scope: oauth2.scope.clone(), + }), + ), + }; + } + + let pulsar = builder.build().await?; + + let mut consumer_builder = pulsar + .consumer() + .with_topics(&self.topics) + .with_subscription_type(SubType::Shared) + .with_options(pulsar::consumer::ConsumerOptions { + priority_level: self.priority_level, + ..Default::default() + }); + + if let Some(dead_letter_queue_policy) = &self.dead_letter_queue_policy { + consumer_builder = + consumer_builder.with_dead_letter_policy(pulsar::consumer::DeadLetterPolicy { + max_redeliver_count: dead_letter_queue_policy.max_redeliver_count, + dead_letter_topic: dead_letter_queue_policy.dead_letter_topic.clone(), + }); + } + + if let Some(batch_size) = self.batch_size { + consumer_builder = consumer_builder.with_batch_size(batch_size); + } + if let Some(consumer_name) = &self.consumer_name { + consumer_builder = consumer_builder.with_consumer_name(consumer_name); + } + if let Some(subscription_name) = &self.subscription_name { + consumer_builder = consumer_builder.with_subscription(subscription_name); + } + + let consumer = consumer_builder.build::().await?; + + Ok(consumer) + } +} + +async fn pulsar_source( + mut consumer: Consumer, + 
+    decoder: Decoder,
+    mut shutdown: ShutdownSignal,
+    mut out: SourceSender,
+    acknowledgements: bool,
+    log_namespace: LogNamespace,
+) -> Result<(), ()> {
+    let (finalizer, mut ack_stream) =
+        OrderedFinalizer::<FinalizerEntry>::maybe_new(acknowledgements, Some(shutdown.clone()));
+
+    let bytes_received = register!(BytesReceived::from(Protocol::TCP));
+    let events_received = register!(EventsReceived);
+    let pulsar_error_events = register!(PulsarErrorEvent);
+
+    loop {
+        tokio::select! {
+            _ = &mut shutdown => break,
+            entry = ack_stream.next() => {
+                if let Some((status, entry)) = entry {
+                    handle_ack(&mut consumer, status, entry, &pulsar_error_events).await;
+                }
+            },
+            Some(maybe_message) = consumer.next() => {
+                match maybe_message {
+                    Ok(msg) => {
+                        bytes_received.emit(ByteSize(msg.payload.data.len()));
+                        parse_message(msg, &decoder, &finalizer, &mut out, &mut consumer, log_namespace, &events_received, &pulsar_error_events).await;
+                    }
+                    Err(error) => {
+                        pulsar_error_events.emit(PulsarErrorEventData{
+                            msg: error.to_string(),
+                            error_type: PulsarErrorEventType::Read,
+                        });
+                    }
+                }
+            },
+        }
+    }
+
+    Ok(())
+}
+
+#[allow(clippy::too_many_arguments)]
+async fn parse_message(
+    msg: Message<String>,
+    decoder: &Decoder,
+    finalizer: &Option<OrderedFinalizer<FinalizerEntry>>,
+    out: &mut SourceSender,
+    consumer: &mut Consumer<String, TokioExecutor>,
+    log_namespace: LogNamespace,
+    events_received: &Registered<EventsReceived>,
+    pulsar_error_events: &Registered<PulsarErrorEvent>,
+) {
+    let publish_time = i64::try_from(msg.payload.metadata.publish_time)
+        .ok()
+        .and_then(|millis| chrono::Utc.timestamp_millis_opt(millis).latest());
+    let topic = msg.topic.clone();
+    let producer_name = msg.payload.metadata.producer_name.clone();
+
+    let mut stream = FramedRead::new(msg.payload.data.as_ref(), decoder.clone());
+    let stream = async_stream::stream! {
+        while let Some(next) = stream.next().await {
+            match next {
+                Ok((events, _byte_size)) => {
+                    events_received.emit(CountByteSize(
+                        events.len(),
+                        events.estimated_json_encoded_size_of(),
+                    ));
+
+                    let now = chrono::Utc::now();
+
+                    let events = events.into_iter().map(|mut event| {
+                        if let Event::Log(ref mut log) = event {
+                            log_namespace.insert_standard_vector_source_metadata(
+                                log,
+                                PulsarSourceConfig::NAME,
+                                now,
+                            );
+
+                            log_namespace.insert_source_metadata(
+                                PulsarSourceConfig::NAME,
+                                log,
+                                Some(LegacyKey::InsertIfEmpty(path!("publish_time"))),
+                                path!("publish_time"),
+                                publish_time,
+                            );
+
+                            log_namespace.insert_source_metadata(
+                                PulsarSourceConfig::NAME,
+                                log,
+                                Some(LegacyKey::InsertIfEmpty(path!("topic"))),
+                                path!("topic"),
+                                topic.clone(),
+                            );
+
+                            log_namespace.insert_source_metadata(
+                                PulsarSourceConfig::NAME,
+                                log,
+                                Some(LegacyKey::InsertIfEmpty(path!("producer_name"))),
+                                path!("producer_name"),
+                                producer_name.clone(),
+                            );
+                        }
+                        event
+                    });
+
+                    for event in events {
+                        yield event;
+                    }
+                }
+                Err(error) => {
+                    // Error is logged by `crate::codecs`, no further
+                    // handling is needed here.
+                    if !error.can_continue() {
+                        break;
+                    }
+                }
+            }
+        }
+    }
+    .boxed();
+
+    finalize_event_stream(
+        consumer,
+        finalizer,
+        out,
+        stream,
+        msg.topic.clone(),
+        msg.message_id().clone(),
+        pulsar_error_events,
+    )
+    .await;
+}
+
+/// Send the event stream created by the framed read to the `out` stream.
+async fn finalize_event_stream(
+    consumer: &mut Consumer<String, TokioExecutor>,
+    finalizer: &Option<OrderedFinalizer<FinalizerEntry>>,
+    out: &mut SourceSender,
+    mut stream: std::pin::Pin<Box<dyn Stream<Item = Event> + Send + '_>>,
+    topic: String,
+    message_id: MessageIdData,
+    pulsar_error_events: &Registered<PulsarErrorEvent>,
+) {
+    match finalizer {
+        Some(finalizer) => {
+            let (batch, receiver) = BatchNotifier::new_with_receiver();
+            let mut stream = stream.map(|event| event.with_batch_notifier(&batch));
+
+            match out.send_event_stream(&mut stream).await {
+                Err(_error) => {
+                    emit!(StreamClosedError { count: 1 });
+                }
+                Ok(_) => {
+                    finalizer.add(FinalizerEntry { topic, message_id }, receiver);
+                }
+            }
+        }
+        None => match out.send_event_stream(&mut stream).await {
+            Err(_error) => {
+                emit!(StreamClosedError { count: 1 });
+            }
+            Ok(_) => {
+                if let Err(error) = consumer.ack_with_id(topic.as_str(), message_id).await {
+                    pulsar_error_events.emit(PulsarErrorEventData {
+                        msg: error.to_string(),
+                        error_type: PulsarErrorEventType::Ack,
+                    });
+                }
+            }
+        },
+    }
+}
+
+async fn handle_ack(
+    consumer: &mut Consumer<String, TokioExecutor>,
+    status: BatchStatus,
+    entry: FinalizerEntry,
+    pulsar_error_events: &Registered<PulsarErrorEvent>,
+) {
+    match status {
+        BatchStatus::Delivered => {
+            if let Err(error) = consumer
+                .ack_with_id(entry.topic.as_str(), entry.message_id)
+                .await
+            {
+                pulsar_error_events.emit(PulsarErrorEventData {
+                    msg: error.to_string(),
+                    error_type: PulsarErrorEventType::Ack,
+                });
+            }
+        }
+        BatchStatus::Errored | BatchStatus::Rejected => {
+            if let Err(error) = consumer
+                .nack_with_id(entry.topic.as_str(), entry.message_id)
+                .await
+            {
+                pulsar_error_events.emit(PulsarErrorEventData {
+                    msg: error.to_string(),
+                    error_type: PulsarErrorEventType::NAck,
+                });
+            }
+        }
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use crate::sources::pulsar::PulsarSourceConfig;
+
+    #[test]
+    fn generate_config() {
+        crate::test_util::test_generate_config::<PulsarSourceConfig>();
+    }
+}
+
+#[cfg(feature = "pulsar-integration-tests")]
+#[cfg(test)]
+mod integration_tests {
+    use super::*;
+    use crate::config::log_schema;
+    use crate::test_util::components::{assert_source_compliance, SOURCE_TAGS};
+    use crate::test_util::{collect_n, random_string, trace_init};
+
+    fn pulsar_address() -> String {
+        std::env::var("PULSAR_ADDRESS").unwrap_or_else(|_| "pulsar://127.0.0.1:6650".into())
+    }
+
+    #[tokio::test]
+    async fn consumes_event_with_acknowledgements() {
+        pulsar_send_receive(true, LogNamespace::Legacy).await;
+    }
+
+    #[tokio::test]
+    async fn consumes_event_with_acknowledgements_vector_namespace() {
+        pulsar_send_receive(true, LogNamespace::Vector).await;
+    }
+
+    #[tokio::test]
+    async fn consumes_event_without_acknowledgements() {
+        pulsar_send_receive(false, LogNamespace::Legacy).await;
+    }
+
+    #[tokio::test]
+    async fn consumes_event_without_acknowledgements_vector_namespace() {
+        pulsar_send_receive(false, LogNamespace::Vector).await;
+    }
+
+    async fn pulsar_send_receive(acknowledgements: bool, log_namespace: LogNamespace) {
+        trace_init();
+
+        let topic = format!("test-{}", random_string(10));
+        let cnf = PulsarSourceConfig {
+            endpoint: pulsar_address(),
+            topics: vec![topic.clone()],
+            consumer_name: None,
+            subscription_name: None,
+            priority_level: None,
+            batch_size: None,
+            auth: None,
+            dead_letter_queue_policy: None,
+            framing: FramingConfig::Bytes,
+            decoding: DeserializerConfig::Bytes,
+            acknowledgements: acknowledgements.into(),
+            log_namespace: None,
+        };
+
+        let pulsar = Pulsar::<TokioExecutor>::builder(&cnf.endpoint, TokioExecutor)
+            .build()
+            .await
+            .unwrap();
+
+        let consumer = cnf.create_consumer().await.unwrap();
+        let decoder =
DecodingConfig::new( + cnf.framing.clone(), + cnf.decoding.clone(), + LogNamespace::Legacy, + ) + .build() + .unwrap(); + + let mut producer = pulsar.producer().with_topic(topic).build().await.unwrap(); + + let msg = "test message"; + + let events = assert_source_compliance(&SOURCE_TAGS, async move { + let (tx, rx) = SourceSender::new_test(); + tokio::spawn(pulsar_source( + consumer, + decoder, + ShutdownSignal::noop(), + tx, + acknowledgements, + log_namespace, + )); + producer.send(msg).await.unwrap(); + + collect_n(rx, 1).await + }) + .await; + + assert_eq!( + events[0].as_log()[log_schema().message_key().unwrap().to_string()], + msg.into() + ); + } +} diff --git a/website/content/en/docs/reference/configuration/sources/pulsar.md b/website/content/en/docs/reference/configuration/sources/pulsar.md new file mode 100644 index 0000000000000..2b0512eb1ada8 --- /dev/null +++ b/website/content/en/docs/reference/configuration/sources/pulsar.md @@ -0,0 +1,14 @@ +--- +title: Pulsar +description: Collect observability events from [Apache Pulsar](https://pulsar.apache.org) topics +kind: source +layout: component +tags: ["pulsar", "apache", "component", "source"] +--- + +{{/* +This doc is generated using: + +1. The template in layouts/docs/component.html +2. The relevant CUE data in cue/reference/components/... +*/}} diff --git a/website/cue/reference/components/sources/base/pulsar.cue b/website/cue/reference/components/sources/base/pulsar.cue new file mode 100644 index 0000000000000..f157854d6f1fa --- /dev/null +++ b/website/cue/reference/components/sources/base/pulsar.cue @@ -0,0 +1,443 @@ +package metadata + +base: components: sources: pulsar: configuration: { + acknowledgements: { + deprecated: true + description: """ + Controls how acknowledgements are handled by this source. + + This setting is **deprecated** in favor of enabling `acknowledgements` at the [global][global_acks] or sink level. + + Enabling or disabling acknowledgements at the source level has **no effect** on acknowledgement behavior. + + See [End-to-end Acknowledgements][e2e_acks] for more information on how event acknowledgement is handled. + + [global_acks]: https://vector.dev/docs/reference/configuration/global-options/#acknowledgements + [e2e_acks]: https://vector.dev/docs/about/under-the-hood/architecture/end-to-end-acknowledgements/ + """ + required: false + type: object: options: enabled: { + description: "Whether or not end-to-end acknowledgements are enabled for this source." + required: false + type: bool: {} + } + } + auth: { + description: "Authentication configuration." + required: false + type: object: options: { + name: { + description: """ + Basic authentication name/username. + + This can be used either for basic authentication (username/password) or JWT authentication. + When used for JWT, the value should be `token`. + """ + required: true + type: string: examples: ["${PULSAR_NAME}", "name123"] + } + oauth2: { + description: "OAuth2-specific authentication configuration." + required: true + type: object: options: { + audience: { + description: "The OAuth2 audience." + required: false + type: string: examples: ["${OAUTH2_AUDIENCE}", "pulsar"] + } + credentials_url: { + description: """ + The credentials URL. + + A data URL is also supported. + """ + required: true + type: string: examples: ["${OAUTH2_CREDENTIALS_URL}", "file:///oauth2_credentials", "data:application/json;base64,cHVsc2FyCg=="] + } + issuer_url: { + description: "The issuer URL." 
+					required: true
+					type: string: examples: ["${OAUTH2_ISSUER_URL}", "https://oauth2.issuer"]
+				}
+				scope: {
+					description: "The OAuth2 scope."
+					required:    false
+					type: string: examples: ["${OAUTH2_SCOPE}", "admin"]
+				}
+			}
+		}
+		token: {
+			description: """
+				Basic authentication password/token.
+
+				This can be used either for basic authentication (username/password) or JWT authentication.
+				When used for JWT, the value should be the signed JWT, in the compact representation.
+				"""
+			required: true
+			type: string: examples: ["${PULSAR_TOKEN}", "123456789"]
+		}
+	}
+}
+batch_size: {
+	description: "Max count of messages in a batch."
+	required:    false
+	type: uint: {}
+}
+consumer_name: {
+	description: "The Pulsar consumer name."
+	required:    false
+	type: string: examples: ["consumer-name"]
+}
+dead_letter_queue_policy: {
+	description: "Dead Letter Queue policy configuration."
+	required:    false
+	type: object: options: {
+		dead_letter_topic: {
+			description: "Name of the dead letter topic where the failing messages will be sent."
+			required:    true
+			type: string: {}
+		}
+		max_redeliver_count: {
+			description: "Maximum number of times that a message will be redelivered before being sent to the dead letter queue."
+			required:    true
+			type: uint: {}
+		}
+	}
+}
+decoding: {
+	description: "Configures how events are decoded from raw bytes."
+	required:    false
+	type: object: options: {
+		avro: {
+			description:   "Apache Avro-specific decoder options."
+			relevant_when: "codec = \"avro\""
+			required:      true
+			type: object: options: {
+				schema: {
+					description: """
+						The Avro schema definition.
+						Please note that the following [`apache_avro::types::Value`] variants are currently *not* supported:
+						* `Date`
+						* `Decimal`
+						* `Duration`
+						* `Fixed`
+						* `TimeMillis`
+						"""
+					required: true
+					type: string: examples: ["{ \"type\": \"record\", \"name\": \"log\", \"fields\": [{ \"name\": \"message\", \"type\": \"string\" }] }"]
+				}
+				strip_schema_id_prefix: {
+					description: """
+						For Avro datum encoded in Kafka messages, the bytes are prefixed with the schema ID. Set this to true to
+						strip the schema ID prefix, according to [Confluent Kafka's documentation](https://docs.confluent.io/platform/current/schema-registry/fundamentals/serdes-develop/index.html#wire-format).
+						"""
+					required: true
+					type: bool: {}
+				}
+			}
+		}
+		codec: {
+			description: "The codec to use for decoding events."
+			required:    false
+			type: string: {
+				default: "bytes"
+				enum: {
+					avro: """
+						Decodes the raw bytes as an [Apache Avro][apache_avro] message.
+
+						[apache_avro]: https://avro.apache.org/
+						"""
+					bytes: "Uses the raw bytes as-is."
+					gelf: """
+						Decodes the raw bytes as a [GELF][gelf] message.
+
+						This codec is experimental for the following reason:
+
+						The GELF specification is more strict than the actual Graylog receiver.
+						Vector's decoder currently adheres more strictly to the GELF spec, with
+						the exception that some characters such as `@` are allowed in field names.
+
+						Other GELF codecs, such as Loki's, use a [Go SDK][implementation] that is maintained
+						by Graylog, and is much more relaxed than the GELF spec.
+
+						Going forward, Vector will use that [Go SDK][implementation] as the reference implementation, which means
+						the codec may continue to relax the enforcement of the specification.
+
+						[gelf]: https://docs.graylog.org/docs/gelf
+						[implementation]: https://github.com/Graylog2/go-gelf/blob/v2/gelf/reader.go
+						"""
+					json: """
+						Decodes the raw bytes as [JSON][json].
+ + [json]: https://www.json.org/ + """ + native: """ + Decodes the raw bytes as [native Protocol Buffers format][vector_native_protobuf]. + + This codec is **[experimental][experimental]**. + + [vector_native_protobuf]: https://github.com/vectordotdev/vector/blob/master/lib/vector-core/proto/event.proto + [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs + """ + native_json: """ + Decodes the raw bytes as [native JSON format][vector_native_json]. + + This codec is **[experimental][experimental]**. + + [vector_native_json]: https://github.com/vectordotdev/vector/blob/master/lib/codecs/tests/data/native_encoding/schema.cue + [experimental]: https://vector.dev/highlights/2022-03-31-native-event-codecs + """ + protobuf: """ + Decodes the raw bytes as [protobuf][protobuf]. + + [protobuf]: https://protobuf.dev/ + """ + syslog: """ + Decodes the raw bytes as a Syslog message. + + Decodes either as the [RFC 3164][rfc3164]-style format ("old" style) or the + [RFC 5424][rfc5424]-style format ("new" style, includes structured data). + + [rfc3164]: https://www.ietf.org/rfc/rfc3164.txt + [rfc5424]: https://www.ietf.org/rfc/rfc5424.txt + """ + vrl: """ + Decodes the raw bytes as a string and passes them as input to a [VRL][vrl] program. + + [vrl]: https://vector.dev/docs/reference/vrl + """ + } + } + } + gelf: { + description: "GELF-specific decoding options." + relevant_when: "codec = \"gelf\"" + required: false + type: object: options: lossy: { + description: """ + Determines whether or not to replace invalid UTF-8 sequences instead of failing. + + When true, invalid UTF-8 sequences are replaced with the [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD]. + + [U+FFFD]: https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character + """ + required: false + type: bool: default: true + } + } + json: { + description: "JSON-specific decoding options." + relevant_when: "codec = \"json\"" + required: false + type: object: options: lossy: { + description: """ + Determines whether or not to replace invalid UTF-8 sequences instead of failing. + + When true, invalid UTF-8 sequences are replaced with the [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD]. + + [U+FFFD]: https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character + """ + required: false + type: bool: default: true + } + } + native_json: { + description: "Vector's native JSON-specific decoding options." + relevant_when: "codec = \"native_json\"" + required: false + type: object: options: lossy: { + description: """ + Determines whether or not to replace invalid UTF-8 sequences instead of failing. + + When true, invalid UTF-8 sequences are replaced with the [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD]. + + [U+FFFD]: https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character + """ + required: false + type: bool: default: true + } + } + protobuf: { + description: "Protobuf-specific decoding options." + relevant_when: "codec = \"protobuf\"" + required: false + type: object: options: { + desc_file: { + description: "Path to desc file" + required: false + type: string: default: "" + } + message_type: { + description: "message type. e.g package.message" + required: false + type: string: default: "" + } + } + } + syslog: { + description: "Syslog-specific decoding options." + relevant_when: "codec = \"syslog\"" + required: false + type: object: options: lossy: { + description: """ + Determines whether or not to replace invalid UTF-8 sequences instead of failing. 
+ + When true, invalid UTF-8 sequences are replaced with the [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD]. + + [U+FFFD]: https://en.wikipedia.org/wiki/Specials_(Unicode_block)#Replacement_character + """ + required: false + type: bool: default: true + } + } + vrl: { + description: "VRL-specific decoding options." + relevant_when: "codec = \"vrl\"" + required: true + type: object: options: { + source: { + description: """ + The [Vector Remap Language][vrl] (VRL) program to execute for each event. + Note that the final contents of the `.` target will be used as the decoding result. + Compilation error or use of 'abort' in a program will result in a decoding error. + + [vrl]: https://vector.dev/docs/reference/vrl + """ + required: true + type: string: {} + } + timezone: { + description: """ + The name of the timezone to apply to timestamp conversions that do not contain an explicit + time zone. The time zone name may be any name in the [TZ database][tz_database], or `local` + to indicate system local time. + + If not set, `local` will be used. + + [tz_database]: https://en.wikipedia.org/wiki/List_of_tz_database_time_zones + """ + required: false + type: string: examples: ["local", "America/New_York", "EST5EDT"] + } + } + } + } + } + endpoint: { + description: "The endpoint to which the Pulsar client should connect to." + required: true + type: string: examples: ["pulsar://127.0.0.1:6650"] + } + framing: { + description: """ + Framing configuration. + + Framing handles how events are separated when encoded in a raw byte form, where each event is + a frame that must be prefixed, or delimited, in a way that marks where an event begins and + ends within the byte stream. + """ + required: false + type: object: options: { + character_delimited: { + description: "Options for the character delimited decoder." + relevant_when: "method = \"character_delimited\"" + required: true + type: object: options: { + delimiter: { + description: "The character that delimits byte sequences." + required: true + type: uint: {} + } + max_length: { + description: """ + The maximum length of the byte buffer. + + This length does *not* include the trailing delimiter. + + By default, there is no maximum length enforced. If events are malformed, this can lead to + additional resource usage as events continue to be buffered in memory, and can potentially + lead to memory exhaustion in extreme cases. + + If there is a risk of processing malformed data, such as logs with user-controlled input, + consider setting the maximum length to a reasonably large value as a safety net. This + ensures that processing is not actually unbounded. + """ + required: false + type: uint: {} + } + } + } + method: { + description: "The framing method." + required: false + type: string: { + default: "bytes" + enum: { + bytes: "Byte frames are passed through as-is according to the underlying I/O boundaries (for example, split between messages or stream segments)." + character_delimited: "Byte frames which are delimited by a chosen character." + length_delimited: "Byte frames which are prefixed by an unsigned big-endian 32-bit integer indicating the length." + newline_delimited: "Byte frames which are delimited by a newline character." + octet_counting: """ + Byte frames according to the [octet counting][octet_counting] format. + + [octet_counting]: https://tools.ietf.org/html/rfc6587#section-3.4.1 + """ + } + } + } + newline_delimited: { + description: "Options for the newline delimited decoder." 
+ relevant_when: "method = \"newline_delimited\"" + required: false + type: object: options: max_length: { + description: """ + The maximum length of the byte buffer. + + This length does *not* include the trailing delimiter. + + By default, there is no maximum length enforced. If events are malformed, this can lead to + additional resource usage as events continue to be buffered in memory, and can potentially + lead to memory exhaustion in extreme cases. + + If there is a risk of processing malformed data, such as logs with user-controlled input, + consider setting the maximum length to a reasonably large value as a safety net. This + ensures that processing is not actually unbounded. + """ + required: false + type: uint: {} + } + } + octet_counting: { + description: "Options for the octet counting decoder." + relevant_when: "method = \"octet_counting\"" + required: false + type: object: options: max_length: { + description: "The maximum length of the byte buffer." + required: false + type: uint: {} + } + } + } + } + priority_level: { + description: """ + The consumer's priority level. + + The broker follows descending priorities. For example, 0=max-priority, 1, 2,... + + In Shared subscription type, the broker first dispatches messages to the max priority level consumers if they have permits. Otherwise, the broker considers next priority level consumers. + """ + required: false + type: int: {} + } + subscription_name: { + description: "The Pulsar subscription name." + required: false + type: string: examples: ["subscription_name"] + } + topics: { + description: "The Pulsar topic names to read events from." + required: true + type: array: items: type: string: examples: ["[persistent://public/default/my-topic]"] + } +} diff --git a/website/cue/reference/components/sources/pulsar.cue b/website/cue/reference/components/sources/pulsar.cue new file mode 100644 index 0000000000000..c3ff06743eec2 --- /dev/null +++ b/website/cue/reference/components/sources/pulsar.cue @@ -0,0 +1,167 @@ +package metadata + +components: sources: pulsar: { + title: "Apache Pulsar" + + classes: { + commonly_used: false + delivery: "at_least_once" + deployment_roles: ["aggregator"] + development: "beta" + egress_method: "stream" + stateful: false + } + + features: { + auto_generated: true + acknowledgements: true + multiline: enabled: false + codecs: { + enabled: true + default_framing: "bytes" + } + generate: {} + } + + support: { + requirements: [] + warnings: [] + notices: [] + } + + installation: { + platform_name: null + } + + configuration: { + auth: { + common: false + description: "Options for the authentication strategy." + required: false + type: object: { + examples: [] + options: { + name: { + common: false + description: "The basic authentication name." + required: false + type: string: { + default: null + examples: ["${PULSAR_NAME}", "name123"] + } + } + token: { + common: false + description: "The basic authentication password." + required: false + type: string: { + default: null + examples: ["${PULSAR_TOKEN}", "123456789"] + } + } + oauth2: { + common: false + description: "Options for OAuth2 authentication." + required: false + type: object: { + examples: [] + options: { + issuer_url: { + description: "The issuer url." + required: true + type: string: { + examples: ["${OAUTH2_ISSUER_URL}", "https://oauth2.issuer"] + } + } + credentials_url: { + description: "The url for credentials. The data url is also supported." 
+							required: true
+							type: string: {
+								examples: ["${OAUTH2_CREDENTIALS_URL}", "file:///oauth2_credentials", "data:application/json;base64,cHVsc2FyCg=="]
+							}
+						}
+						audience: {
+							common:      false
+							description: "OAuth2 audience."
+							required:    false
+							type: string: {
+								default: null
+								examples: ["${OAUTH2_AUDIENCE}", "pulsar"]
+							}
+						}
+						scope: {
+							common:      false
+							description: "OAuth2 scope."
+							required:    false
+							type: string: {
+								default: null
+								examples: ["${OAUTH2_SCOPE}", "admin"]
+							}
+						}
+					}
+				}
+			}
+		}
+	}
+}
+endpoint: {
+	description: "The endpoint to which the Pulsar client should connect."
+	required:    true
+	type: string: {
+		examples: ["pulsar://127.0.0.1:6650"]
+	}
+}
+topics: {
+	description: "The Pulsar topic names to read events from."
+	required:    true
+	type: string: {
+		examples: ["topic-1234"]
+	}
+}
+}
+
+output: logs: record: {
+	description: "An individual Pulsar record"
+	fields: {
+		message: {
+			description: "The raw line from the Pulsar record."
+			required:    true
+			type: string: {
+				examples: ["53.126.150.246 - - [01/Oct/2020:11:25:58 -0400] \"GET /disintermediate HTTP/2.0\" 401 20308"]
+			}
+		}
+		source_type: {
+			description: "The name of the source type."
+			required:    true
+			type: string: {
+				examples: ["pulsar"]
+			}
+		}
+		timestamp: fields._current_timestamp & {
+			description: "The time the event was ingested into Vector."
+		}
+		publish_time: fields._current_timestamp & {
+			description: "The timestamp encoded in the Pulsar message, or the current time if it cannot be fetched."
+		}
+		topic: {
+			description: "The Pulsar topic that the record came from."
+			required:    true
+			type: string: {
+				examples: ["topic"]
+			}
+		}
+		producer_name: {
+			description: "The name of the Pulsar producer that the record came from."
+			required:    true
+			type: string: {
+				examples: ["pulsar-client"]
+			}
+		}
+	}
+}
+
+telemetry: metrics: {
+	component_discarded_events_total: components.sources.internal_metrics.output.metrics.component_discarded_events_total
+	component_errors_total:           components.sources.internal_metrics.output.metrics.component_errors_total
+}
+}
diff --git a/website/data/redirects.yaml b/website/data/redirects.yaml
index e65a7293bd2f0..c72f6d5b339ce 100644
--- a/website/data/redirects.yaml
+++ b/website/data/redirects.yaml
@@ -61,6 +61,7 @@ sources:
 - postgresql_metrics
 - prometheus_remote_write
 - prometheus_scrape
+- pulsar
 - socket
 - splunk_hec
 - statsd

From 5d03bf0e00b3f235cd2dfa9c88e77d7a162c0180 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 26 Feb 2024 08:03:46 -0500
Subject: [PATCH 0054/1491] chore(deps): Bump serde-wasm-bindgen from 0.6.3 to
 0.6.4 (#19934)

Bumps [serde-wasm-bindgen](https://github.com/RReverser/serde-wasm-bindgen) from 0.6.3 to 0.6.4.
- [Commits](https://github.com/RReverser/serde-wasm-bindgen/compare/v0.6.3...v0.6.4)

---
updated-dependencies:
- dependency-name: serde-wasm-bindgen
  dependency-type: direct:production
  update-type: version-update:semver-patch
...
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3e5b1e60997b8..d094bb87b02c6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7972,9 +7972,9 @@ dependencies = [ [[package]] name = "serde-wasm-bindgen" -version = "0.6.3" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b9b713f70513ae1f8d92665bbbbda5c295c2cf1da5542881ae5eefe20c9af132" +checksum = "4c1432112bce8b966497ac46519535189a3250a3812cd27a999678a69756f79f" dependencies = [ "js-sys", "serde", From e2e5253ff42339f8c66226580a8aadf9b729e10d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 08:04:11 -0500 Subject: [PATCH 0055/1491] chore(deps): Bump the aws group with 6 updates (#19936) Bumps the aws group with 6 updates: | Package | From | To | | --- | --- | --- | | [aws-types](https://github.com/smithy-lang/smithy-rs) | `1.1.5` | `1.1.6` | | [aws-sigv4](https://github.com/smithy-lang/smithy-rs) | `1.1.5` | `1.1.6` | | [aws-smithy-types](https://github.com/smithy-lang/smithy-rs) | `1.1.6` | `1.1.7` | | [aws-smithy-runtime-api](https://github.com/smithy-lang/smithy-rs) | `1.1.6` | `1.1.7` | | [aws-smithy-runtime](https://github.com/smithy-lang/smithy-rs) | `1.1.6` | `1.1.7` | | [aws-smithy-async](https://github.com/smithy-lang/smithy-rs) | `1.1.6` | `1.1.7` | Updates `aws-types` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-sigv4` from 1.1.5 to 1.1.6 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-types` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-runtime-api` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-runtime` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-async` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-sigv4 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-runtime-api dependency-type: direct:production 
update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-runtime dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-async dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 28 ++++++++++++++-------------- Cargo.toml | 12 ++++++------ 2 files changed, 20 insertions(+), 20 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d094bb87b02c6..0caed2ac8f876 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1022,9 +1022,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54b1cbe0eee57a213039088dbdeca7be9352f24e0d72332d961e8a1cb388f82d" +checksum = "404c64a104188ac70dd1684718765cb5559795458e446480e41984e68e57d888" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1046,9 +1046,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ec441341e019c441aa78472ed6d206cfe198026c495277a95ac5bebda520742" +checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" dependencies = [ "futures-util", "pin-project-lite", @@ -1089,9 +1089,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.60.5" +version = "0.60.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85d6a0619f7b67183067fa3b558f94f90753da2df8c04aeb7336d673f804b0b8" +checksum = "b6ca214a6a26f1b7ebd63aa8d4f5e2194095643023f9608edf99a58247b9d80d" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -1129,9 +1129,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3b36f1f98c8d7b6256b86d4a3c8c4abb120670267baa9712a485ba477eaac9e9" +checksum = "fbb5fca54a532a36ff927fbd7407a7c8eb9c3b4faf72792ba2965ea2cad8ed55" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1154,9 +1154,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "180898ed701a773fb3fadbd94b9e9559125cf88eeb1815ab99e35d4f5f34f7fb" +checksum = "22389cb6f7cac64f266fb9f137745a9349ced7b47e0d2ba503e9e40ede4f7060" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ -1170,9 +1170,9 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "897f1db4020ad91f2c2421945ec49b7e3eb81cc3fea99e8b5dd5be721e697fed" +checksum = "f081da5481210523d44ffd83d9f0740320050054006c719eae0232d411f024d3" dependencies = [ "base64-simd", "bytes 1.5.0", @@ -1200,9 +1200,9 @@ dependencies = [ [[package]] name = "aws-types" -version = "1.1.5" +version = "1.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0ff7e122ee50ca962e9de91f5850cc37e2184b1219611eef6d44aa85929b54f6" +checksum = "8fbb5d48aae496f628e7aa2e41991dd4074f606d9e3ade1ce1059f293d40f9a2" dependencies = [ "aws-credential-types", "aws-smithy-async", diff --git a/Cargo.toml b/Cargo.toml index 3281145fb04b8..995b8ab3f37bf 100644 --- 
a/Cargo.toml +++ b/Cargo.toml @@ -181,14 +181,14 @@ aws-sdk-cloudwatchlogs = { version = "1.3.0", default-features = false, features aws-sdk-elasticsearch = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-firehose = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } -aws-types = { version = "1.1.5", default-features = false, optional = true } -aws-sigv4 = { version = "1.1.5", default-features = false, features = ["sign-http"], optional = true } +aws-types = { version = "1.1.6", default-features = false, optional = true } +aws-sigv4 = { version = "1.1.6", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } aws-credential-types = { version = "1.1.6", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } -aws-smithy-types = { version = "1.1.6", default-features = false, optional = true } -aws-smithy-runtime-api = { version = "1.1.6", default-features = false, optional = true } -aws-smithy-runtime = { version = "1.1.6", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } +aws-smithy-types = { version = "1.1.7", default-features = false, optional = true } +aws-smithy-runtime-api = { version = "1.1.7", default-features = false, optional = true } +aws-smithy-runtime = { version = "1.1.7", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } aws-smithy-async = { version = "1.0.2", default-features = false, features = ["rt-tokio"], optional = true } # Azure @@ -358,7 +358,7 @@ openssl-src = { version = "300", default-features = false, features = ["force-en [dev-dependencies] approx = "0.5.1" assert_cmd = { version = "2.0.14", default-features = false } -aws-smithy-runtime = { version = "1.1.6", default-features = false, features = ["tls-rustls"] } +aws-smithy-runtime = { version = "1.1.7", default-features = false, features = ["tls-rustls"] } azure_core = { version = "0.17", default-features = false, features = ["enable_reqwest", "azurite_workaround"] } azure_identity = { version = "0.17", default-features = false, features = ["enable_reqwest"] } azure_storage_blobs = { version = "0.17", default-features = false, features = ["azurite_workaround"] } From ae5b06bff08d062216a1beab2f764b6b39b04b71 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 13:51:37 +0000 Subject: [PATCH 0056/1491] chore(deps): Bump lru from 0.12.2 to 0.12.3 (#19945) Bumps [lru](https://github.com/jeromefroe/lru-rs) from 0.12.2 to 0.12.3. - [Changelog](https://github.com/jeromefroe/lru-rs/blob/master/CHANGELOG.md) - [Commits](https://github.com/jeromefroe/lru-rs/compare/0.12.2...0.12.3) --- updated-dependencies: - dependency-name: lru dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0caed2ac8f876..bbb6924dbf89b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4961,9 +4961,9 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db2c024b41519440580066ba82aab04092b333e09066a5eb86c7c4890df31f22" +checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" dependencies = [ "hashbrown 0.14.3", ] diff --git a/Cargo.toml b/Cargo.toml index 995b8ab3f37bf..e3762006d235e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -291,7 +291,7 @@ k8s-openapi = { version = "0.18.0", default-features = false, features = ["api", kube = { version = "0.82.0", default-features = false, features = ["client", "openssl-tls", "runtime"], optional = true } listenfd = { version = "1.0.1", default-features = false, optional = true } logfmt = { version = "0.0.2", default-features = false, optional = true } -lru = { version = "0.12.2", default-features = false, optional = true } +lru = { version = "0.12.3", default-features = false, optional = true } maxminddb = { version = "0.24.0", default-features = false, optional = true } md-5 = { version = "0.10", default-features = false, optional = true } mongodb = { version = "2.8.1", default-features = false, features = ["tokio-runtime"], optional = true } From 7bb9716ebc46bb2842e8df4b2c20775c1897d631 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 13:51:47 +0000 Subject: [PATCH 0057/1491] chore(deps): Bump socket2 from 0.5.5 to 0.5.6 (#19947) Bumps [socket2](https://github.com/rust-lang/socket2) from 0.5.5 to 0.5.6. - [Release notes](https://github.com/rust-lang/socket2/releases) - [Changelog](https://github.com/rust-lang/socket2/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/socket2/compare/v0.5.5...v0.5.6) --- updated-dependencies: - dependency-name: socket2 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 20 ++++++++++---------- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 3 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bbb6924dbf89b..92bf9634de88f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2854,7 +2854,7 @@ checksum = "e5766087c2235fec47fafa4cfecc81e494ee679d0fd4a59887ea0919bfb0e4fc" dependencies = [ "cfg-if", "libc", - "socket2 0.5.5", + "socket2 0.5.6", "windows-sys 0.48.0", ] @@ -4173,7 +4173,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.5.5", + "socket2 0.4.10", "tokio", "tower-service", "tracing 0.1.40", @@ -4454,7 +4454,7 @@ version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.5", + "socket2 0.5.6", "widestring 1.0.2", "windows-sys 0.48.0", "winreg", @@ -8444,12 +8444,12 @@ dependencies = [ [[package]] name = "socket2" -version = "0.5.5" +version = "0.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" +checksum = "05ffd9c0a93b7543e062e759284fcf5f5e3b098501104bfbdde4d404db792871" dependencies = [ "libc", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -8968,7 +8968,7 @@ dependencies = [ "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.5", + "socket2 0.5.6", "tokio-macros", "tracing 0.1.40", "windows-sys 0.48.0", @@ -9048,7 +9048,7 @@ dependencies = [ "postgres-protocol", "postgres-types", "rand 0.8.5", - "socket2 0.5.5", + "socket2 0.5.6", "tokio", "tokio-util", "whoami", @@ -9989,7 +9989,7 @@ dependencies = [ "smpl_jwt", "snafu 0.7.5", "snap", - "socket2 0.5.5", + "socket2 0.5.6", "stream-cancel", "strip-ansi-escapes", "syslog", @@ -10240,7 +10240,7 @@ dependencies = [ "similar-asserts", "smallvec", "snafu 0.7.5", - "socket2 0.5.5", + "socket2 0.5.6", "tokio", "tokio-openssl", "tokio-stream", diff --git a/Cargo.toml b/Cargo.toml index e3762006d235e..0857e8ead47bd 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -319,7 +319,7 @@ semver = { version = "1.0.22", default-features = false, features = ["serde", "s smallvec = { version = "1", default-features = false, features = ["union", "serde"] } snafu = { version = "0.7.5", default-features = false, features = ["futures"] } snap = { version = "1.1.1", default-features = false } -socket2 = { version = "0.5.5", default-features = false } +socket2 = { version = "0.5.6", default-features = false } stream-cancel = { version = "0.8.2", default-features = false } strip-ansi-escapes = { version = "0.2.0", default-features = false } syslog = { version = "6.1.0", default-features = false, optional = true } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 7540c44a8efd7..38c48cf71f63b 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -45,7 +45,7 @@ serde_json.workspace = true serde_with = { version = "3.6.1", default-features = false, features = ["std", "macros"] } smallvec = { version = "1", default-features = false, features = ["serde", "const_generics"] } snafu = { version = "0.7.5", default-features = false } -socket2 = { version = "0.5.5", default-features = false } +socket2 = { version = "0.5.6", default-features = false } tokio = { version = "1.36.0", default-features = false, features = ["net"] } 
tokio-openssl = { version = "0.6.4", default-features = false } tokio-stream = { version = "0.1", default-features = false, features = ["time"], optional = true } From fb11980b98b5ad3358124b5ecfb24d136c6f8903 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 13:52:03 +0000 Subject: [PATCH 0058/1491] chore(deps): Bump cached from 0.48.1 to 0.49.2 (#19948) Bumps [cached](https://github.com/jaemk/cached) from 0.48.1 to 0.49.2. - [Changelog](https://github.com/jaemk/cached/blob/master/CHANGELOG.md) - [Commits](https://github.com/jaemk/cached/commits) --- updated-dependencies: - dependency-name: cached dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- vdev/Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 92bf9634de88f..dde663bc70f9b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1712,9 +1712,9 @@ checksum = "a3e368af43e418a04d52505cf3dbc23dda4e3407ae2fa99fd0e4f308ce546acc" [[package]] name = "cached" -version = "0.48.1" +version = "0.49.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "355face540df58778b96814c48abb3c2ed67c4878a8087ab1819c1fedeec505f" +checksum = "f251fd1e72720ca07bf5d8e310f54a193fd053479a1f6342c6663ee4fa01cf96" dependencies = [ "ahash 0.8.6", "cached_proc_macro", @@ -1727,9 +1727,9 @@ dependencies = [ [[package]] name = "cached_proc_macro" -version = "0.19.1" +version = "0.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d52f526f7cbc875b296856ca8c964a9f6290556922c303a8a3883e3c676e6a1" +checksum = "ad9f16c0d84de31a2ab7fdf5f7783c14631f7075cf464eb3bb43119f61c9cb2a" dependencies = [ "darling 0.14.4", "proc-macro2 1.0.78", diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index d1717412016e8..9b0e4f8e9c6d9 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -9,7 +9,7 @@ publish = false [dependencies] anyhow = "1.0.80" -cached = "0.48.0" +cached = "0.49.2" chrono.workspace = true clap.workspace = true clap-verbosity-flag = "2.2.0" From 4634e2f167f47c6f9cfe0221cb7238b976f76091 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 26 Feb 2024 16:06:08 +0000 Subject: [PATCH 0059/1491] chore(deps): Bump openssl from 0.10.63 to 0.10.64 (#19906) Bumps [openssl](https://github.com/sfackler/rust-openssl) from 0.10.63 to 0.10.64. - [Release notes](https://github.com/sfackler/rust-openssl/releases) - [Commits](https://github.com/sfackler/rust-openssl/compare/openssl-v0.10.63...openssl-v0.10.64) --- updated-dependencies: - dependency-name: openssl dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 10 +++++----- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index dde663bc70f9b..14755841380ea 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4173,7 +4173,7 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", - "socket2 0.4.10", + "socket2 0.5.6", "tokio", "tower-service", "tracing 0.1.40", @@ -5917,9 +5917,9 @@ dependencies = [ [[package]] name = "openssl" -version = "0.10.63" +version = "0.10.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15c9d69dd87a29568d4d017cfe8ec518706046a05184e5aea92d0af890b803c8" +checksum = "95a0481286a310808298130d22dd1fef0fa571e05a8f44ec801801e84b216b1f" dependencies = [ "bitflags 2.4.1", "cfg-if", @@ -5958,9 +5958,9 @@ dependencies = [ [[package]] name = "openssl-sys" -version = "0.9.99" +version = "0.9.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22e1bf214306098e4832460f797824c05d25aacdf896f64a985fb0fd992454ae" +checksum = "dda2b0f344e78efc2facf7d195d098df0dd72151b26ab98da807afc26c198dff" dependencies = [ "cc", "libc", diff --git a/Cargo.toml b/Cargo.toml index 0857e8ead47bd..8d4a9c4e9ce93 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -300,7 +300,7 @@ nkeys = { version = "0.4.0", default-features = false, optional = true } nom = { version = "7.1.3", default-features = false, optional = true } notify = { version = "6.1.1", default-features = false, features = ["macos_fsevent"] } once_cell = { version = "1.19", default-features = false } -openssl = { version = "0.10.63", default-features = false, features = ["vendored"] } +openssl = { version = "0.10.64", default-features = false, features = ["vendored"] } openssl-probe = { version = "0.1.5", default-features = false } ordered-float = { version = "4.2.0", default-features = false } paste = "1.0.14" diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 38c48cf71f63b..fb4fc8e5dfa95 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -31,7 +31,7 @@ mlua = { version = "0.9.5", default-features = false, features = ["lua54", "send no-proxy = { version = "0.3.4", default-features = false, features = ["serialize"] } once_cell = { version = "1.19", default-features = false } ordered-float = { version = "4.2.0", default-features = false } -openssl = { version = "0.10.63", default-features = false, features = ["vendored"] } +openssl = { version = "0.10.64", default-features = false, features = ["vendored"] } parking_lot = { version = "0.12.1", default-features = false } pin-project.workspace = true proptest = { version = "1.4", optional = true } From 070e38c555d7a7aaf9dda67e7dd468cfbfb949b9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Mon, 26 Feb 2024 17:09:23 +0100 Subject: [PATCH 0060/1491] feat(dnsmsg_parser): add support for EDNS EDE fields (#19937) * feat(dnsmsg_parser): add support for EDNS EDE fields This adds support for EDNS extended DNS errors. This implementation is slightly different compared to original proposal in #19871, in that it does not return `ede` as a direct child of `responseData`, but rather in `opt` field of `responseData`, since that is where other EDNS options are located. 
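For reference, the EDE option body defined by RFC 8914 is minimal: a 16-bit
big-endian INFO-CODE followed by optional UTF-8 EXTRA-TEXT. A standalone sketch
of that layout (illustrative only — the patch itself decodes through
`hickory_proto`'s `BinDecoder`, and `decode_ede_body` is a hypothetical name):

```rust
// Hypothetical helper mirroring the RFC 8914 EDE option body:
// a big-endian u16 INFO-CODE, then optional UTF-8 EXTRA-TEXT.
fn decode_ede_body(bytes: &[u8]) -> Option<(u16, Option<String>)> {
    let info_code = u16::from_be_bytes([*bytes.first()?, *bytes.get(1)?]);
    let extra_text = if bytes.len() > 2 {
        Some(String::from_utf8(bytes[2..].to_vec()).ok()?)
    } else {
        None
    };
    Some((info_code, extra_text))
}
```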
Fixes: #19871

* Add changelog entry

* Fix spelling error in `ede.rs`

* Clean up `BinDecodable` impl for `EDE`

* Fix clippy warnings

* Remove unused `len` and `is_empty` from EDE
---
 .github/actions/spelling/allow.txt            |  2 +
 changelog.d/19937_edns_ede_support.feature.md |  3 +
 lib/dnsmsg-parser/src/dns_message.rs          |  3 +
 lib/dnsmsg-parser/src/dns_message_parser.rs   | 70 +++++++++++++-
 lib/dnsmsg-parser/src/ede.rs                  | 93 +++++++++++++++++++
 lib/dnsmsg-parser/src/lib.rs                  |  1 +
 src/sources/dnstap/parser.rs                  | 60 ++++++++++++
 src/sources/dnstap/schema.rs                  | 24 +++++
 .../reference/components/sources/dnstap.cue   | 41 +++++++-
 website/cue/reference/urls.cue                |  1 +
 10 files changed, 292 insertions(+), 6 deletions(-)
 create mode 100644 changelog.d/19937_edns_ede_support.feature.md
 create mode 100644 lib/dnsmsg-parser/src/ede.rs

diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt
index a44b6ce736215..1943e682bcc80 100644
--- a/.github/actions/spelling/allow.txt
+++ b/.github/actions/spelling/allow.txt
@@ -164,6 +164,7 @@ pront
 Proscan
 Qmobilevn
 RPZ
+RRSIGs
 Rackspace
 Rathi
 Regza
@@ -282,6 +283,7 @@ dnsutils
 dockercmd
 downsides
 downwardapi
+ede
 emoji
 esbuild
 etld
diff --git a/changelog.d/19937_edns_ede_support.feature.md b/changelog.d/19937_edns_ede_support.feature.md
new file mode 100644
index 0000000000000..5366ea3a0e7dd
--- /dev/null
+++ b/changelog.d/19937_edns_ede_support.feature.md
@@ -0,0 +1,3 @@
+Added support for parsing EDNS EDE (Extended DNS errors) options
+
+authors: esensar
diff --git a/lib/dnsmsg-parser/src/dns_message.rs b/lib/dnsmsg-parser/src/dns_message.rs
index 58f949cb3ed8b..0d40e713b517b 100644
--- a/lib/dnsmsg-parser/src/dns_message.rs
+++ b/lib/dnsmsg-parser/src/dns_message.rs
@@ -1,5 +1,7 @@
 use hickory_proto::op::ResponseCode;

+use crate::ede::EDE;
+
 pub(super) const RTYPE_MB: u16 = 7;
 pub(super) const RTYPE_MG: u16 = 8;
 pub(super) const RTYPE_MR: u16 = 9;
@@ -81,6 +83,7 @@ pub struct OptPseudoSection {
     pub version: u8,
     pub dnssec_ok: bool,
     pub udp_max_payload_size: u16,
+    pub ede: Vec<EDE>,
     pub options: Vec<EdnsOptionEntry>,
 }

diff --git a/lib/dnsmsg-parser/src/dns_message_parser.rs b/lib/dnsmsg-parser/src/dns_message_parser.rs
index cf2e7176229c1..0f83a4b473c58 100644
--- a/lib/dnsmsg-parser/src/dns_message_parser.rs
+++ b/lib/dnsmsg-parser/src/dns_message_parser.rs
@@ -20,6 +20,8 @@ use hickory_proto::{
 };
 use thiserror::Error;

+use crate::ede::{EDE, EDE_OPTION_CODE};
+
 use super::dns_message::{
     self, DnsQueryMessage, DnsRecord, DnsUpdateMessage, EdnsOptionEntry, OptPseudoSection,
     QueryHeader, QueryQuestion, UpdateHeader, ZoneInfo,
@@ -866,20 +868,39 @@ fn parse_dns_update_message_header(dns_message: &TrustDnsMessage) -> UpdateHeade

 fn parse_edns(dns_message: &TrustDnsMessage) -> Option<DnsParserResult<OptPseudoSection>> {
     dns_message.extensions().as_ref().map(|edns| {
-        parse_edns_options(edns).map(|options| OptPseudoSection {
+        parse_edns_options(edns).map(|(ede, rest)| OptPseudoSection {
             extended_rcode: edns.rcode_high(),
             version: edns.version(),
             dnssec_ok: edns.dnssec_ok(),
             udp_max_payload_size: edns.max_payload(),
-            options,
+            ede,
+            options: rest,
         })
     })
 }

-fn parse_edns_options(edns: &Edns) -> DnsParserResult<Vec<EdnsOptionEntry>> {
-    edns.options()
+fn parse_edns_options(edns: &Edns) -> DnsParserResult<(Vec<EDE>, Vec<EdnsOptionEntry>)> {
+    let ede_opts: Vec<EDE> = edns
+        .options()
         .as_ref()
         .iter()
+        .filter_map(|(_, option)| {
+            if let EdnsOption::Unknown(EDE_OPTION_CODE, option) = option {
+                Some(
+                    EDE::from_bytes(option)
+                        .map_err(|source| DnsMessageParserError::TrustDnsError { source }),
+                )
+            } else {
+                None
+            }
+        })
+        .collect::<Result<Vec<EDE>, DnsMessageParserError>>()?;
+
+    let rest: Vec<EdnsOptionEntry> = edns
+        .options()
         .as_ref()
         .iter()
+        .filter(|(&code, _)| u16::from(code) != EDE_OPTION_CODE)
         .map(|(code, option)| match option {
             EdnsOption::DAU(algorithms)
             | EdnsOption::DHU(algorithms)
@@ -891,7 +912,9 @@ fn parse_edns_options(edns: &Edns) -> DnsParserResult<Vec<EdnsOptionEntry>> {
                 .map(|bytes| parse_edns_opt(*code, &bytes))
                 .map_err(|source| DnsMessageParserError::TrustDnsError { source }),
         })
-        .collect()
+        .collect::<Result<Vec<EdnsOptionEntry>, DnsMessageParserError>>()?;
+
+    Ok((ede_opts, rest))
 }

 fn parse_edns_opt_dnssec_algorithms(
@@ -1209,6 +1232,43 @@ mod tests {
         );
     }

+    #[test]
+    fn test_parse_as_query_message_with_ede() {
+        let raw_dns_message =
+            "szgAAAABAAAAAAABAmg1B2V4YW1wbGUDY29tAAAGAAEAACkE0AEBQAAABgAPAAIAFQ==";
+        let raw_query_message = BASE64
+            .decode(raw_dns_message.as_bytes())
+            .expect("Invalid base64 encoded data.");
+        let parse_result = DnsMessageParser::new(raw_query_message).parse_as_query_message();
+        assert!(parse_result.is_ok());
+        let message = parse_result.expect("Message is not parsed.");
+        let opt_pseudo_section = message.opt_pseudo_section.expect("OPT section was missing");
+        assert_eq!(opt_pseudo_section.ede.len(), 1);
+        assert_eq!(opt_pseudo_section.ede[0].info_code(), 21u16);
+        assert_eq!(opt_pseudo_section.ede[0].purpose(), Some("Not Supported"));
+        assert_eq!(opt_pseudo_section.ede[0].extra_text(), None);
+    }
+
+    #[test]
+    fn test_parse_as_query_message_with_ede_with_extra_text() {
+        let raw_dns_message =
+            "szgAAAABAAAAAAABAmg1B2V4YW1wbGUDY29tAAAGAAEAACkE0AEBQAAAOQAPADUACW5vIFNFUCBtYXRjaGluZyB0aGUgRFMgZm91bmQgZm9yIGRuc3NlYy1mYWlsZWQub3JnLg=="
+        ;
+        let raw_query_message = BASE64
+            .decode(raw_dns_message.as_bytes())
+            .expect("Invalid base64 encoded data.");
+        let parse_result = DnsMessageParser::new(raw_query_message).parse_as_query_message();
+        assert!(parse_result.is_ok());
+        let message = parse_result.expect("Message is not parsed.");
+        let opt_pseudo_section = message.opt_pseudo_section.expect("OPT section was missing");
+        assert_eq!(opt_pseudo_section.ede.len(), 1);
+        assert_eq!(opt_pseudo_section.ede[0].info_code(), 9u16);
+        assert_eq!(opt_pseudo_section.ede[0].purpose(), Some("DNSKEY Missing"));
+        assert_eq!(
+            opt_pseudo_section.ede[0].extra_text(),
+            Some("no SEP matching the DS found for dnssec-failed.org.".to_string())
+        );
+    }
+
     #[test]
     fn test_parse_as_query_message_with_invalid_data() {
         let err = DnsMessageParser::new(vec![1, 2, 3])
diff --git a/lib/dnsmsg-parser/src/ede.rs b/lib/dnsmsg-parser/src/ede.rs
new file mode 100644
index 0000000000000..7bd3e7c0439ee
--- /dev/null
+++ b/lib/dnsmsg-parser/src/ede.rs
@@ -0,0 +1,93 @@
+use hickory_proto::{
+    error::ProtoResult,
+    serialize::binary::{BinDecodable, BinDecoder, BinEncodable, BinEncoder},
+};
+
+pub const EDE_OPTION_CODE: u16 = 15u16;
+
+#[derive(Debug, Clone)]
+pub struct EDE {
+    info_code: u16,
+    extra_text: Option<String>,
+}
+
+impl EDE {
+    pub fn new(info_code: u16, extra_text: Option<String>) -> Self {
+        Self {
+            info_code,
+            extra_text,
+        }
+    }
+
+    // https://www.iana.org/assignments/dns-parameters/dns-parameters.xhtml#extended-dns-error-codes
+    pub fn purpose(&self) -> Option<&str> {
+        match self.info_code {
+            0 => Some("Other Error"),
+            1 => Some("Unsupported DNSKEY Algorithm"),
+            2 => Some("Unsupported DS Digest Type"),
+            3 => Some("Stale Answer"),
+            4 => Some("Forged Answer"),
+            5 => Some("DNSSEC Indeterminate"),
+            6 => Some("DNSSEC Bogus"),
+            7 => Some("Signature Expired"),
+            8 => Some("Signature Not Yet Valid"),
+            9 => Some("DNSKEY Missing"),
+            10 => Some("RRSIGs Missing"),
+            11 => Some("No Zone Key Bit Set"),
Some("No Zone Key Bit Set"), + 12 => Some("NSEC Missing"), + 13 => Some("Cached Error"), + 14 => Some("Not Ready"), + 15 => Some("Blocked"), + 16 => Some("Censored"), + 17 => Some("Filtered"), + 18 => Some("Prohibited"), + 19 => Some("Stale NXDomain Answer"), + 20 => Some("Not Authoritative"), + 21 => Some("Not Supported"), + 22 => Some("No Reachable Authority"), + 23 => Some("Network Error"), + 24 => Some("Invalid Data"), + 25 => Some("Signature Expired before Valid"), + 26 => Some("Too Early"), + 27 => Some("Unsupported NSEC3 Iterations Value"), + 28 => Some("Unable to conform to policy"), + 29 => Some("Synthesized"), + _ => None, + } + } + + pub fn info_code(&self) -> u16 { + self.info_code + } + + pub fn extra_text(&self) -> Option { + self.extra_text.clone() + } +} + +impl BinEncodable for EDE { + fn emit(&self, encoder: &mut BinEncoder<'_>) -> ProtoResult<()> { + encoder.emit_u16(self.info_code)?; + if let Some(extra_text) = &self.extra_text { + encoder.emit_vec(extra_text.as_bytes())?; + } + Ok(()) + } +} + +impl<'a> BinDecodable<'a> for EDE { + fn read(decoder: &mut BinDecoder<'a>) -> ProtoResult { + let info_code = decoder.read_u16()?.unverified(); + let extra_text = if decoder.is_empty() { + None + } else { + Some(String::from_utf8( + decoder.read_vec(decoder.len())?.unverified(), + )?) + }; + Ok(Self { + info_code, + extra_text, + }) + } +} diff --git a/lib/dnsmsg-parser/src/lib.rs b/lib/dnsmsg-parser/src/lib.rs index f78b7099deb4b..d332fd0d3fae3 100644 --- a/lib/dnsmsg-parser/src/lib.rs +++ b/lib/dnsmsg-parser/src/lib.rs @@ -9,3 +9,4 @@ pub mod dns_message; pub mod dns_message_parser; +pub mod ede; diff --git a/src/sources/dnstap/parser.rs b/src/sources/dnstap/parser.rs index f90a8ac10a5ec..6067b1a96c7f5 100644 --- a/src/sources/dnstap/parser.rs +++ b/src/sources/dnstap/parser.rs @@ -8,6 +8,7 @@ use std::{ use base64::prelude::{Engine as _, BASE64_STANDARD}; use bytes::Bytes; use chrono::{TimeZone, Utc}; +use dnsmsg_parser::ede::EDE; use hickory_proto::{ rr::domain::Name, serialize::binary::{BinDecodable, BinDecoder}, @@ -811,6 +812,7 @@ impl DnstapParser { &DNSTAP_VALUE_PATHS.udp_max_payload_size, edns.udp_max_payload_size, ); + DnstapParser::log_edns_ede(event, prefix.concat(&DNSTAP_VALUE_PATHS.ede), &edns.ede); DnstapParser::log_edns_options( event, prefix.concat(&DNSTAP_VALUE_PATHS.options), @@ -819,6 +821,33 @@ impl DnstapParser { } } + fn log_edns_ede<'a>(event: &mut LogEvent, prefix: impl ValuePath<'a>, options: &[EDE]) { + options.iter().enumerate().for_each(|(i, entry)| { + let index_segment = path!(i as isize); + DnstapParser::log_edns_ede_entry(event, prefix.concat(index_segment), entry); + }); + } + + fn log_edns_ede_entry<'a>(event: &mut LogEvent, prefix: impl ValuePath<'a>, entry: &EDE) { + DnstapParser::insert( + event, + prefix.clone(), + &DNSTAP_VALUE_PATHS.info_code, + entry.info_code(), + ); + if let Some(purpose) = entry.purpose() { + DnstapParser::insert(event, prefix.clone(), &DNSTAP_VALUE_PATHS.purpose, purpose); + } + if let Some(extra_text) = entry.extra_text() { + DnstapParser::insert( + event, + prefix.clone(), + &DNSTAP_VALUE_PATHS.extra_text, + extra_text, + ); + } + } + fn log_edns_options<'a>( event: &mut LogEvent, prefix: impl ValuePath<'a>, @@ -1075,6 +1104,37 @@ mod tests { } } + #[test] + fn test_parse_dnstap_data_with_ede_options() { + let mut log_event = LogEvent::default(); + let raw_dnstap_data = 
"ChVqYW1lcy1WaXJ0dWFsLU1hY2hpbmUSC0JJTkQgOS4xNi4zGgBy5wEIAxACGAEiEAAAAAAAAAAAAAAAAAAAAAAqECABBQJwlAAAAAAAAAAAADAw8+0CODVA7+zq9wVNMU3WNlI2kwIAAAABAAAAAAABCWZhY2Vib29rMQNjb20AAAEAAQAAKQIAAACAAAAMAAoACOxjCAG9zVgzWgUDY29tAGAAbQAAAAByZLM4AAAAAQAAAAAAAQJoNQdleGFtcGxlA2NvbQAABgABAAApBNABAUAAADkADwA1AAlubyBTRVAgbWF0Y2hpbmcgdGhlIERTIGZvdW5kIGZvciBkbnNzZWMtZmFpbGVkLm9yZy54AQ=="; + let dnstap_data = BASE64_STANDARD + .decode(raw_dnstap_data) + .expect("Invalid base64 encoded data."); + let parse_result = DnstapParser::parse(&mut log_event, Bytes::from(dnstap_data)); + assert!(parse_result.is_ok()); + + let expected_map: BTreeMap<&str, Value> = BTreeMap::from([ + ("responseData.opt.ede[0].infoCode", Value::Integer(9)), + ( + "responseData.opt.ede[0].purpose", + Value::Bytes(Bytes::from("DNSKEY Missing")), + ), + ( + "responseData.opt.ede[0].extraText", + Value::Bytes(Bytes::from( + "no SEP matching the DS found for dnssec-failed.org.", + )), + ), + ]); + + // The maps need to contain identical keys and values. + for (exp_key, exp_value) in expected_map { + let value = log_event.get(exp_key).unwrap(); + assert_eq!(*value, exp_value); + } + } + #[test] fn test_parse_dnstap_data_with_update_message() { let mut log_event = LogEvent::default(); diff --git a/src/sources/dnstap/schema.rs b/src/sources/dnstap/schema.rs index 189b5a412babf..59db0421a7f3a 100644 --- a/src/sources/dnstap/schema.rs +++ b/src/sources/dnstap/schema.rs @@ -254,8 +254,14 @@ pub struct DnstapPaths { pub version: OwnedValuePath, pub do_flag: OwnedValuePath, pub udp_max_payload_size: OwnedValuePath, + pub ede: OwnedValuePath, pub options: OwnedValuePath, + // DnsMessageEdeOptionSchema + pub info_code: OwnedValuePath, + pub purpose: OwnedValuePath, + pub extra_text: OwnedValuePath, + // DnsMessageOptionSchema pub opt_code: OwnedValuePath, pub opt_name: OwnedValuePath, @@ -336,7 +342,11 @@ pub(crate) static DNSTAP_VALUE_PATHS: Lazy = Lazy::new(|| DnstapPat version: owned_value_path!("ednsVersion"), do_flag: owned_value_path!("do"), udp_max_payload_size: owned_value_path!("udpPayloadSize"), + ede: owned_value_path!("ede"), options: owned_value_path!("options"), + info_code: owned_value_path!("infoCode"), + purpose: owned_value_path!("purpose"), + extra_text: owned_value_path!("extraText"), opt_code: owned_value_path!("optCode"), opt_name: owned_value_path!("optName"), opt_data: owned_value_path!("optValue"), @@ -417,6 +427,20 @@ impl DnsMessageOptPseudoSectionSchema { } } +#[derive(Debug, Default, Clone)] +pub struct DnsMessageEdeOptionSchema; + +impl DnsMessageEdeOptionSchema { + pub fn schema_definition() -> Collection { + btreemap! 
{ + DNSTAP_VALUE_PATHS.info_code.to_string() => Kind::integer(), + DNSTAP_VALUE_PATHS.purpose.to_string() => Kind::bytes(), + DNSTAP_VALUE_PATHS.extra_text.to_string() => Kind::bytes(), + } + .into() + } +} + #[derive(Debug, Default, Clone)] pub struct DnsMessageOptionSchema; diff --git a/website/cue/reference/components/sources/dnstap.cue b/website/cue/reference/components/sources/dnstap.cue index e82fa83df0d9a..0bf149c18df36 100644 --- a/website/cue/reference/components/sources/dnstap.cue +++ b/website/cue/reference/components/sources/dnstap.cue @@ -809,8 +809,47 @@ components: sources: dnstap: { ] "udpPayloadSize": 4096 }, + { + "do": false + "ednsVersion": 0 + "extendedRcode": 0 + "options": [ + { + "optCode": 10 + "optName": "Cookie" + "optValue": "hbbDFmHUM9wBAAAAX1q1McL4KhalWTS3" + }, + ] + "ede": [ + { + "infoCode": 9 + "purpose": "DNSKEY Missing" + "extraText": "Additional description" + }, + ] + "udpPayloadSize": 4096 + }, ] - options: {} + options: { + ede: { + required: false + description: """ + Extended DNS errors. Provides additional information about + the DNS failure. See [RFC 8914](\(urls.rfc_8914)) for + detailed information. + """ + type: array: items: type: object: { + examples: [ + { + "infoCode": 9 + "purpose": "DNSKEY Missing" + "extraText": "Additional description" + }, + ] + options: {} + } + } + } } } zone: { diff --git a/website/cue/reference/urls.cue b/website/cue/reference/urls.cue index e0fe11129bc6f..f7c81a62462a1 100644 --- a/website/cue/reference/urls.cue +++ b/website/cue/reference/urls.cue @@ -444,6 +444,7 @@ urls: { rfc_4180: "https://tools.ietf.org/html/rfc4180" rfc_6587_3_4_1: "https://tools.ietf.org/html/rfc6587#section-3.4.1" rfc_6891: "https://tools.ietf.org/html/rfc6891" + rfc_8914: "https://tools.ietf.org/html/rfc8914" rhel: "https://www.redhat.com/en/technologies/linux-platforms/enterprise-linux" rpm: "https://rpm.org/" rust: "https://www.rust-lang.org/" From f33169d6aa7d130f8a6a47a7060eeb3c69e22e98 Mon Sep 17 00:00:00 2001 From: Stephen Wakely Date: Mon, 26 Feb 2024 16:35:16 +0000 Subject: [PATCH 0061/1491] fix(pulsar source): PulsarErrorEvent only occurs for the source (#19950) PulsarErrorEvent only occurs for the source Signed-off-by: Stephen Wakely --- src/internal_events/pulsar.rs | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/src/internal_events/pulsar.rs b/src/internal_events/pulsar.rs index 1e7b83bc71080..7006d46fba7ec 100644 --- a/src/internal_events/pulsar.rs +++ b/src/internal_events/pulsar.rs @@ -1,4 +1,6 @@ -use metrics::{counter, register_counter, Counter}; +use metrics::counter; +#[cfg(feature = "sources-pulsar")] +use metrics::{register_counter, Counter}; use vector_lib::internal_event::{ error_stage, error_type, ComponentEventsDropped, InternalEvent, UNINTENTIONAL, }; @@ -54,17 +56,20 @@ impl InternalEvent for PulsarPropertyExtractionError { } } +#[cfg(feature = "sources-pulsar")] pub enum PulsarErrorEventType { Read, Ack, NAck, } +#[cfg(feature = "sources-pulsar")] pub struct PulsarErrorEventData { pub msg: String, pub error_type: PulsarErrorEventType, } +#[cfg(feature = "sources-pulsar")] registered_event!( PulsarErrorEvent => { ack_errors: Counter = register_counter!( From b9c4544d83c9c4042c49b4153cb94ba062f9dfdb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 07:57:38 -0500 Subject: [PATCH 0062/1491] chore(deps): Bump bstr from 1.9.0 to 1.9.1 (#19946) Bumps [bstr](https://github.com/BurntSushi/bstr) from 1.9.0 to 1.9.1. 
- [Commits](https://github.com/BurntSushi/bstr/compare/1.9.0...1.9.1) --- updated-dependencies: - dependency-name: bstr dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 14755841380ea..1faa71949b8ed 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -338,7 +338,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ed72493ac66d5804837f480ab3766c72bdfab91a65e565fc54fa9e42db0073a8" dependencies = [ "anstyle", - "bstr 1.9.0", + "bstr 1.9.1", "doc-comment", "predicates", "predicates-core", @@ -1632,9 +1632,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.9.0" +version = "1.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c48f0051a4b4c5e0b6d365cd04af53aeaa209e3cc15ec2cdb69e73cc87fbd0dc" +checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" dependencies = [ "memchr", "regex-automata 0.4.4", @@ -3253,7 +3253,7 @@ checksum = "a481586acf778f1b1455424c343f71124b048ffa5f4fc3f8f6ae9dc432dcb3c7" name = "file-source" version = "0.1.0" dependencies = [ - "bstr 1.9.0", + "bstr 1.9.1", "bytes 1.5.0", "chrono", "crc", @@ -5256,7 +5256,7 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1d3561f79659ff3afad7b25e2bf2ec21507fe601ebecb7f81088669ec4bfd51e" dependencies = [ - "bstr 1.9.0", + "bstr 1.9.1", "mlua-sys", "mlua_derive", "num-traits", From 3091443aa82b31ba04ecd3727c1f6bb37a6abbb0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 07:57:49 -0500 Subject: [PATCH 0063/1491] chore(deps): Bump darling from 0.20.6 to 0.20.8 (#19949) Bumps [darling](https://github.com/TedDriggs/darling) from 0.20.6 to 0.20.8. - [Release notes](https://github.com/TedDriggs/darling/releases) - [Changelog](https://github.com/TedDriggs/darling/blob/master/CHANGELOG.md) - [Commits](https://github.com/TedDriggs/darling/compare/v0.20.6...v0.20.8) --- updated-dependencies: - dependency-name: darling dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 26 +++++++++++++------------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1faa71949b8ed..5894623256007 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -463,7 +463,7 @@ checksum = "a6a7349168b79030e3172a620f4f0e0062268a954604e41475eff082380fe505" dependencies = [ "Inflector", "async-graphql-parser", - "darling 0.20.6", + "darling 0.20.8", "proc-macro-crate 1.3.1", "proc-macro2 1.0.78", "quote 1.0.35", @@ -2579,12 +2579,12 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.6" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c376d08ea6aa96aafe61237c7200d1241cb177b7d3a542d791f2d118e9cbb955" +checksum = "54e36fcd13ed84ffdfda6f5be89b31287cbb80c439841fe69e04841435464391" dependencies = [ - "darling_core 0.20.6", - "darling_macro 0.20.6", + "darling_core 0.20.8", + "darling_macro 0.20.8", ] [[package]] @@ -2617,9 +2617,9 @@ dependencies = [ [[package]] name = "darling_core" -version = "0.20.6" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33043dcd19068b8192064c704b3f83eb464f91f1ff527b44a4e2b08d9cdb8855" +checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ "fnv", "ident_case", @@ -2653,11 +2653,11 @@ dependencies = [ [[package]] name = "darling_macro" -version = "0.20.6" +version = "0.20.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5a91391accf613803c2a9bf9abccdbaa07c54b4244a5b64883f9c3c137c86be" +checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ - "darling_core 0.20.6", + "darling_core 0.20.8", "quote 1.0.35", "syn 2.0.50", ] @@ -8141,7 +8141,7 @@ version = "3.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "865f9743393e638991566a8b7a479043c2c8da94a33e0a31f18214c9cae0a64d" dependencies = [ - "darling 0.20.6", + "darling 0.20.8", "proc-macro2 1.0.78", "quote 1.0.35", "syn 2.0.50", @@ -10158,7 +10158,7 @@ name = "vector-config-common" version = "0.1.0" dependencies = [ "convert_case 0.6.0", - "darling 0.20.6", + "darling 0.20.8", "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", @@ -10172,7 +10172,7 @@ dependencies = [ name = "vector-config-macros" version = "0.1.0" dependencies = [ - "darling 0.20.6", + "darling 0.20.8", "proc-macro2 1.0.78", "quote 1.0.35", "serde", From 5f43cde7aa6165e55091ec8372e301a03426a3e5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:57:59 +0000 Subject: [PATCH 0064/1491] chore(deps): Bump syn from 2.0.50 to 2.0.51 (#19953) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.50 to 2.0.51. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.50...2.0.51) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5894623256007..df20bb0a42c41 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strum 0.25.0", - "syn 2.0.50", + "syn 2.0.51", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -708,7 +708,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -1467,7 +1467,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -1594,7 +1594,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", "syn_derive", ] @@ -2026,7 +2026,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -2554,7 +2554,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -2626,7 +2626,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -2659,7 +2659,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -2764,7 +2764,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -3045,7 +3045,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -3057,7 +3057,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -3077,7 +3077,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -3479,7 +3479,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -5169,7 +5169,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -5289,7 +5289,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "regex", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -5738,7 +5738,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -5750,7 +5750,7 @@ 
dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -5938,7 +5938,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -6223,7 +6223,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -6311,7 +6311,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -6588,7 +6588,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.78", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -6791,7 +6791,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.50", + "syn 2.0.51", "tempfile", "which 4.4.2", ] @@ -6819,7 +6819,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -7603,7 +7603,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.50", + "syn 2.0.51", "unicode-ident", ] @@ -7998,7 +7998,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8009,7 +8009,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8071,7 +8071,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8144,7 +8144,7 @@ dependencies = [ "darling 0.20.8", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8423,7 +8423,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8613,7 +8613,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8626,7 +8626,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8669,9 +8669,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.50" +version = "2.0.51" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f1bdc9872430ce9b75da68329d1c1746faf50ffac5f19e02b71e37ff881ffb" +checksum = "6ab617d94515e94ae53b8406c628598680aa0c9587474ecbe58188f7b345d66c" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", @@ -8687,7 +8687,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -8855,7 +8855,7 @@ checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -9003,7 +9003,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -9239,7 +9239,7 @@ dependencies = [ "proc-macro2 1.0.78", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.50", + "syn 
2.0.51", ] [[package]] @@ -9342,7 +9342,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -9576,7 +9576,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -9606,7 +9606,7 @@ checksum = "291db8a81af4840c10d636e047cac67664e343be44e24dfdbd1492df9a5d3390" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] @@ -10164,7 +10164,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.50", + "syn 2.0.51", "tracing 0.1.40", ] @@ -10177,7 +10177,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.50", + "syn 2.0.51", "vector-config", "vector-config-common", ] @@ -10585,7 +10585,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", "wasm-bindgen-shared", ] @@ -10619,7 +10619,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11064,7 +11064,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.50", + "syn 2.0.51", ] [[package]] From 565d93d35cca13c77e3105e6fa376761b23251d2 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:58:09 +0000 Subject: [PATCH 0065/1491] chore(deps): Bump dyn-clone from 1.0.16 to 1.0.17 (#19954) Bumps [dyn-clone](https://github.com/dtolnay/dyn-clone) from 1.0.16 to 1.0.17. - [Release notes](https://github.com/dtolnay/dyn-clone/releases) - [Commits](https://github.com/dtolnay/dyn-clone/compare/1.0.16...1.0.17) --- updated-dependencies: - dependency-name: dyn-clone dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/enrichment/Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index df20bb0a42c41..d147d8e047a03 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2908,9 +2908,9 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "dyn-clone" -version = "1.0.16" +version = "1.0.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "545b22097d44f8a9581187cdf93de7a71e4722bf51200cfaba810865b49a495d" +checksum = "0d6ef0072f8a535281e4876be788938b528e9a1d43900b82c2569af7da799125" [[package]] name = "ecdsa" diff --git a/Cargo.toml b/Cargo.toml index 8d4a9c4e9ce93..348313bbca085 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -262,7 +262,7 @@ colored = { version = "2.1.0", default-features = false } csv = { version = "1.3", default-features = false } derivative = { version = "2.2.0", default-features = false } dirs-next = { version = "2.0.0", default-features = false, optional = true } -dyn-clone = { version = "1.0.16", default-features = false } +dyn-clone = { version = "1.0.17", default-features = false } encoding_rs = { version = "0.8.33", default-features = false, features = ["serde"] } enum_dispatch = { version = "0.3.12", default-features = false } exitcode = { version = "1.1.2", default-features = false } diff --git a/lib/enrichment/Cargo.toml b/lib/enrichment/Cargo.toml index e88d71a6e2a73..d1487f1f1b603 100644 --- a/lib/enrichment/Cargo.toml +++ b/lib/enrichment/Cargo.toml @@ -8,5 +8,5 @@ publish = false [dependencies] arc-swap = { version = "1.6.0", default-features = false } chrono.workspace = true -dyn-clone = { version = "1.0.16", default-features = false } +dyn-clone = { version = "1.0.17", default-features = false } vrl.workspace = true diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index fb4fc8e5dfa95..ea81a76398c47 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -13,7 +13,7 @@ bytes = { version = "1.5.0", default-features = false, features = ["serde"] } chrono.workspace = true crossbeam-utils = { version = "0.8.19", default-features = false } db-key = { version = "0.0.5", default-features = false, optional = true } -dyn-clone = { version = "1.0.16", default-features = false } +dyn-clone = { version = "1.0.17", default-features = false } enrichment = { path = "../enrichment", optional = true } enumflags2 = { version = "0.7.9", default-features = false } float_eq = { version = "1.0", default-features = false } From 906cd65bb315cf658cc6c8a597c93e34de228d74 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 12:58:27 +0000 Subject: [PATCH 0066/1491] chore(deps): Bump typetag from 0.2.15 to 0.2.16 (#19956) Bumps [typetag](https://github.com/dtolnay/typetag) from 0.2.15 to 0.2.16. - [Release notes](https://github.com/dtolnay/typetag/releases) - [Commits](https://github.com/dtolnay/typetag/compare/0.2.15...0.2.16) --- updated-dependencies: - dependency-name: typetag dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index d147d8e047a03..9c82914d8584f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9587,9 +9587,9 @@ checksum = "42ff0bf0c66b8238c6f3b578df37d0b7848e55df8577b3f74f92a69acceeb825" [[package]] name = "typetag" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c43148481c7b66502c48f35b8eef38b6ccdc7a9f04bd4cc294226d901ccc9bc7" +checksum = "661d18414ec032a49ece2d56eee03636e43c4e8d577047ab334c0ba892e29aaf" dependencies = [ "erased-serde", "inventory", @@ -9600,9 +9600,9 @@ dependencies = [ [[package]] name = "typetag-impl" -version = "0.2.15" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "291db8a81af4840c10d636e047cac67664e343be44e24dfdbd1492df9a5d3390" +checksum = "ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", diff --git a/Cargo.toml b/Cargo.toml index 348313bbca085..967c9ad68f8a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -329,7 +329,7 @@ tokio-tungstenite = {version = "0.20.1", default-features = false, features = [" toml.workspace = true tonic = { version = "0.10", optional = true, default-features = false, features = ["transport", "codegen", "prost", "tls", "tls-roots", "gzip"] } hickory-proto = { version = "0.24.0", default-features = false, features = ["dnssec"], optional = true } -typetag = { version = "0.2.15", default-features = false } +typetag = { version = "0.2.16", default-features = false } url = { version = "2.5.0", default-features = false, features = ["serde"] } uuid = { version = "1", default-features = false, features = ["serde", "v4"] } warp = { version = "0.3.6", default-features = false } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index ea81a76398c47..631403d6a3ae0 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -55,7 +55,7 @@ tonic = { version = "0.10", default-features = false, features = ["transport"] } tracing = { version = "0.1.34", default-features = false } tracing-core = { version = "0.1.26", default-features = false } tracing-subscriber = { version = "0.3.18", default-features = false, features = ["std"] } -typetag = { version = "0.2.15", default-features = false } +typetag = { version = "0.2.16", default-features = false } url = { version = "2", default-features = false } vector-buffers = { path = "../vector-buffers", default-features = false } vector-common = { path = "../vector-common" } From 11f6491f77bd9fc98c3e19859d87aa036184a1d3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:08:42 -0500 Subject: [PATCH 0067/1491] chore(ci): Bump actions/add-to-project from 0.5.0 to 0.6.0 (#19960) Bumps [actions/add-to-project](https://github.com/actions/add-to-project) from 0.5.0 to 0.6.0. - [Release notes](https://github.com/actions/add-to-project/releases) - [Commits](https://github.com/actions/add-to-project/compare/v0.5.0...v0.6.0) --- updated-dependencies: - dependency-name: actions/add-to-project dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/gardener_open_issue.yml | 2 +- .github/workflows/gardener_open_pr.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/gardener_open_issue.yml b/.github/workflows/gardener_open_issue.yml index 3c215513f950a..56da3309db9f1 100644 --- a/.github/workflows/gardener_open_issue.yml +++ b/.github/workflows/gardener_open_issue.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 steps: - - uses: actions/add-to-project@v0.5.0 + - uses: actions/add-to-project@v0.6.0 with: project-url: https://github.com/orgs/vectordotdev/projects/49 github-token: ${{ secrets.GH_PROJECT_PAT }} diff --git a/.github/workflows/gardener_open_pr.yml b/.github/workflows/gardener_open_pr.yml index 55bccc2429435..a89c5d0542bd7 100644 --- a/.github/workflows/gardener_open_pr.yml +++ b/.github/workflows/gardener_open_pr.yml @@ -26,7 +26,7 @@ jobs: username: ${{ github.actor }} team: vector GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - - uses: actions/add-to-project@v0.5.0 + - uses: actions/add-to-project@v0.6.0 if: ${{ steps.checkVectorMember.outputs.isTeamMember == 'false' }} with: project-url: https://github.com/orgs/vectordotdev/projects/49 @@ -37,7 +37,7 @@ jobs: timeout-minutes: 5 if: ${{ github.actor == 'dependabot[bot]' }} steps: - - uses: actions/add-to-project@v0.5.0 + - uses: actions/add-to-project@v0.6.0 with: project-url: https://github.com/orgs/vectordotdev/projects/49 github-token: ${{ secrets.GH_PROJECT_PAT }} From cae37e99d8dba79c943e9cdf6af862523141f71c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 27 Feb 2024 13:08:58 -0500 Subject: [PATCH 0068/1491] chore(ci): Bump docker/setup-buildx-action from 3.0.0 to 3.1.0 (#19961) Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.0.0 to 3.1.0. - [Release notes](https://github.com/docker/setup-buildx-action/releases) - [Commits](https://github.com/docker/setup-buildx-action/compare/v3.0.0...v3.1.0) --- updated-dependencies: - dependency-name: docker/setup-buildx-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/environment.yml | 2 +- .github/workflows/publish.yml | 2 +- .github/workflows/regression.yml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml index 20d1a7941724a..3e7f0b6d1cb6d 100644 --- a/.github/workflows/environment.yml +++ b/.github/workflows/environment.yml @@ -43,7 +43,7 @@ jobs: - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.0.0 + uses: docker/setup-buildx-action@v3.1.0 - name: Login to DockerHub uses: docker/login-action@v3 if: github.ref == 'refs/heads/master' diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 73d3af2b6067b..b40a11170c751 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -435,7 +435,7 @@ jobs: platforms: all - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3.0.0 + uses: docker/setup-buildx-action@v3.1.0 with: version: latest install: true diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 86c5c148a1b76..82709290f002b 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -303,7 +303,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3.0.0 + uses: docker/setup-buildx-action@v3.1.0 - name: Build 'vector' target image uses: docker/build-push-action@v5.1.0 @@ -341,7 +341,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3.0.0 + uses: docker/setup-buildx-action@v3.1.0 - name: Build 'vector' target image uses: docker/build-push-action@v5.1.0 From b1a2ca11c156aa9f66125c56009e7f05bbe65d2f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Feb 2024 07:33:08 -0500 Subject: [PATCH 0069/1491] chore(deps): Bump tempfile from 3.10.0 to 3.10.1 (#19955) Bumps [tempfile](https://github.com/Stebalien/tempfile) from 3.10.0 to 3.10.1. - [Changelog](https://github.com/Stebalien/tempfile/blob/master/CHANGELOG.md) - [Commits](https://github.com/Stebalien/tempfile/compare/v3.10.0...v3.10.1) --- updated-dependencies: - dependency-name: tempfile dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/file-source/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9c82914d8584f..655ba48210129 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8771,9 +8771,9 @@ checksum = "dd16aa9ffe15fe021c6ee3766772132c6e98dfa395a167e16864f61a9cfb71d6" [[package]] name = "tempfile" -version = "3.10.0" +version = "3.10.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a365e8cd18e44762ef95d87f284f4b5cd04107fec2ff3052bd6a3e6069669e67" +checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" dependencies = [ "cfg-if", "fastrand 2.0.1", diff --git a/Cargo.toml b/Cargo.toml index 967c9ad68f8a2..38a629c4bedaa 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -372,7 +372,7 @@ proptest.workspace = true quickcheck = "1.0.3" reqwest = { version = "0.11", features = ["json"] } rstest = {version = "0.18.2"} -tempfile = "3.10.0" +tempfile = "3.10.1" test-generator = "0.3.1" tokio = { version = "1.36.0", features = ["test-util"] } tokio-test = "0.4.3" diff --git a/lib/file-source/Cargo.toml b/lib/file-source/Cargo.toml index c1b4e5c09d8a6..13e7bd3698103 100644 --- a/lib/file-source/Cargo.toml +++ b/lib/file-source/Cargo.toml @@ -76,7 +76,7 @@ features = ["full"] [dev-dependencies] criterion = "0.5" quickcheck = "1" -tempfile = "3.10.0" +tempfile = "3.10.1" similar-asserts = "1.5.0" [[bench]] diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index 9b0e4f8e9c6d9..3fc57a1acddd2 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -35,5 +35,5 @@ serde.workspace = true serde_json.workspace = true serde_yaml = "0.9.32" sha2 = "0.10.8" -tempfile = "3.10.0" +tempfile = "3.10.1" toml.workspace = true From 26ec8f432394b966e5c48da97634738f30c949d7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Feb 2024 07:33:19 -0500 Subject: [PATCH 0070/1491] chore(deps): Bump the aws group with 1 update (#19965) Bumps the aws group with 1 update: [aws-credential-types](https://github.com/smithy-lang/smithy-rs). Updates `aws-credential-types` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-credential-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 655ba48210129..fff143e92123b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -762,9 +762,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5635d8707f265c773282a22abe1ecd4fbe96a8eb2f0f14c0796f8016f11a41a" +checksum = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", diff --git a/Cargo.toml b/Cargo.toml index 38a629c4bedaa..298441b531b2e 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -184,7 +184,7 @@ aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["be aws-types = { version = "1.1.6", default-features = false, optional = true } aws-sigv4 = { version = "1.1.6", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } -aws-credential-types = { version = "1.1.6", default-features = false, features = ["hardcoded-credentials"], optional = true } +aws-credential-types = { version = "1.1.7", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } aws-smithy-types = { version = "1.1.7", default-features = false, optional = true } aws-smithy-runtime-api = { version = "1.1.7", default-features = false, optional = true } From c1d6529225b3c9dd1c3e00957361acab89fa4d50 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Feb 2024 12:33:31 +0000 Subject: [PATCH 0071/1491] chore(deps): Bump serde-wasm-bindgen from 0.6.4 to 0.6.5 (#19966) Bumps [serde-wasm-bindgen](https://github.com/RReverser/serde-wasm-bindgen) from 0.6.4 to 0.6.5. - [Commits](https://github.com/RReverser/serde-wasm-bindgen/compare/v0.6.4...v0.6.5) --- updated-dependencies: - dependency-name: serde-wasm-bindgen dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fff143e92123b..c0fb2b89d59ad 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7972,9 +7972,9 @@ dependencies = [ [[package]] name = "serde-wasm-bindgen" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c1432112bce8b966497ac46519535189a3250a3812cd27a999678a69756f79f" +checksum = "8302e169f0eddcc139c70f139d19d6467353af16f9fce27e8c30158036a1e16b" dependencies = [ "js-sys", "serde", From d4cf2bf6989eee92a41e7312b63b8522fdb0444b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 28 Feb 2024 13:07:02 +0000 Subject: [PATCH 0072/1491] chore(deps): Bump rumqttc from 0.23.0 to 0.24.0 (#19967) * chore(deps): Bump rumqttc from 0.23.0 to 0.24.0 Bumps [rumqttc](https://github.com/bytebeamio/rumqtt) from 0.23.0 to 0.24.0. 
- [Release notes](https://github.com/bytebeamio/rumqtt/releases) - [Changelog](https://github.com/bytebeamio/rumqtt/blob/main/CHANGELOG.md) - [Commits](https://github.com/bytebeamio/rumqtt/compare/rumqttc-0.23.0...rumqttc-0.24.0) --- updated-dependencies: - dependency-name: rumqttc dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * Regenerate licenses Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 135 ++++++++++++++++++++++++++++++++----------- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 2 + 3 files changed, 103 insertions(+), 36 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c0fb2b89d59ad..52d03fd2b9bcf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -586,10 +586,10 @@ dependencies = [ "rand 0.8.5", "regex", "ring", - "rustls", - "rustls-native-certs", - "rustls-pemfile", - "rustls-webpki", + "rustls 0.21.8", + "rustls-native-certs 0.6.3", + "rustls-pemfile 1.0.3", + "rustls-webpki 0.101.7", "serde", "serde_json", "serde_nanos", @@ -598,7 +598,7 @@ dependencies = [ "time", "tokio", "tokio-retry", - "tokio-rustls", + "tokio-rustls 0.24.1", "tracing 0.1.40", "url", ] @@ -1147,7 +1147,7 @@ dependencies = [ "once_cell", "pin-project-lite", "pin-utils", - "rustls", + "rustls 0.21.8", "tokio", "tracing 0.1.40", ] @@ -1545,10 +1545,10 @@ dependencies = [ "hyperlocal", "log", "pin-project-lite", - "rustls", - "rustls-native-certs", - "rustls-pemfile", - "rustls-webpki", + "rustls 0.21.8", + "rustls-native-certs 0.6.3", + "rustls-pemfile 1.0.3", + "rustls-webpki 0.101.7", "serde", "serde_derive", "serde_json", @@ -4225,10 +4225,10 @@ dependencies = [ "http 0.2.9", "hyper", "log", - "rustls", - "rustls-native-certs", + "rustls 0.21.8", + "rustls-native-certs 0.6.3", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", ] [[package]] @@ -5323,8 +5323,8 @@ dependencies = [ "percent-encoding", "rand 0.8.5", "rustc_version_runtime", - "rustls", - "rustls-pemfile", + "rustls 0.21.8", + "rustls-pemfile 1.0.3", "serde", "serde_bytes", "serde_with 1.14.0", @@ -5336,7 +5336,7 @@ dependencies = [ "take_mut", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-util", "trust-dns-proto", "trust-dns-resolver", @@ -7416,8 +7416,8 @@ dependencies = [ "once_cell", "percent-encoding", "pin-project-lite", - "rustls", - "rustls-pemfile", + "rustls 0.21.8", + "rustls-pemfile 1.0.3", "serde", "serde_json", "serde_urlencoded", @@ -7425,7 +7425,7 @@ dependencies = [ "system-configuration", "tokio", "tokio-native-tls", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-util", "tower-service", "url", @@ -7609,20 +7609,20 @@ dependencies = [ [[package]] name = "rumqttc" -version = "0.23.0" +version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d8941c6791801b667d52bfe9ff4fc7c968d4f3f9ae8ae7abdaaa1c966feafc8" +checksum = "e1568e15fab2d546f940ed3a21f48bbbd1c494c90c99c4481339364a497f94a9" dependencies = [ "bytes 1.5.0", "flume 0.11.0", "futures-util", "log", - "rustls-native-certs", - "rustls-pemfile", - "rustls-webpki", + "rustls-native-certs 0.7.0", + "rustls-pemfile 2.1.0", + "rustls-webpki 0.102.2", "thiserror", "tokio", - "tokio-rustls", + "tokio-rustls 0.25.0", ] [[package]] @@ -7716,10 +7716,24 @@ checksum = "446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" dependencies = [ "log", "ring", - 
"rustls-webpki", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.22.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e87c9956bd9807afa1f77e0f7594af32566e830e088a5576d27c5b6f30f49d41" +dependencies = [ + "log", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.2", + "subtle", + "zeroize", +] + [[package]] name = "rustls-native-certs" version = "0.6.3" @@ -7727,7 +7741,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.3", + "schannel", + "security-framework", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8f1fb85efa936c42c6d5fc28d2629bb51e4b2f4b8a5211e297d599cc5a093792" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.1.0", + "rustls-pki-types", "schannel", "security-framework", ] @@ -7741,6 +7768,22 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3c333bb734fcdedcea57de1602543590f545f127dc8b533324318fd492c5c70b" +dependencies = [ + "base64 0.21.7", + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.3.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5ede67b28608b4c60685c7d54122d4400d90f62b40caee7700e700380a390fa8" + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -7751,6 +7794,17 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.102.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "faaa0a62740bedb9b2ef5afa303da42764c012f743917351dc9a237ea1663610" +dependencies = [ + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" version = "1.0.14" @@ -8760,7 +8814,7 @@ checksum = "4da30af7998f51ee1aa48ab24276fe303a697b004e31ff542b192c088d5630a5" dependencies = [ "cfg-if", "native-tls", - "rustls-pemfile", + "rustls-pemfile 1.0.3", ] [[package]] @@ -9071,7 +9125,18 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.8", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.25.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "775e0c0f0adb3a2f22a00c4745d728b479985fc15ee7ca6a2608388c5569860f" +dependencies = [ + "rustls 0.22.2", + "rustls-pki-types", "tokio", ] @@ -9108,7 +9173,7 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", - "rustls", + "rustls 0.21.8", "tokio", "tungstenite", ] @@ -9204,11 +9269,11 @@ dependencies = [ "percent-encoding", "pin-project", "prost 0.12.3", - "rustls", - "rustls-native-certs", - "rustls-pemfile", + "rustls 0.21.8", + "rustls-native-certs 0.6.3", + "rustls-pemfile 1.0.3", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-stream", "tower", "tower-layer", @@ -10539,7 +10604,7 @@ dependencies = [ "mime_guess", "percent-encoding", "pin-project", - "rustls-pemfile", + "rustls-pemfile 1.0.3", "scoped-tls", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index 298441b531b2e..bc2951913e120 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -313,7 +313,7 @@ rdkafka 
= { version = "0.35.0", default-features = false, features = ["tokio", " redis = { version = "0.24.0", default-features = false, features = ["connection-manager", "tokio-comp", "tokio-native-tls-comp"], optional = true } regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } roaring = { version = "0.10.3", default-features = false, optional = true } -rumqttc = { version = "0.23.0", default-features = false, features = ["use-rustls"], optional = true } +rumqttc = { version = "0.24.0", default-features = false, features = ["use-rustls"], optional = true } seahash = { version = "4.1.0", default-features = false } semver = { version = "1.0.22", default-features = false, features = ["serde", "std"], optional = true } smallvec = { version = "1", default-features = false, features = ["union", "serde"] } diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 0a45a8d7b8024..746d2a7e171f4 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -472,7 +472,9 @@ rustc_version_runtime,https://github.com/seppo0010/rustc-version-runtime-rs,MIT, rustix,https://github.com/bytecodealliance/rustix,Apache-2.0 WITH LLVM-exception OR Apache-2.0 OR MIT,"Dan Gohman , Jakub Konka " rustls,https://github.com/rustls/rustls,Apache-2.0 OR ISC OR MIT,The rustls Authors rustls-native-certs,https://github.com/ctz/rustls-native-certs,Apache-2.0 OR ISC OR MIT,The rustls-native-certs Authors +rustls-native-certs,https://github.com/rustls/rustls-native-certs,Apache-2.0 OR ISC OR MIT,The rustls-native-certs Authors rustls-pemfile,https://github.com/rustls/pemfile,Apache-2.0 OR ISC OR MIT,The rustls-pemfile Authors +rustls-pki-types,https://github.com/rustls/pki-types,MIT OR Apache-2.0,The rustls-pki-types Authors rustls-webpki,https://github.com/rustls/webpki,ISC,The rustls-webpki Authors rustversion,https://github.com/dtolnay/rustversion,MIT OR Apache-2.0,David Tolnay rusty-fork,https://github.com/altsysrq/rusty-fork,MIT OR Apache-2.0,Jason Lingle From 2ca14aef0ee056f8f1a0763abf2859a75cad5f9c Mon Sep 17 00:00:00 2001 From: Stephen Wakely Date: Wed, 28 Feb 2024 15:42:00 +0000 Subject: [PATCH 0073/1491] fix (aws service): use http client so we can use openssl tls. (#19939) * Use http client so we can use openssl tls. * Share connector code. Signed-off-by: Stephen Wakely * Clippy Signed-off-by: Stephen Wakely * Feedback from Bruce. 
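For reference, a minimal hedged sketch of the idea behind this change, not Vector's actual helpers — `openssl_http_client` and the `hyper_openssl` crate below are assumptions standing in for the `build_tls_connector`/`build_proxy_connector` helpers: wrap an openssl-backed hyper 0.14 connector in the SDK's shared client type and hand it to the SDK config, so AWS requests no longer go through the SDK's default rustls stack.

use aws_config::SdkConfig;
use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
use aws_types::{region::Region, sdk_config::SharedHttpClient};

// Assumed helper: build the SDK's shared HTTP client around an
// openssl-backed connector. Any hyper 0.14 connector would work here.
fn openssl_http_client() -> Result<SharedHttpClient, openssl::error::ErrorStack> {
    let connector = hyper_openssl::HttpsConnector::new()?;
    Ok(HyperClientBuilder::new().build(connector))
}

// Every client built from this config now does TLS through openssl,
// matching what the `connector()` helper added in this patch returns.
fn sdk_config_with_openssl(region: Region) -> Result<SdkConfig, openssl::error::ErrorStack> {
    Ok(SdkConfig::builder()
        .http_client(openssl_http_client()?)
        .region(region)
        .build())
}

The same `SharedHttpClient` type is what the new `connector()` helper in this commit returns, so the credentials provider and the service clients share one TLS path.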
Signed-off-by: Stephen Wakely --------- Signed-off-by: Stephen Wakely --- Cargo.lock | 1 + Cargo.toml | 5 +++++ src/aws/auth.rs | 29 ++++++++++++++--------------- src/aws/mod.rs | 29 ++++++++++++++++++++--------- 4 files changed, 40 insertions(+), 24 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 52d03fd2b9bcf..6e8671279a76b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9929,6 +9929,7 @@ dependencies = [ "aws-sdk-s3", "aws-sdk-sns", "aws-sdk-sqs", + "aws-sdk-sts", "aws-sigv4", "aws-smithy-async", "aws-smithy-http", diff --git a/Cargo.toml b/Cargo.toml index bc2951913e120..5833ef2f39504 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -181,6 +181,10 @@ aws-sdk-cloudwatchlogs = { version = "1.3.0", default-features = false, features aws-sdk-elasticsearch = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-firehose = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } +# The sts crate is needed despite not being referred to anywhere in the code because we need to set the +# `behavior-version-latest` feature. Without this we get a runtime panic when `auth.assume_role` authentication +# is configured. +aws-sdk-sts = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } aws-types = { version = "1.1.6", default-features = false, optional = true } aws-sigv4 = { version = "1.1.6", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } @@ -464,6 +468,7 @@ aws-core = [ "dep:aws-smithy-types", "dep:aws-smithy-runtime", "dep:aws-smithy-runtime-api", + "dep:aws-sdk-sts", ] # Anything that requires Protocol Buffers. diff --git a/src/aws/auth.rs b/src/aws/auth.rs index d71b9289633e4..b975c2a51d02c 100644 --- a/src/aws/auth.rs +++ b/src/aws/auth.rs @@ -13,14 +13,12 @@ use aws_config::{ sts::AssumeRoleProviderBuilder, }; use aws_credential_types::{provider::SharedCredentialsProvider, Credentials}; -use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder; +use aws_smithy_async::time::SystemTimeSource; use aws_smithy_runtime_api::client::identity::SharedIdentityCache; -use aws_types::region::Region; +use aws_types::{region::Region, SdkConfig}; use serde_with::serde_as; +use vector_lib::configurable::configurable_component; use vector_lib::{config::proxy::ProxyConfig, sensitive_string::SensitiveString, tls::TlsConfig}; -use vector_lib::{configurable::configurable_component, tls::MaybeTlsSettings}; - -use crate::http::{build_proxy_connector, build_tls_connector}; // matches default load timeout from the SDK as of 0.10.1, but lets us confidently document the // default rather than relying on the SDK default to not change @@ -266,8 +264,16 @@ impl AwsAuthentication { .. 
} => { let auth_region = region.clone().map(Region::new).unwrap_or(service_region); - let mut builder = - AssumeRoleProviderBuilder::new(assume_role).region(auth_region.clone()); + let connector = super::connector(proxy, tls_options)?; + let config = SdkConfig::builder() + .http_client(connector) + .region(auth_region.clone()) + .time_source(SystemTimeSource::new()) + .build(); + + let mut builder = AssumeRoleProviderBuilder::new(assume_role) + .region(auth_region.clone()) + .configure(&config); if let Some(external_id) = external_id { builder = builder.external_id(external_id) @@ -313,14 +319,7 @@ async fn default_credentials_provider( tls_options: &Option<TlsConfig>, imds: ImdsAuthentication, ) -> crate::Result<SharedCredentialsProvider> { - let tls_settings = MaybeTlsSettings::tls_client(tls_options)?; - let connector = if proxy.enabled { - let proxy = build_proxy_connector(tls_settings, proxy)?; - HyperClientBuilder::new().build(proxy) - } else { - let tls_connector = build_tls_connector(tls_settings)?; - HyperClientBuilder::new().build(tls_connector) - }; + let connector = super::connector(proxy, tls_options)?; let provider_config = ProviderConfig::empty() .with_region(Some(region.clone()))
diff --git a/src/aws/mod.rs b/src/aws/mod.rs index a3bb665ed6d68..f611e4a2aabe6 100644 --- a/src/aws/mod.rs +++ b/src/aws/mod.rs @@ -21,6 +21,7 @@ use aws_smithy_runtime_api::client::{ runtime_components::RuntimeComponents, }; use aws_smithy_types::body::SdkBody; +use aws_types::sdk_config::SharedHttpClient; use bytes::Bytes; use futures_util::FutureExt; use http::HeaderMap; @@ -94,6 +95,24 @@ fn check_response(res: &HttpResponse) -> bool { || (status.is_client_error() && re.is_match(response_body.as_ref())) } +/// Creates the http connector that has been configured to use the given proxy and TLS settings. +/// All AWS requests should use this connector as the aws crates by default use RustTLS which we +/// have turned off as we want to consistently use openssl. +fn connector( + proxy: &ProxyConfig, + tls_options: &Option<TlsConfig>, +) -> crate::Result<SharedHttpClient> { + let tls_settings = MaybeTlsSettings::tls_client(tls_options)?; + + if proxy.enabled { + let proxy = build_proxy_connector(tls_settings, proxy)?; + Ok(HyperClientBuilder::new().build(proxy)) + } else { + let tls_connector = build_tls_connector(tls_settings)?; + Ok(HyperClientBuilder::new().build(tls_connector)) + } +} + /// Implement for each AWS service to create the appropriate AWS sdk client. pub trait ClientBuilder { /// The type of the client in the SDK. @@ -145,15 +164,7 @@ pub async fn create_client_and_region( let provider_config = aws_config::provider_config::ProviderConfig::empty().with_region(Some(region.clone())); - let tls_settings = MaybeTlsSettings::tls_client(tls_options)?; - - let connector = if proxy.enabled { - let proxy = build_proxy_connector(tls_settings, proxy)?; - HyperClientBuilder::new().build(proxy) - } else { - let tls_connector = build_tls_connector(tls_settings)?; - HyperClientBuilder::new().build(tls_connector) - }; + let connector = connector(proxy, tls_options)?; // Create a custom http connector that will emit the required metrics for us. let connector = AwsHttpClient {
From a9cee3f796624e1a18a3ae9243a0666cffd27aa5 Mon Sep 17 00:00:00 2001 From: Stephen Wakely Date: Wed, 28 Feb 2024 17:31:36 +0000 Subject: [PATCH 0074/1491] fix(aws service): determine region using our http client (#19972) * Use http client so we can use openssl tls. * Share connector code. Signed-off-by: Stephen Wakely * Clippy Signed-off-by: Stephen Wakely * Feedback from Bruce. Signed-off-by: Stephen Wakely * Resolve region with custom http provider. Signed-off-by: Stephen Wakely --------- Signed-off-by: Stephen Wakely --- src/aws/mod.rs | 28 +++++++++++++++++++++++++--- 1 file changed, 25 insertions(+), 3 deletions(-)
diff --git a/src/aws/mod.rs b/src/aws/mod.rs index f611e4a2aabe6..2f4ea2678a637 100644 --- a/src/aws/mod.rs +++ b/src/aws/mod.rs @@ -122,10 +122,32 @@ pub trait ClientBuilder { fn build(config: &SdkConfig) -> Self::Client; } -async fn resolve_region(region: Option<Region>) -> crate::Result<Region> { +fn region_provider( + proxy: &ProxyConfig, + tls_options: &Option<TlsConfig>, +) -> crate::Result<aws_config::meta::region::RegionProviderChain> { + let config = aws_config::provider_config::ProviderConfig::default() + .with_http_client(connector(proxy, tls_options)?); + + Ok(aws_config::meta::region::RegionProviderChain::first_try( + aws_config::environment::EnvironmentVariableRegionProvider::new(), + ) + .or_else(aws_config::profile::ProfileFileRegionProvider::builder().build()) + .or_else( + aws_config::imds::region::ImdsRegionProvider::builder() + .configure(&config) + .build(), + )) +} + +async fn resolve_region( + proxy: &ProxyConfig, + tls_options: &Option<TlsConfig>, + region: Option<Region>, +) -> crate::Result<Region> { match region { Some(region) => Ok(region), - None => aws_config::default_provider::region::default_provider() + None => region_provider(proxy, tls_options)? .region() .await .ok_or_else(|| { @@ -159,7 +181,7 @@ pub async fn create_client_and_region( // The default credentials chains will look for a region if not given but we'd like to // error up front if later SDK calls will fail due to lack of region configuration - let region = resolve_region(region).await?; + let region = resolve_region(proxy, tls_options, region).await?; let provider_config = aws_config::provider_config::ProviderConfig::empty().with_region(Some(region.clone()));
From 43a91293c61e67305ee175e3cf135adeec0b51b1 Mon Sep 17 00:00:00 2001 From: neuronull Date: Wed, 28 Feb 2024 12:15:48 -0700 Subject: [PATCH 0075/1491] chore(observability): robustly synchronize component validation framework tasks (#19927) * add fix and small refactor * fix compilation errors * 3 ticks * don't compute expected metrics in validator * cleanup * cleanup * clippy * feedback tz: sent_eventssssss * feedback tz: fix telemetry shutdown finishing logic * 3 ticks * small reorg to add sinks * mini refactor of the component spec validators * attempt to set expected values from the resource * feedback tz- from not try_from * back to 3 ticks * fix incorrect expected values * Even more reduction * clippy * add the discarded events total check * workaround the new sync issues * multi config support * cleanup * check events * partial feedback * thought i removed that * use ref * feedback: don't introduce PassThroughFail variant * feedback: adjust enum variant names for clarity * feedback: no idea what I was thinking with `input_codec` * spell check * fr * fix sync issues * remove unused enum variant * feedback- update docs * check_events * touchup * spell checker * merge leftover * feedback: log formatting * feedback- better approach to driving shutdown * give a generous timeout --- Cargo.toml | 4 +- src/components/validation/resources/event.rs | 67 ++++---- src/components/validation/resources/http.rs | 105 ++++++++----- src/components/validation/resources/mod.rs | 2 + src/components/validation/runner/mod.rs | 145 +++++++++++------- src/components/validation/runner/telemetry.rs | 13 +- src/components/validation/sync.rs | 19 ++- .../validators/component_spec/mod.rs | 2 +- src/sinks/http/config.rs | 98 ++++-------- src/sources/http_client/client.rs | 3 +- tests/validation/components/sinks/http.yaml | 6 +- 11 files changed, 246 insertions(+), 218 deletions(-)
diff --git a/Cargo.toml b/Cargo.toml index 5833ef2f39504..0582b03bde6bc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -903,7 +903,9 @@ enterprise-tests = [ ] component-validation-runner = ["dep:tonic", "sources-internal_logs", "sources-internal_metrics", "sources-vector", "sinks-vector"] -component-validation-tests = ["component-validation-runner", "sources", "transforms", "sinks"] +# For now, only include components that implement ValidatableComponent. +# In the future, this can change to simply reference the targets `sources`, `transforms`, `sinks` +component-validation-tests = ["component-validation-runner", "sources-http_client", "sources-http_server", "sinks-http"] # Grouping together features for benchmarks. We exclude the API client due to it causing the build process to run out # of memory when those additional dependencies are built in CI.
diff --git a/src/components/validation/resources/event.rs b/src/components/validation/resources/event.rs index 343466ee9e51c..9008cf291ef46 100644 --- a/src/components/validation/resources/event.rs +++ b/src/components/validation/resources/event.rs @@ -1,6 +1,5 @@ use bytes::BytesMut; use serde::Deserialize; -use serde_json::Value; use snafu::Snafu; use tokio_util::codec::Encoder as _; @@ -30,16 +29,9 @@ pub enum RawTestEvent { /// is malformed in some way, which can be achieved without this test event variant. AlternateEncoder { fail_encoding_of: EventData }, - /// The event is created, and the specified field is added to it. - /// - /// This allows the ability to hit code paths where some codecs require specific fields to be of specific - /// types, thus allowing us to encode into the input runner without error, but encoding in the component - /// under test can be set up to fail. - WithField { - event: EventData, - name: String, - value: Value, - fail: Option<bool>, - }, + /// The event will be rejected by the external resource. + ResourceReject { + external_resource_rejects: EventData, + }, } @@ -79,12 +71,9 @@ pub enum TestEvent { /// configured encoding, which should cause an error when the event is decoded. FailWithAlternateEncoder(Event), - /// The event has an additional field injected prior to encoding, which should cause - /// an error when the event is decoded. - /// - /// This is useful for testing encodings that have strict schemas and cannot - /// handle arbitrary fields or differing data types for certain fields. - FailWithInjectedField(Event), + /// The event encodes successfully but when the external resource receives that event, it should + /// throw a failure. + FailWithExternalResource(Event), } impl TestEvent { @@ -93,7 +82,7 @@ impl TestEvent { match self { Self::Passthrough(event) => event, Self::FailWithAlternateEncoder(event) => event, - Self::FailWithInjectedField(event) => event, + Self::FailWithExternalResource(event) => event, } } @@ -101,7 +90,7 @@ impl TestEvent { match self { Self::Passthrough(event) => event, Self::FailWithAlternateEncoder(event) => event, - Self::FailWithInjectedField(event) => event, + Self::FailWithExternalResource(event) => event, } } @@ -110,7 +99,24 @@ impl TestEvent { match self { Self::Passthrough(event) => (false, event), Self::FailWithAlternateEncoder(event) => (true, event), - Self::FailWithInjectedField(event) => (true, event), + Self::FailWithExternalResource(event) => (true, event), + } + } + + /// True if the event should fail, false otherwise. + pub const fn should_fail(&self) -> bool { + match self { + Self::Passthrough(_) => false, + Self::FailWithAlternateEncoder(_) | Self::FailWithExternalResource(_) => true, + } + } + + /// True if the event should be rejected by the external resource in order to + /// trigger a failure path. + pub const fn should_reject(&self) -> bool { + match self { + Self::Passthrough(_) | Self::FailWithAlternateEncoder(_) => false, + Self::FailWithExternalResource(_) => true, } } } @@ -127,22 +133,9 @@ impl From<RawTestEvent> for TestEvent { RawTestEvent::AlternateEncoder { fail_encoding_of: event_data, } => TestEvent::FailWithAlternateEncoder(event_data.into_event()), - RawTestEvent::WithField { - event, - name, - value, - fail, - } => { - let mut event = event.into_event(); - let log_event = event.as_mut_log(); - log_event.insert(name.as_str(), value); - - if fail.unwrap_or_default() { - TestEvent::FailWithInjectedField(event) - } else { - TestEvent::Passthrough(event) - } - } + RawTestEvent::ResourceReject { + external_resource_rejects: event_data, + } => TestEvent::FailWithExternalResource(event_data.into_event()), } } } @@ -153,7 +146,7 @@ pub fn encode_test_event( event: TestEvent, ) { match event { - TestEvent::Passthrough(event) | TestEvent::FailWithInjectedField(event) => { + TestEvent::Passthrough(event) | TestEvent::FailWithExternalResource(event) => { // Encode the event normally. encoder .encode(event, buf)
diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs index b2fb003ff688f..cb78aa50081ab 100644 --- a/src/components/validation/resources/http.rs +++ b/src/components/validation/resources/http.rs @@ -66,6 +66,7 @@ impl HttpResourceConfig { codec: ResourceCodec, output_tx: mpsc::Sender<Vec<Event>>, task_coordinator: &TaskCoordinator<Configuring>, + input_events: Vec<TestEvent>, runner_metrics: &Arc<Mutex<RunnerMetrics>>, ) -> vector_lib::Result<()> { match direction { @@ -77,9 +78,14 @@ impl HttpResourceConfig { task_coordinator, )), // The sink will push data to us. - ResourceDirection::Push => { - spawn_output_http_server(self, codec, output_tx, task_coordinator, runner_metrics) - } + ResourceDirection::Push => spawn_output_http_server( + self, + codec, + output_tx, + task_coordinator, + input_events, + runner_metrics, + ), } } } @@ -132,10 +138,11 @@ fn spawn_input_http_server( // events and working with the HTTP server as they're consumed.
+ FailWithExternalResource(Event), } impl TestEvent { @@ -93,7 +82,7 @@ impl TestEvent { match self { Self::Passthrough(event) => event, Self::FailWithAlternateEncoder(event) => event, - Self::FailWithInjectedField(event) => event, + Self::FailWithExternalResource(event) => event, } } @@ -101,7 +90,7 @@ impl TestEvent { match self { Self::Passthrough(event) => event, Self::FailWithAlternateEncoder(event) => event, - Self::FailWithInjectedField(event) => event, + Self::FailWithExternalResource(event) => event, } } @@ -110,7 +99,24 @@ impl TestEvent { match self { Self::Passthrough(event) => (false, event), Self::FailWithAlternateEncoder(event) => (true, event), - Self::FailWithInjectedField(event) => (true, event), + Self::FailWithExternalResource(event) => (true, event), + } + } + + /// True if the event should fail, false otherwise. + pub const fn should_fail(&self) -> bool { + match self { + Self::Passthrough(_) => false, + Self::FailWithAlternateEncoder(_) | Self::FailWithExternalResource(_) => true, + } + } + + /// True if the event should be rejected by the external resource in order to + /// trigger a failure path. + pub const fn should_reject(&self) -> bool { + match self { + Self::Passthrough(_) | Self::FailWithAlternateEncoder(_) => false, + Self::FailWithExternalResource(_) => true, } } } @@ -127,22 +133,9 @@ impl From for TestEvent { RawTestEvent::AlternateEncoder { fail_encoding_of: event_data, } => TestEvent::FailWithAlternateEncoder(event_data.into_event()), - RawTestEvent::WithField { - event, - name, - value, - fail, - } => { - let mut event = event.into_event(); - let log_event = event.as_mut_log(); - log_event.insert(name.as_str(), value); - - if fail.unwrap_or_default() { - TestEvent::FailWithInjectedField(event) - } else { - TestEvent::Passthrough(event) - } - } + RawTestEvent::ResourceReject { + external_resource_rejects: event_data, + } => TestEvent::FailWithExternalResource(event_data.into_event()), } } } @@ -153,7 +146,7 @@ pub fn encode_test_event( event: TestEvent, ) { match event { - TestEvent::Passthrough(event) | TestEvent::FailWithInjectedField(event) => { + TestEvent::Passthrough(event) | TestEvent::FailWithExternalResource(event) => { // Encode the event normally. encoder .encode(event, buf) diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs index b2fb003ff688f..cb78aa50081ab 100644 --- a/src/components/validation/resources/http.rs +++ b/src/components/validation/resources/http.rs @@ -66,6 +66,7 @@ impl HttpResourceConfig { codec: ResourceCodec, output_tx: mpsc::Sender>, task_coordinator: &TaskCoordinator, + input_events: Vec, runner_metrics: &Arc>, ) -> vector_lib::Result<()> { match direction { @@ -77,9 +78,14 @@ impl HttpResourceConfig { task_coordinator, )), // The sink will push data to us. - ResourceDirection::Push => { - spawn_output_http_server(self, codec, output_tx, task_coordinator, runner_metrics) - } + ResourceDirection::Push => spawn_output_http_server( + self, + codec, + output_tx, + task_coordinator, + input_events, + runner_metrics, + ), } } } @@ -132,10 +138,11 @@ fn spawn_input_http_server( // events and working with the HTTP server as they're consumed. 
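// [Editorial sketch, not part of the patch: a self-contained reduction of the
// two predicates introduced above. `SketchTestEvent` is a hypothetical
// stand-in for `TestEvent`; the real variants carry an `Event` payload.]
// `should_fail` says *whether* an event is expected to fail; `should_reject`
// says the failure happens at the external resource rather than at encoding.

#[derive(Clone, Copy)]
enum SketchTestEvent {
    Passthrough,
    FailWithAlternateEncoder,
    FailWithExternalResource,
}

impl SketchTestEvent {
    const fn should_fail(self) -> bool {
        !matches!(self, Self::Passthrough)
    }

    const fn should_reject(self) -> bool {
        matches!(self, Self::FailWithExternalResource)
    }
}

// The runner's expected output count then falls out directly: only events
// that fail nowhere are expected to reach the output side.
fn expected_outputs(events: &[SketchTestEvent]) -> usize {
    events.iter().filter(|e| !e.should_fail()).count()
}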
let resource_started = task_coordinator.track_started(); let resource_completed = task_coordinator.track_completed(); + let mut resource_shutdown_rx = task_coordinator.register_for_shutdown(); tokio::spawn(async move { resource_started.mark_as_done(); - debug!("HTTP server external input resource started."); + info!("HTTP server external input resource started."); let mut input_finished = false; @@ -152,7 +159,7 @@ fn spawn_input_http_server( outstanding_events.push_back(event); }, None => { - trace!("HTTP server external input resource input is finished."); + info!("HTTP server external input resource input is finished."); input_finished = true; }, }, @@ -175,16 +182,18 @@ fn spawn_input_http_server( } // Mark ourselves as completed now that we've sent all inputs to the source, and // additionally signal the HTTP server to also gracefully shutdown. - _ = http_server_shutdown_tx.send(()); + info!("HTTP server external input resource signalling ready for shutdown."); - // TODO - currently we are getting lucky in the testing of `http_client` source... if the source tries to query - // this server but we have already shut down the thread, then it will generate an error which can throw off our error - // validation. - // I think the solution involves adding synchronization to wait here for the runner to tell us to shutdown. + // Wait for the runner to signal us to shutdown + resource_shutdown_rx.wait().await; + // Shutdown the server + _ = http_server_shutdown_tx.send(()); + + info!("HTTP server external input resource marking as done."); resource_completed.mark_as_done(); - debug!("HTTP server external input resource completed."); + info!("HTTP server external input resource completed."); }); } @@ -205,7 +214,7 @@ fn spawn_input_http_client( // Mark ourselves as started. We don't actually do anything until we get our first input // message, though. started.mark_as_done(); - debug!("HTTP client external input resource started."); + info!("HTTP client external input resource started."); let client = Client::builder().build_http::(); let request_uri = config.uri; @@ -238,7 +247,7 @@ fn spawn_input_http_client( // Mark ourselves as completed now that we've sent all inputs to the source. completed.mark_as_done(); - debug!("HTTP client external input resource completed."); + info!("HTTP client external input resource completed."); }); } @@ -249,6 +258,7 @@ fn spawn_output_http_server( codec: ResourceCodec, output_tx: mpsc::Sender>, task_coordinator: &TaskCoordinator, + input_events: Vec, runner_metrics: &Arc>, ) -> vector_lib::Result<()> { // This HTTP server will wait for events to be sent by a sink, and collect them and send them on @@ -257,6 +267,8 @@ fn spawn_output_http_server( // First, we'll build and spawn our HTTP server. let decoder = codec.into_decoder()?; + let should_reject = input_events.iter().filter(|te| te.should_reject()).count() > 0; + let (_, http_server_shutdown_tx) = spawn_http_server( task_coordinator, &config, @@ -273,28 +285,41 @@ fn spawn_output_http_server( loop { match decoder.decode_eof(&mut body) { Ok(Some((events, byte_size))) => { - let mut output_runner_metrics = - output_runner_metrics.lock().await; - debug!("HTTP server external output resource decoded {byte_size} bytes."); - - // Update the runner metrics for the received events. This will later - // be used in the Validators, as the "expected" case. 
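// [Editorial sketch, not part of the patch: the lifecycle contract the input
// server now honors, reduced to plain tokio primitives. All names below are
// hypothetical; the real code goes through the framework's TaskCoordinator.]
// The key change above is that the HTTP server is no longer torn down as soon
// as its inputs are sent; it stays alive until the runner signals shutdown.

use tokio::sync::{oneshot, watch};

async fn input_resource(
    started: oneshot::Sender<()>,
    mut shutdown: watch::Receiver<bool>,
    completed: oneshot::Sender<()>,
    server_shutdown: oneshot::Sender<()>,
) {
    let _ = started.send(());         // mark the resource as started
    // ... serve input events to the component under test ...
    let _ = shutdown.changed().await; // wait for the runner's shutdown signal
    let _ = server_shutdown.send(()); // only now stop the HTTP server
    let _ = completed.send(());       // mark the resource as completed
}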
- output_runner_metrics.received_bytes_total += byte_size as u64; - - output_runner_metrics.received_events_total += - events.len() as u64; - - events.iter().for_each(|event| { - output_runner_metrics.received_event_bytes_total += - event.estimated_json_encoded_size_of().get() as u64; - }); - - output_tx - .send(events.to_vec()) - .await - .expect("should not fail to send output event"); + if should_reject { + info!("HTTP server external output resource decoded {byte_size} bytes but test case configured to reject."); + } else { + let mut output_runner_metrics = + output_runner_metrics.lock().await; + info!("HTTP server external output resource decoded {byte_size} bytes."); + + // Update the runner metrics for the received events. This will later + // be used in the Validators, as the "expected" case. + output_runner_metrics.received_bytes_total += + byte_size as u64; + + output_runner_metrics.received_events_total += + events.len() as u64; + + events.iter().for_each(|event| { + output_runner_metrics.received_event_bytes_total += + event.estimated_json_encoded_size_of().get() as u64; + }); + + output_tx + .send(events.to_vec()) + .await + .expect("should not fail to send output event"); + } + } + Ok(None) => { + if should_reject { + // This status code is not retried and should result in the component under test + // emitting error events + return StatusCode::BAD_REQUEST.into_response(); + } else { + return StatusCode::OK.into_response(); + } } - Ok(None) => return StatusCode::OK.into_response(), Err(_) => return StatusCode::INTERNAL_SERVER_ERROR.into_response(), } } @@ -312,14 +337,20 @@ fn spawn_output_http_server( tokio::spawn(async move { resource_started.mark_as_done(); - debug!("HTTP server external output resource started."); + info!("HTTP server external output resource started."); + // Wait for the runner to tell us to shutdown resource_shutdown_rx.wait().await; - _ = http_server_shutdown_tx.send(()); + + // signal the server to shutdown + let _ = http_server_shutdown_tx.send(()); + + // mark ourselves as done resource_completed.mark_as_done(); - debug!("HTTP server external output resource completed."); + info!("HTTP server external output resource completed."); }); + Ok(()) } diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs index 84186e23195e8..a9b39a560988c 100644 --- a/src/components/validation/resources/mod.rs +++ b/src/components/validation/resources/mod.rs @@ -340,6 +340,7 @@ impl ExternalResource { self, output_tx: mpsc::Sender>, task_coordinator: &TaskCoordinator, + input_events: Vec, runner_metrics: &Arc>, ) -> vector_lib::Result<()> { match self.definition { @@ -348,6 +349,7 @@ impl ExternalResource { self.codec, output_tx, task_coordinator, + input_events, runner_metrics, ), } diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs index 91048a1e4438e..9e2dc82a02831 100644 --- a/src/components/validation/runner/mod.rs +++ b/src/components/validation/runner/mod.rs @@ -203,17 +203,17 @@ impl Runner { let component_type = self.configuration.component_type(); - let test_cases = load_component_test_cases(self.test_case_data_path)?; + let test_cases = load_component_test_cases(&self.test_case_data_path)?; for test_case in test_cases { // Create a task coordinator for each relevant phase of the test. 
// // This provides us the granularity to know when the tasks associated with each phase // (inputs, component topology, outputs/telemetry, etc) have started, and the ability to // trigger them to shutdown and then wait until the associated tasks have completed. - let input_task_coordinator = TaskCoordinator::new(); - let output_task_coordinator = TaskCoordinator::new(); - let topology_task_coordinator = TaskCoordinator::new(); - let telemetry_task_coordinator = TaskCoordinator::new(); + let input_task_coordinator = TaskCoordinator::new("Input"); + let output_task_coordinator = TaskCoordinator::new("Output"); + let topology_task_coordinator = TaskCoordinator::new("Topology"); + let telemetry_task_coordinator = TaskCoordinator::new("Telemetry"); // First, we get a topology builder for the given component being validated. // @@ -254,8 +254,9 @@ impl Runner { // For example, if we're validating a source, we would have added a filler sink for our // controlled output edge, which means we then need a server task listening for the // events sent by that sink. + let (runner_input, runner_output, maybe_runner_encoder) = build_external_resource( - test_case.config_name.as_ref(), + &test_case, &self.configuration, &input_task_coordinator, &output_task_coordinator, @@ -268,13 +269,13 @@ impl Runner { // Now with any external resource spawned, as well as any tasks for handling controlled // edges, we'll wait for all of those tasks to report that they're ready to go and // listening, etc. - let input_task_coordinator = input_task_coordinator.started().await; + let mut input_task_coordinator = input_task_coordinator.started().await; info!("All input task(s) started."); - let telemetry_task_coordinator = telemetry_task_coordinator.started().await; + let mut telemetry_task_coordinator = telemetry_task_coordinator.started().await; info!("All telemetry task(s) started."); - let output_task_coordinator = output_task_coordinator.started().await; + let mut output_task_coordinator = output_task_coordinator.started().await; info!("All output task(s) started."); // At this point, we need to actually spawn the configured component topology so that it @@ -285,7 +286,7 @@ impl Runner { &topology_task_coordinator, self.extra_context.clone(), ); - let topology_task_coordinator = topology_task_coordinator.started().await; + let mut topology_task_coordinator = topology_task_coordinator.started().await; // Now we'll spawn two tasks: one for sending inputs, and one for collecting outputs. // @@ -315,11 +316,19 @@ impl Runner { self.configuration.component_type, ); + // the number of events we expect to receive from the output. + let expected_output_events = test_case + .events + .iter() + .filter(|te| !te.should_fail()) + .count(); + let output_driver = spawn_output_driver( output_rx, &runner_metrics, maybe_runner_encoder.as_ref().cloned(), self.configuration.component_type, + expected_output_events, ); // At this point, the component topology is running, and all input/output/telemetry @@ -335,30 +344,24 @@ impl Runner { .await .expect("input driver task should not have panicked"); + // Synchronize the shutdown of all tasks, and get the resulting output events. + // We drive the shutdown by ensuring that the output events have been + // processed by the external resource, which ensures that the input events have travelled + // all the way through the pipeline, and that the telemetry events have been processed + // before shutting down the telemetry and topology tasks. 
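// [Editorial aside, not part of the patch] On the rejection path added in
// resources/http.rs above, the server deliberately answers 400: per the
// comment there, that status is not retried, so the sink under test surfaces
// the failure as error/discarded-event telemetry instead of retrying. A
// minimal sketch of that branch, using the `http` crate's StatusCode:

use http::StatusCode;

fn response_status(should_reject: bool) -> StatusCode {
    if should_reject {
        StatusCode::BAD_REQUEST // non-retriable: drives the failure path
    } else {
        StatusCode::OK
    }
}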
input_task_coordinator.shutdown().await; - info!("Input task(s) have been shutdown."); - - // Without this, not all internal metric events are received for sink components under test. - // TODO: This is awful and needs a proper solution. - // I think we are going to need to setup distinct task sync logic potentially for each - // combination of Source/Sink + Resource direction Push/Pull - if self.configuration.component_type == ComponentType::Sink { - tokio::time::sleep(Duration::from_secs(1)).await; - } - - telemetry_task_coordinator.shutdown().await; - info!("Telemetry task(s) have been shutdown."); - - topology_task_coordinator.shutdown().await; - info!("Component topology task has been shutdown."); - - output_task_coordinator.shutdown().await; - info!("Output task(s) have been shutdown."); let output_events = output_driver .await .expect("output driver task should not have panicked"); + // Now that all output events have been received, we can shutdown the controlled edge/sink + output_task_coordinator.shutdown().await; + + // as well as the telemetry and topology + telemetry_task_coordinator.shutdown().await; + topology_task_coordinator.shutdown().await; + info!("Collected runner metrics: {:?}", runner_metrics); let final_runner_metrics = runner_metrics.lock().await; @@ -418,7 +421,7 @@ impl Runner { /// during deserialization of the test case file, whether the error is I/O related in nature or due /// to invalid YAML, or not representing valid serialized test cases, then an error variant will be /// returned explaining the cause. -fn load_component_test_cases(test_case_data_path: PathBuf) -> Result, String> { +fn load_component_test_cases(test_case_data_path: &PathBuf) -> Result, String> { std::fs::File::open(test_case_data_path) .map_err(|e| { format!( @@ -437,14 +440,14 @@ fn load_component_test_cases(test_case_data_path: PathBuf) -> Result, + test_case: &TestCase, configuration: &ValidationConfiguration, input_task_coordinator: &TaskCoordinator, output_task_coordinator: &TaskCoordinator, runner_metrics: &Arc>, ) -> Result<(RunnerInput, RunnerOutput, Option>), vector_lib::Error> { let component_type = configuration.component_type(); - let maybe_external_resource = configuration.external_resource(test_case); + let maybe_external_resource = configuration.external_resource(test_case.config_name.as_ref()); let resource_codec = maybe_external_resource .as_ref() @@ -481,7 +484,13 @@ fn build_external_resource( let (tx, rx) = mpsc::channel(1024); let resource = maybe_external_resource.expect("a sink must always have an external resource"); - resource.spawn_as_output(tx, output_task_coordinator, runner_metrics)?; + + resource.spawn_as_output( + tx, + output_task_coordinator, + test_case.events.clone(), + runner_metrics, + )?; Ok(( RunnerInput::Controlled, @@ -599,7 +608,7 @@ fn spawn_input_driver( vec![event].estimated_json_encoded_size_of().get() as u64; } } - trace!("Input driver sent all events."); + info!("Input driver sent all events."); }) } @@ -608,34 +617,60 @@ fn spawn_output_driver( runner_metrics: &Arc>, maybe_encoder: Option>, component_type: ComponentType, + expected_events: usize, ) -> JoinHandle> { let output_runner_metrics = Arc::clone(runner_metrics); tokio::spawn(async move { + let timeout = tokio::time::sleep(Duration::from_secs(8)); + tokio::pin!(timeout); + let mut output_events = Vec::new(); - while let Some(events) = output_rx.recv().await { - output_events.extend(events.clone()); - // Update the runner metrics for the received event. 
This will later - // be used in the Validators, as the "expected" case. - let mut output_runner_metrics = output_runner_metrics.lock().await; - - for output_event in events { - if component_type != ComponentType::Sink { - // The event is wrapped in a Vec to match the actual event storage in - // the real topology - output_runner_metrics.received_event_bytes_total += - vec![&output_event].estimated_json_encoded_size_of().get() as u64; - - if let Some(encoder) = maybe_encoder.as_ref() { - let mut buffer = BytesMut::new(); - encoder - .clone() - .encode(output_event, &mut buffer) - .expect("should not fail to encode output event"); - - output_runner_metrics.received_events_total += 1; - output_runner_metrics.received_bytes_total += buffer.len() as u64; + loop { + tokio::select! { + _ = &mut timeout => { + error!("Output driver timed out waiting for all events."); + break + }, + events = output_rx.recv() => { + if let Some(events) = events { + info!("Output driver received {} events.", events.len()); + output_events.extend(events.clone()); + + // Update the runner metrics for the received event. This will later + // be used in the Validators, as the "expected" case. + let mut output_runner_metrics = output_runner_metrics.lock().await; + + if component_type != ComponentType::Sink { + for output_event in events { + // The event is wrapped in a Vec to match the actual event storage in + // the real topology + output_runner_metrics.received_event_bytes_total += + vec![&output_event].estimated_json_encoded_size_of().get() as u64; + + if let Some(encoder) = maybe_encoder.as_ref() { + let mut buffer = BytesMut::new(); + encoder + .clone() + .encode(output_event, &mut buffer) + .expect("should not fail to encode output event"); + + output_runner_metrics.received_events_total += 1; + output_runner_metrics.received_bytes_total += buffer.len() as u64; + } + } + } + if output_events.len() >= expected_events { + info!("Output driver has received all expected events."); + break + } + } else { + // The channel closed on us. + // This shouldn't happen because in the runner we should not shutdown the external + // resource until this output driver task is complete. + error!("Output driver channel with external resource closed."); + break } } } diff --git a/src/components/validation/runner/telemetry.rs b/src/components/validation/runner/telemetry.rs index c415bfb05d012..83c12c02db9c4 100644 --- a/src/components/validation/runner/telemetry.rs +++ b/src/components/validation/runner/telemetry.rs @@ -36,7 +36,7 @@ impl Telemetry { /// Creates a telemetry collector by attaching the relevant components to an existing `ConfigBuilder`. pub fn attach_to_config(config_builder: &mut ConfigBuilder) -> Self { let listen_addr = GrpcAddress::from(next_addr()); - debug!(%listen_addr, "Attaching telemetry components."); + info!(%listen_addr, "Attaching telemetry components."); // Attach an internal logs and internal metrics source, and send them on to a dedicated Vector // sink that we'll spawn a listener for to collect everything. @@ -83,10 +83,10 @@ impl Telemetry { // needs to be shut down after the telemetry collector. This is because // the server needs to be alive to process every last incoming event // from the Vector sink that we're using to collect telemetry. 
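// [Editorial sketch, not part of the patch] The reworked output driver above
// is a collect-until-expected-or-deadline loop. Reduced here to single events
// rather than batches; the 8-second deadline mirrors the "generous timeout"
// chosen in the patch:

use std::time::Duration;
use tokio::sync::mpsc;

async fn collect<T>(mut rx: mpsc::Receiver<T>, expected: usize) -> Vec<T> {
    let deadline = tokio::time::sleep(Duration::from_secs(8));
    tokio::pin!(deadline);

    let mut out = Vec::new();
    loop {
        tokio::select! {
            _ = &mut deadline => break,            // timed out waiting
            item = rx.recv() => match item {
                Some(v) => {
                    out.push(v);
                    if out.len() >= expected {
                        break;                     // all expected events seen
                    }
                }
                None => break,                     // sender closed the channel
            },
        }
    }
    out
}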
- let grpc_task_coordinator = TaskCoordinator::new(); + let grpc_task_coordinator = TaskCoordinator::new("gRPC"); spawn_grpc_server(self.listen_addr, self.service, &grpc_task_coordinator); - let grpc_task_coordinator = grpc_task_coordinator.started().await; - debug!("All gRPC task(s) started."); + let mut grpc_task_coordinator = grpc_task_coordinator.started().await; + info!("All gRPC task(s) started."); let mut rx = self.rx; let driver_handle = tokio::spawn(async move { @@ -107,7 +107,7 @@ impl Telemetry { // emitted. Thus, two batches ensure that all component // events have been emitted. - debug!("Telemetry: waiting for final internal_metrics events before shutting down."); + info!("Telemetry: waiting for final internal_metrics events before shutting down."); let mut batches_received = 0; @@ -121,7 +121,7 @@ impl Telemetry { None => break, Some(telemetry_event_batch) => { telemetry_events.extend(telemetry_event_batch); - debug!("Telemetry: processed one batch of internal_metrics."); + info!("Telemetry: processed one batch of internal_metrics."); batches_received += 1; if batches_received == SHUTDOWN_TICKS { break; @@ -145,7 +145,6 @@ impl Telemetry { } grpc_task_coordinator.shutdown().await; - debug!("GRPC task(s) have been shutdown."); telemetry_completed.mark_as_done(); diff --git a/src/components/validation/sync.rs b/src/components/validation/sync.rs index 0e842b66eb87f..92f359af6a47a 100644 --- a/src/components/validation/sync.rs +++ b/src/components/validation/sync.rs @@ -180,17 +180,19 @@ pub struct Started { /// after waiting for all tasks to start, and so on. pub struct TaskCoordinator { state: State, + name: String, } impl TaskCoordinator<()> { /// Creates a new `TaskCoordinator`. - pub fn new() -> TaskCoordinator { + pub fn new(name: &str) -> TaskCoordinator { TaskCoordinator { state: Configuring { tasks_started: WaitGroup::new(), tasks_completed: WaitGroup::new(), shutdown_triggers: Mutex::new(Vec::new()), }, + name: name.to_string(), } } } @@ -233,27 +235,34 @@ impl TaskCoordinator { tasks_completed: Some(tasks_completed), shutdown_triggers: shutdown_triggers.into_inner().expect("poisoned"), }, + name: self.name, } } } impl TaskCoordinator { /// Triggers all coordinated tasks to shutdown, and waits for them to mark themselves as completed. - pub async fn shutdown(mut self) { + pub async fn shutdown(&mut self) { + info!("{}: triggering task to shutdown.", self.name); + // Trigger all registered shutdown handles. for trigger in self.state.shutdown_triggers.drain(..) { trigger.trigger(); - trace!("Shutdown triggered for coordinated tasks."); + debug!("{}: shutdown triggered for coordinated tasks.", self.name); } // Now simply wait for all of them to mark themselves as completed. 
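// [Editorial aside, not part of the patch] Two ergonomic changes land in
// sync.rs here: shutdown() now takes &mut self instead of consuming the
// coordinator, and every coordinator carries a name that prefixes its log
// lines, so interleaved phase shutdowns can be told apart. Hypothetical usage:
//
//     let input = TaskCoordinator::new("Input");
//     // ... spawn tasks that register started/completed/shutdown handles ...
//     let mut input = input.started().await; // all tasks have reported in
//     input.shutdown().await;  // logs "Input: task has been shutdown."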
- trace!("Waiting for coordinated tasks to complete..."); + debug!( + "{}: waiting for coordinated tasks to complete...", + self.name + ); let tasks_completed = self .state .tasks_completed .as_mut() .expect("tasks completed wait group already consumed"); tasks_completed.wait_for_children().await; - trace!("All coordinated tasks completed."); + + info!("{}: task has been shutdown.", self.name); } } diff --git a/src/components/validation/validators/component_spec/mod.rs b/src/components/validation/validators/component_spec/mod.rs index 3d055ae6993ce..5cce50c01c195 100644 --- a/src/components/validation/validators/component_spec/mod.rs +++ b/src/components/validation/validators/component_spec/mod.rs @@ -204,7 +204,7 @@ fn filter_events_by_metric_and_component<'a>( }) .filter(|&m| { if m.name() == metric.to_string() { - info!("{}", m); + debug!("{}", m); if let Some(tags) = m.tags() { if tags.get("component_id").unwrap_or("") == component_id { return true; diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index 62ee534ef1e6f..a6313c1f1ae5b 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -5,7 +5,7 @@ use hyper::Body; use indexmap::IndexMap; use vector_lib::codecs::{ encoding::{Framer, Serializer}, - CharacterDelimitedEncoder, GelfSerializerConfig, + CharacterDelimitedEncoder, }; use crate::{ @@ -312,79 +312,39 @@ impl ValidatableComponent for HttpSinkConfig { use std::str::FromStr; use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; - let happy_encoder = EncodingConfigWithFraming::new( - None, - JsonSerializerConfig::new(MetricTagValues::Full).into(), - Transformer::default(), - ); - - fn get_config(encoding: EncodingConfigWithFraming) -> HttpSinkConfig { - HttpSinkConfig { - uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") - .expect("should never fail to parse"), - method: HttpMethod::Post, - encoding, - auth: None, - headers: None, - compression: Compression::default(), - batch: BatchConfig::default(), - request: RequestConfig::default(), - tls: None, - acknowledgements: AcknowledgementsConfig::default(), - payload_prefix: String::new(), - payload_suffix: String::new(), - } - } - - fn get_external_resource( - config: &HttpSinkConfig, - encoding: Option, - ) -> ExternalResource { - ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts(config.uri.uri.clone(), Some(config.method.into())), - if let Some(encoding) = encoding { - encoding - } else { - config.encoding.clone() - }, - ) - } - - let happy_config = get_config(happy_encoder.clone()); - - let happy_external_resource = get_external_resource(&happy_config, None); - - // this config uses the Gelf serializer, which requires the "level" field to - // be an integer - let sad_config = get_config(EncodingConfigWithFraming::new( - None, - GelfSerializerConfig::new().into(), - Transformer::default(), - )); + let config = HttpSinkConfig { + uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") + .expect("should never fail to parse"), + method: HttpMethod::Post, + encoding: EncodingConfigWithFraming::new( + None, + JsonSerializerConfig::new(MetricTagValues::Full).into(), + Transformer::default(), + ), + auth: None, + headers: None, + compression: Compression::default(), + batch: BatchConfig::default(), + request: RequestConfig::default(), + tls: None, + acknowledgements: AcknowledgementsConfig::default(), + payload_prefix: String::new(), + payload_suffix: String::new(), + }; - let sad_external_resource = get_external_resource( - &happy_config, - // the 
external resource needs to use an encoder that actually works, in order to - // get the event into the topology successfully - Some(happy_encoder), + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts(config.uri.uri.clone(), Some(config.method.into())), + config.encoding.clone(), ); ValidationConfiguration::from_sink( Self::NAME, - vec![ - ComponentTestCaseConfig::from_sink( - happy_config, - None, - Some(happy_external_resource), - ), - // this config only runs with the test case "encoding_error" in the yaml file. - ComponentTestCaseConfig::from_sink( - sad_config, - Some("encoding_error".to_owned()), - Some(sad_external_resource), - ), - ], + vec![ComponentTestCaseConfig::from_sink( + config, + None, + Some(external_resource), + )], ) } } diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs index 3093847904101..0466917cbdd1b 100644 --- a/src/sources/http_client/client.rs +++ b/src/sources/http_client/client.rs @@ -238,11 +238,12 @@ impl SourceConfig for HttpClientConfig { impl ValidatableComponent for HttpClientConfig { fn validation_configuration() -> ValidationConfiguration { - let uri = Uri::from_static("http://127.0.0.1:9898/logs"); + let uri = Uri::from_static("http://127.0.0.1:9898"); let config = Self { endpoint: uri.to_string(), interval: Duration::from_secs(1), + timeout: Duration::from_secs(1), decoding: DeserializerConfig::Json(Default::default()), ..Default::default() }; diff --git a/tests/validation/components/sinks/http.yaml b/tests/validation/components/sinks/http.yaml index 3d3525766d74f..861e64f5fefc1 100644 --- a/tests/validation/components/sinks/http.yaml +++ b/tests/validation/components/sinks/http.yaml @@ -5,10 +5,6 @@ - simple message 2 - simple message 3 - name: sad path - config_name: encoding_error expectation: failure events: - - event: simple message with the invalid data type for encoder - name: level - value: "1" - fail: true + - external_resource_rejects: simple message downstream rejects From c71d5d16493f1662187ed6e7a11c8a88fbc4e133 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 29 Feb 2024 11:22:55 -0700 Subject: [PATCH 0076/1491] chore(testing): expose component validation framework (#19964) * chore(testing): expose component validation framework * ff --- src/components/validation/mod.rs | 327 +++++++++++++++---------------- 1 file changed, 162 insertions(+), 165 deletions(-) diff --git a/src/components/validation/mod.rs b/src/components/validation/mod.rs index d195b48ea99b0..54b3d48238244 100644 --- a/src/components/validation/mod.rs +++ b/src/components/validation/mod.rs @@ -255,180 +255,177 @@ pub struct RunnerMetrics { pub discarded_events_total: u64, } -#[cfg(all(test, feature = "component-validation-tests"))] -mod tests { - use std::{ - collections::VecDeque, - path::{Component, Path, PathBuf}, - }; - - use test_generator::test_resources; - - use crate::components::validation::{Runner, StandardValidators}; - use crate::extra_context::ExtraContext; - - use super::{ComponentType, ValidatableComponentDescription, ValidationConfiguration}; +#[cfg(feature = "component-validation-runner")] +fn run_validation(configuration: ValidationConfiguration, test_case_data_path: std::path::PathBuf) { + let component_name = configuration.component_name(); + info!( + "Running validation for component '{}' (type: {:?})...", + component_name, + configuration.component_type() + ); + + let rt = tokio::runtime::Builder::new_current_thread() + .enable_all() + .build() + .unwrap(); + 
rt.block_on(async { + let mut runner = Runner::from_configuration( + configuration, + test_case_data_path, + crate::extra_context::ExtraContext::default(), + ); + runner.add_validator(StandardValidators::ComponentSpec); + + match runner.run_validation().await { + Ok(test_case_results) => { + let mut details = Vec::new(); + let mut had_failures = false; + + for test_case_result in test_case_results.into_iter() { + for validator_result in test_case_result.validator_results() { + match validator_result { + Ok(success) => { + if success.is_empty() { + details.push(format!( + " test case '{}': passed", + test_case_result.test_name() + )); + } else { + let formatted = success + .iter() + .map(|s| format!(" - {}\n", s)) + .collect::>(); + + details.push(format!( + " test case '{}': passed\n{}", + test_case_result.test_name(), + formatted.join("") + )); + } + } + Err(failure) => { + had_failures = true; + + if failure.is_empty() { + details.push(format!( + " test case '{}': failed", + test_case_result.test_name() + )); + } else { + let formatted = failure + .iter() + .map(|s| format!(" - {}\n", s)) + .collect::>(); + + details.push(format!( + " test case '{}': failed\n{}", + test_case_result.test_name(), + formatted.join("") + )); + } + } + } + } + } - #[test_resources("tests/validation/components/**/*.yaml")] - fn validate_component(test_case_data_path: &str) { - let test_case_data_path = PathBuf::from(test_case_data_path.to_string()); - if !test_case_data_path.exists() { - panic!("Component validation test invoked with path to test case data that could not be found: {}", test_case_data_path.to_string_lossy()); + if had_failures { + panic!( + "Failed to validate component '{}':\n{}", + component_name, + details.join("") + ); + } else { + info!( + "Successfully validated component '{}':\n{}", + component_name, + details.join("") + ); + } + } + Err(e) => panic!( + "Failed to complete validation run for component '{}': {}", + component_name, e + ), } + }); +} - let configuration = get_validation_configuration_from_test_case_path(&test_case_data_path) - .expect("Failed to find validation configuration from given test case data path."); - - run_validation(configuration, test_case_data_path); +#[cfg(feature = "component-validation-runner")] +fn get_validation_configuration_from_test_case_path( + test_case_data_path: &std::path::Path, +) -> Result { + // The test case data path should follow a fixed structure where the 2nd to last segment is + // the component type, and the last segment -- when the extension is removed -- is the + // component name. + let mut path_segments = test_case_data_path + .components() + .filter_map(|c| match c { + std::path::Component::Normal(path) => Some(std::path::Path::new(path)), + _ => None, + }) + .collect::>(); + if path_segments.len() <= 2 { + return Err(format!( + "Test case data path contained {} normal path segment(s), expected at least 2 or more.", + path_segments.len() + )); } - fn get_validation_configuration_from_test_case_path( - test_case_data_path: &Path, - ) -> Result { - // The test case data path should follow a fixed structure where the 2nd to last segment is - // the component type, and the last segment -- when the extension is removed -- is the - // component name. 
- let mut path_segments = test_case_data_path - .components() - .filter_map(|c| match c { - Component::Normal(path) => Some(Path::new(path)), - _ => None, - }) - .collect::>(); - if path_segments.len() <= 2 { - return Err(format!("Test case data path contained {} normal path segment(s), expected at least 2 or more.", path_segments.len())); - } + let component_name = path_segments + .pop_back() + .and_then(|segment| segment.file_stem().map(|s| s.to_string_lossy().to_string())) + .ok_or(format!( + "Test case data path '{}' contained unexpected or invalid filename.", + test_case_data_path.as_os_str().to_string_lossy() + ))?; + + let component_type = path_segments + .pop_back() + .map(|segment| { + segment + .as_os_str() + .to_string_lossy() + .to_string() + .to_ascii_lowercase() + }) + .and_then(|segment| match segment.as_str() { + "sources" => Some(ComponentType::Source), + "transforms" => Some(ComponentType::Transform), + "sinks" => Some(ComponentType::Sink), + _ => None, + }) + .ok_or(format!( + "Test case data path '{}' contained unexpected or invalid component type.", + test_case_data_path.as_os_str().to_string_lossy() + ))?; + + // Now that we've theoretically got the component type and component name, try to query the + // validatable component descriptions to find it. + ValidatableComponentDescription::query(&component_name, component_type).ok_or(format!( + "No validation configuration for component '{}' with component type '{}'.", + component_name, + component_type.as_str() + )) +} - let component_name = path_segments - .pop_back() - .and_then(|segment| segment.file_stem().map(|s| s.to_string_lossy().to_string())) - .ok_or(format!( - "Test case data path '{}' contained unexpected or invalid filename.", - test_case_data_path.as_os_str().to_string_lossy() - ))?; - - let component_type = path_segments - .pop_back() - .map(|segment| { - segment - .as_os_str() - .to_string_lossy() - .to_string() - .to_ascii_lowercase() - }) - .and_then(|segment| match segment.as_str() { - "sources" => Some(ComponentType::Source), - "transforms" => Some(ComponentType::Transform), - "sinks" => Some(ComponentType::Sink), - _ => None, - }) - .ok_or(format!( - "Test case data path '{}' contained unexpected or invalid component type.", - test_case_data_path.as_os_str().to_string_lossy() - ))?; - - // Now that we've theoretically got the component type and component name, try to query the - // validatable component descriptions to find it. 
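// [Editorial sketch, not part of the patch] The path convention the function
// above depends on, in miniature: the last segment's stem is the component
// name, and the segment before it is the component-type directory.

use std::path::Path;

fn component_coordinates(path: &Path) -> Option<(String, String)> {
    let name = path.file_stem()?.to_string_lossy().into_owned();
    let kind = path.parent()?.file_name()?.to_string_lossy().into_owned();
    // ("sinks", "http") for tests/validation/components/sinks/http.yaml
    Some((kind, name))
}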
- ValidatableComponentDescription::query(&component_name, component_type).ok_or(format!( - "No validation configuration for component '{}' with component type '{}'.", - component_name, - component_type.as_str() - )) +#[cfg(feature = "component-validation-runner")] +pub fn validate_component(test_case_data_path: &str) { + let test_case_data_path = std::path::PathBuf::from(test_case_data_path.to_string()); + if !test_case_data_path.exists() { + panic!("Component validation test invoked with path to test case data that could not be found: {}", test_case_data_path.to_string_lossy()); } - fn run_validation(configuration: ValidationConfiguration, test_case_data_path: PathBuf) { - crate::test_util::trace_init(); - - let component_name = configuration.component_name(); - info!( - "Running validation for component '{}' (type: {:?})...", - component_name, - configuration.component_type() - ); + let configuration = get_validation_configuration_from_test_case_path(&test_case_data_path) + .expect("Failed to find validation configuration from given test case data path."); - let rt = tokio::runtime::Builder::new_current_thread() - .enable_all() - .build() - .unwrap(); - rt.block_on(async { - let mut runner = Runner::from_configuration( - configuration, - test_case_data_path, - ExtraContext::default(), - ); - runner.add_validator(StandardValidators::ComponentSpec); - - match runner.run_validation().await { - Ok(test_case_results) => { - let mut details = Vec::new(); - let mut had_failures = false; - - for test_case_result in test_case_results.into_iter() { - for validator_result in test_case_result.validator_results() { - match validator_result { - Ok(success) => { - if success.is_empty() { - details.push(format!( - " test case '{}': passed", - test_case_result.test_name() - )); - } else { - let formatted = success - .iter() - .map(|s| format!(" - {}\n", s)) - .collect::>(); - - details.push(format!( - " test case '{}': passed\n{}", - test_case_result.test_name(), - formatted.join("") - )); - } - } - Err(failure) => { - had_failures = true; - - if failure.is_empty() { - details.push(format!( - " test case '{}': failed", - test_case_result.test_name() - )); - } else { - let formatted = failure - .iter() - .map(|s| format!(" - {}\n", s)) - .collect::>(); - - details.push(format!( - " test case '{}': failed\n{}", - test_case_result.test_name(), - formatted.join("") - )); - } - } - } - } - } + run_validation(configuration, test_case_data_path); +} - if had_failures { - panic!( - "Failed to validate component '{}':\n{}", - component_name, - details.join("") - ); - } else { - info!( - "Successfully validated component '{}':\n{}", - component_name, - details.join("") - ); - } - } - Err(e) => panic!( - "Failed to complete validation run for component '{}': {}", - component_name, e - ), - } - }); +#[cfg(all(test, feature = "component-validation-tests"))] +mod tests { + #[test_generator::test_resources("tests/validation/components/**/*.yaml")] + pub fn validate_component(test_case_data_path: &str) { + crate::test_util::trace_init(); + super::validate_component(test_case_data_path); } } From 44150403903915f0fa8b31e8fd20b2d8cb33b480 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 29 Feb 2024 11:28:46 -0700 Subject: [PATCH 0077/1491] chore(ci): add component validation (#19932) --- .github/workflows/test.yml | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 3780e8e94b3ac..01ddedd79ea46 100644 --- a/.github/workflows/test.yml +++ 
b/.github/workflows/test.yml @@ -81,6 +81,10 @@ jobs: env: CARGO_BUILD_JOBS: 5 + # Validates components for adherence to the Component Specification + - name: Check Component Spec + run: make test-component-validation + - name: Upload test results run: scripts/upload-test-results.sh if: always() From 9acc151516e8db9b8798eb80b10cee8f843b6da7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 13:35:45 +0000 Subject: [PATCH 0078/1491] chore(deps): Bump log from 0.4.20 to 0.4.21 (#19977) Bumps [log](https://github.com/rust-lang/log) from 0.4.20 to 0.4.21. - [Release notes](https://github.com/rust-lang/log/releases) - [Changelog](https://github.com/rust-lang/log/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/log/compare/0.4.20...0.4.21) --- updated-dependencies: - dependency-name: log dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- vdev/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 6e8671279a76b..2d981d7fdd9e5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4937,9 +4937,9 @@ checksum = "ee33defb27b106378a6efcfcde4dda6226dfdac8ba7a2904f5bc93363cb88557" [[package]] name = "log" -version = "0.4.20" +version = "0.4.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" +checksum = "90ed8c1e510134f979dbc4f070f87d4313098b704861a105fe34231c70a3901c" [[package]] name = "logfmt" diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index 3fc57a1acddd2..ba9e48dedc3e4 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -23,7 +23,7 @@ hex = "0.4.3" indexmap.workspace = true indicatif = { version = "0.17.8", features = ["improved_unicode"] } itertools = "0.12.1" -log = "0.4.20" +log = "0.4.21" once_cell = "1.19" os_info = { version = "3.7.0", default-features = false } # watch https://github.com/epage/anstyle for official interop with Clap From 29a9167c8554befaa5a56a188b3c44e18d08c638 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 13:35:55 +0000 Subject: [PATCH 0079/1491] chore(deps): Bump syn from 2.0.51 to 2.0.52 (#19979) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.51 to 2.0.52. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.51...2.0.52) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... 
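With the workflow step above in place, contributors can run the same check locally via the Makefile target it invokes, `make test-component-validation`, which drives every YAML test case under tests/validation/components/ through the validation runner.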
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2d981d7fdd9e5..0d96a838c5b11 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strum 0.25.0", - "syn 2.0.51", + "syn 2.0.52", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -708,7 +708,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -1467,7 +1467,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -1594,7 +1594,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", "syn_derive", ] @@ -2026,7 +2026,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -2554,7 +2554,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -2626,7 +2626,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -2659,7 +2659,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -2764,7 +2764,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -3045,7 +3045,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -3057,7 +3057,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -3077,7 +3077,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -3479,7 +3479,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -5169,7 +5169,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -5289,7 +5289,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "regex", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -5738,7 +5738,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -5750,7 +5750,7 @@ 
dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -5938,7 +5938,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -6223,7 +6223,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -6311,7 +6311,7 @@ checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -6588,7 +6588,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.78", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -6791,7 +6791,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.51", + "syn 2.0.52", "tempfile", "which 4.4.2", ] @@ -6819,7 +6819,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -7603,7 +7603,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.51", + "syn 2.0.52", "unicode-ident", ] @@ -8052,7 +8052,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8063,7 +8063,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8125,7 +8125,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8198,7 +8198,7 @@ dependencies = [ "darling 0.20.8", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8477,7 +8477,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8667,7 +8667,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8680,7 +8680,7 @@ dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", "rustversion", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8723,9 +8723,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.51" +version = "2.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6ab617d94515e94ae53b8406c628598680aa0c9587474ecbe58188f7b345d66c" +checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", @@ -8741,7 +8741,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -8909,7 +8909,7 @@ checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -9057,7 +9057,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -9304,7 +9304,7 @@ dependencies = [ "proc-macro2 1.0.78", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.51", + "syn 
2.0.52", ] [[package]] @@ -9407,7 +9407,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -9641,7 +9641,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -9671,7 +9671,7 @@ checksum = "ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] @@ -10230,7 +10230,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.51", + "syn 2.0.52", "tracing 0.1.40", ] @@ -10243,7 +10243,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.51", + "syn 2.0.52", "vector-config", "vector-config-common", ] @@ -10651,7 +10651,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", "wasm-bindgen-shared", ] @@ -10685,7 +10685,7 @@ checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11130,7 +11130,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", - "syn 2.0.51", + "syn 2.0.52", ] [[package]] From 6ef50922b302519518937008b99cba9f97a7283c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 13:36:04 +0000 Subject: [PATCH 0080/1491] chore(deps): Bump mlua from 0.9.5 to 0.9.6 (#19985) Bumps [mlua](https://github.com/khvzak/mlua) from 0.9.5 to 0.9.6. - [Release notes](https://github.com/khvzak/mlua/releases) - [Changelog](https://github.com/mlua-rs/mlua/blob/master/CHANGELOG.md) - [Commits](https://github.com/khvzak/mlua/compare/v0.9.5...v0.9.6) --- updated-dependencies: - dependency-name: mlua dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0d96a838c5b11..cf222c44897da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5252,9 +5252,9 @@ dependencies = [ [[package]] name = "mlua" -version = "0.9.5" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d3561f79659ff3afad7b25e2bf2ec21507fe601ebecb7f81088669ec4bfd51e" +checksum = "868d02cb5eb97761bbf6bd6922c1c7a88b8ea252bbf43bd8350a0bf8497a1fc0" dependencies = [ "bstr 1.9.1", "mlua-sys", diff --git a/Cargo.toml b/Cargo.toml index 0582b03bde6bc..545baef8795c8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -345,7 +345,7 @@ arr_macro = { version = "0.2.1" } heim = { git = "https://github.com/vectordotdev/heim.git", branch = "update-nix", default-features = false, features = ["disk"] } # make sure to update the external docs when the Lua version changes -mlua = { version = "0.9.5", default-features = false, features = ["lua54", "send", "vendored", "macros"], optional = true } +mlua = { version = "0.9.6", default-features = false, features = ["lua54", "send", "vendored", "macros"], optional = true } [target.'cfg(windows)'.dependencies] windows-service = "0.6.0" diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 631403d6a3ae0..c6995e3bce72a 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -27,7 +27,7 @@ lookup = { package = "vector-lookup", path = "../vector-lookup" } metrics = "0.21.1" metrics-tracing-context = { version = "0.14.0", default-features = false } metrics-util = { version = "0.15.1", default-features = false, features = ["registry"] } -mlua = { version = "0.9.5", default-features = false, features = ["lua54", "send", "vendored"], optional = true } +mlua = { version = "0.9.6", default-features = false, features = ["lua54", "send", "vendored"], optional = true } no-proxy = { version = "0.3.4", default-features = false, features = ["serialize"] } once_cell = { version = "1.19", default-features = false } ordered-float = { version = "4.2.0", default-features = false } From 69e84b335edef665264aab16a5895c3877b99b5e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 13:36:15 +0000 Subject: [PATCH 0081/1491] chore(deps): Bump confy from 0.6.0 to 0.6.1 (#19986) Bumps [confy](https://github.com/rust-cli/confy) from 0.6.0 to 0.6.1. - [Release notes](https://github.com/rust-cli/confy/releases) - [Commits](https://github.com/rust-cli/confy/compare/v0.6.0...v0.6.1) --- updated-dependencies: - dependency-name: confy dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- vdev/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cf222c44897da..bb818c4ea21d8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2182,9 +2182,9 @@ dependencies = [ [[package]] name = "confy" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15d296c475c6ed4093824c28e222420831d27577aaaf0a1163a3b7fc35b248a5" +checksum = "45b1f4c00870f07dc34adcac82bb6a72cc5aabca8536ba1797e01df51d2ce9a0" dependencies = [ "directories", "serde", diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index ba9e48dedc3e4..e7124c58d0d22 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -14,7 +14,7 @@ chrono.workspace = true clap.workspace = true clap-verbosity-flag = "2.2.0" clap_complete = "4.5.1" -confy = "0.6.0" +confy = "0.6.1" directories = "5.0.1" # remove this when stabilized https://doc.rust-lang.org/stable/std/path/fn.absolute.html dunce = "1.0.4" From e2d8ad468ba7fa96598cf8cd3cc80641861d8b30 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Mar 2024 13:36:24 +0000 Subject: [PATCH 0082/1491] chore(deps): Bump indexmap from 2.2.3 to 2.2.5 (#19987) Bumps [indexmap](https://github.com/indexmap-rs/indexmap) from 2.2.3 to 2.2.5. - [Changelog](https://github.com/indexmap-rs/indexmap/blob/master/RELEASES.md) - [Commits](https://github.com/indexmap-rs/indexmap/compare/2.2.3...2.2.5) --- updated-dependencies: - dependency-name: indexmap dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 42 +++++++++++++++++++------------------- Cargo.toml | 2 +- lib/file-source/Cargo.toml | 2 +- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index bb818c4ea21d8..a7b19ff0969e1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -441,7 +441,7 @@ dependencies = [ "fnv", "futures-util", "http 1.0.0", - "indexmap 2.2.3", + "indexmap 2.2.5", "mime", "multer", "num-traits", @@ -491,7 +491,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7cf4d4e86208f4f9b81a503943c07e6e7f29ad3505e6c9ce6431fe64dc241681" dependencies = [ "bytes 1.5.0", - "indexmap 2.2.3", + "indexmap 2.2.5", "serde", "serde_json", ] @@ -3262,7 +3262,7 @@ dependencies = [ "flate2", "futures 0.3.30", "glob", - "indexmap 2.2.3", + "indexmap 2.2.5", "libc", "quickcheck", "scan_fmt", @@ -3742,7 +3742,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.9", - "indexmap 2.2.3", + "indexmap 2.2.5", "slab", "tokio", "tokio-util", @@ -3761,7 +3761,7 @@ dependencies = [ "futures-sink", "futures-util", "http 1.0.0", - "indexmap 2.2.3", + "indexmap 2.2.5", "slab", "tokio", "tokio-util", @@ -4342,9 +4342,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.3" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "233cf39063f058ea2caae4091bf4a3ef70a653afbc026f5c4a4135d114e3c177" +checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -6244,7 +6244,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies 
= [ "fixedbitset", - "indexmap 2.2.3", + "indexmap 2.2.5", ] [[package]] @@ -6691,7 +6691,7 @@ dependencies = [ name = "prometheus-parser" version = "0.1.0" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.5", "nom", "num_enum 0.7.2", "prost 0.12.3", @@ -8072,7 +8072,7 @@ version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.5", "itoa", "ryu", "serde", @@ -8169,7 +8169,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.3", + "indexmap 2.2.5", "serde", "serde_derive", "serde_json", @@ -8219,7 +8219,7 @@ version = "0.9.32" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8fd075d994154d4a774f95b51fb96bdc2832b0ea48425c92546073816cda1f2f" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.5", "itoa", "ryu", "serde", @@ -9220,7 +9220,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.5", "toml_datetime", "winnow", ] @@ -9231,7 +9231,7 @@ version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.5", "toml_datetime", "winnow", ] @@ -9242,7 +9242,7 @@ version = "0.22.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" dependencies = [ - "indexmap 2.2.3", + "indexmap 2.2.5", "serde", "serde_spanned", "toml_datetime", @@ -9880,7 +9880,7 @@ dependencies = [ "dunce", "glob", "hex", - "indexmap 2.2.3", + "indexmap 2.2.5", "indicatif", "itertools 0.12.1", "log", @@ -9986,7 +9986,7 @@ dependencies = [ "hyper", "hyper-openssl", "hyper-proxy", - "indexmap 2.2.3", + "indexmap 2.2.5", "indoc", "infer 0.15.0", "inventory", @@ -10171,7 +10171,7 @@ dependencies = [ "crossbeam-utils", "derivative", "futures 0.3.30", - "indexmap 2.2.3", + "indexmap 2.2.5", "metrics", "nom", "ordered-float 4.2.0", @@ -10202,7 +10202,7 @@ dependencies = [ "chrono-tz", "encoding_rs", "http 0.2.9", - "indexmap 2.2.3", + "indexmap 2.2.5", "inventory", "no-proxy", "num-traits", @@ -10272,7 +10272,7 @@ dependencies = [ "headers", "http 0.2.9", "hyper-proxy", - "indexmap 2.2.3", + "indexmap 2.2.5", "metrics", "metrics-tracing-context", "metrics-util", @@ -10479,7 +10479,7 @@ dependencies = [ "hostname", "iana-time-zone", "idna 0.5.0", - "indexmap 2.2.3", + "indexmap 2.2.5", "indoc", "itertools 0.12.1", "lalrpop", diff --git a/Cargo.toml b/Cargo.toml index 545baef8795c8..270313c8305cc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -128,7 +128,7 @@ members = [ [workspace.dependencies] chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } clap = { version = "4.5.1", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } -indexmap = { version = "2.2.3", default-features = false, features = ["serde", "std"] } +indexmap = { version = "2.2.5", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.4", default-features = false } proptest = "1.4" proptest-derive = "0.4.0" diff --git a/lib/file-source/Cargo.toml b/lib/file-source/Cargo.toml index 13e7bd3698103..8a0b57cc1f006 100644 --- 
a/lib/file-source/Cargo.toml +++ b/lib/file-source/Cargo.toml @@ -39,7 +39,7 @@ default-features = false features = [] [dependencies.indexmap] -version = "2.2.3" +version = "2.2.5" default-features = false features = ["serde"] From 4677102f189dfb9f3f63ea2f03ad4008fa01b30e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 07:05:42 -0800 Subject: [PATCH 0083/1491] chore(deps): Bump opendal from 0.45.0 to 0.45.1 (#19996) Bumps [opendal](https://github.com/apache/opendal) from 0.45.0 to 0.45.1. - [Release notes](https://github.com/apache/opendal/releases) - [Changelog](https://github.com/apache/opendal/blob/main/CHANGELOG.md) - [Commits](https://github.com/apache/opendal/compare/v0.45.0...v0.45.1) --- updated-dependencies: - dependency-name: opendal dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 18 ++++-------------- 1 file changed, 4 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a7b19ff0969e1..51c6dfc4624d4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1275,7 +1275,7 @@ dependencies = [ "log", "paste", "pin-project", - "quick-xml 0.31.0", + "quick-xml", "rand 0.8.5", "reqwest", "rustc_version 0.4.0", @@ -5857,9 +5857,9 @@ checksum = "624a8340c38c1b80fd549087862da4ba43e08858af025b236e509b6649fc13d5" [[package]] name = "opendal" -version = "0.45.0" +version = "0.45.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3350be0d4ba326017ce22c98a9e94d21b069160fcd95bbe6c2555dac4e93c47a" +checksum = "52c17c077f23fa2d2c25d9d22af98baa43b8bbe2ef0de80cf66339aa70401467" dependencies = [ "anyhow", "async-trait", @@ -5875,7 +5875,7 @@ dependencies = [ "md-5", "once_cell", "percent-encoding", - "quick-xml 0.30.0", + "quick-xml", "reqwest", "serde", "serde_json", @@ -6971,16 +6971,6 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" -[[package]] -name = "quick-xml" -version = "0.30.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eff6510e86862b57b210fd8cbe8ed3f0d7d600b9c2863cd4549a2e033c66e956" -dependencies = [ - "memchr", - "serde", -] - [[package]] name = "quick-xml" version = "0.31.0" From 02bb9b2e7eda2326f4da9d6500c76f1b6e812b28 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 07:05:57 -0800 Subject: [PATCH 0084/1491] chore(deps): Bump arc-swap from 1.6.0 to 1.7.0 (#19997) Bumps [arc-swap](https://github.com/vorner/arc-swap) from 1.6.0 to 1.7.0. - [Changelog](https://github.com/vorner/arc-swap/blob/master/CHANGELOG.md) - [Commits](https://github.com/vorner/arc-swap/commits) --- updated-dependencies: - dependency-name: arc-swap dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/enrichment/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 51c6dfc4624d4..f3400811cab07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,9 +274,9 @@ dependencies = [ [[package]] name = "arc-swap" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bddcadddf5e9015d310179a59bb28c4d4b9920ad0f11e8e14dbadf654890c9a6" +checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f" [[package]] name = "arr_macro" diff --git a/Cargo.toml b/Cargo.toml index 270313c8305cc..41a87b1ff28b8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -250,7 +250,7 @@ sha2 = { version = "0.10.8", default-features = false, optional = true } greptimedb-client = { git = "https://github.com/GreptimeTeam/greptimedb-ingester-rust.git", rev = "4cb19ec47eeaf634c451d9ae438dac445a8a3dce", optional = true } # External libs -arc-swap = { version = "1.6", default-features = false, optional = true } +arc-swap = { version = "1.7", default-features = false, optional = true } async-compression = { version = "0.4.6", default-features = false, features = ["tokio", "gzip", "zstd"], optional = true } apache-avro = { version = "0.16.0", default-features = false, optional = true } axum = { version = "0.6.20", default-features = false } diff --git a/lib/enrichment/Cargo.toml b/lib/enrichment/Cargo.toml index d1487f1f1b603..74a2025884ac7 100644 --- a/lib/enrichment/Cargo.toml +++ b/lib/enrichment/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -arc-swap = { version = "1.6.0", default-features = false } +arc-swap = { version = "1.7.0", default-features = false } chrono.workspace = true dyn-clone = { version = "1.0.17", default-features = false } vrl.workspace = true From 8ca10a0232889fc8195911409d78469e50e76e12 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:07:36 +0000 Subject: [PATCH 0085/1491] chore(deps): Bump the aws group with 3 updates (#19976) Bumps the aws group with 3 updates: [aws-sdk-sts](https://github.com/awslabs/aws-sdk-rust), [aws-types](https://github.com/smithy-lang/smithy-rs) and [aws-sigv4](https://github.com/smithy-lang/smithy-rs). Updates `aws-sdk-sts` from 1.3.0 to 1.3.1 - [Release notes](https://github.com/awslabs/aws-sdk-rust/releases) - [Commits](https://github.com/awslabs/aws-sdk-rust/commits) Updates `aws-types` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-sigv4` from 1.1.6 to 1.1.7 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-sdk-sts dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-sigv4 dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ Cargo.toml | 6 +++--- 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f3400811cab07..4c3342df6d64b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -999,9 +999,9 @@ dependencies = [ [[package]] name = "aws-sdk-sts" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5700da387716ccfc30b27f44b008f457e1baca5b0f05b6b95455778005e3432a" +checksum = "798c8d82203af9e15a8b406574e0b36da91dd6db533028b74676489a1bc8bc7d" dependencies = [ "aws-credential-types", "aws-http", @@ -1022,9 +1022,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "404c64a104188ac70dd1684718765cb5559795458e446480e41984e68e57d888" +checksum = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1200,9 +1200,9 @@ dependencies = [ [[package]] name = "aws-types" -version = "1.1.6" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fbb5d48aae496f628e7aa2e41991dd4074f606d9e3ade1ce1059f293d40f9a2" +checksum = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1" dependencies = [ "aws-credential-types", "aws-smithy-async", diff --git a/Cargo.toml b/Cargo.toml index 41a87b1ff28b8..9bdf1c26d5f39 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -184,9 +184,9 @@ aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["be # The sts crate is needed despite not being referred to anywhere in the code because we need to set the # `behavior-version-latest` feature. Without this we get a runtime panic when `auth.assume_role` authentication # is configured. -aws-sdk-sts = { version = "1.3.0", default-features = false, features = ["behavior-version-latest"], optional = true } -aws-types = { version = "1.1.6", default-features = false, optional = true } -aws-sigv4 = { version = "1.1.6", default-features = false, features = ["sign-http"], optional = true } +aws-sdk-sts = { version = "1.3.1", default-features = false, features = ["behavior-version-latest"], optional = true } +aws-types = { version = "1.1.7", default-features = false, optional = true } +aws-sigv4 = { version = "1.1.7", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } aws-credential-types = { version = "1.1.7", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } From 312056c39178c3f40369d3aeefaf059dc9611626 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Mar 2024 15:42:16 +0000 Subject: [PATCH 0086/1491] chore(deps): Bump bollard from 0.15.0 to 0.16.0 (#19998) * chore(deps): Bump bollard from 0.15.0 to 0.16.0 Bumps [bollard](https://github.com/fussybeaver/bollard) from 0.15.0 to 0.16.0. 
- [Release notes](https://github.com/fussybeaver/bollard/releases) - [Commits](https://github.com/fussybeaver/bollard/compare/v0.15.0...v0.16.0) --- updated-dependencies: - dependency-name: bollard dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] * regenerate licenses Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 197 ++++++++++++++++++++++++++++++++----------- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 4 +- 3 files changed, 153 insertions(+), 50 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4c3342df6d64b..7fdc7b3879cfd 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -754,7 +754,7 @@ dependencies = [ "bytes 1.5.0", "fastrand 2.0.1", "http 0.2.9", - "hyper", + "hyper 0.14.28", "time", "tokio", "tracing 0.1.40", @@ -783,7 +783,7 @@ dependencies = [ "aws-types", "bytes 1.5.0", "http 0.2.9", - "http-body", + "http-body 0.4.5", "pin-project-lite", "tracing 0.1.40", ] @@ -944,7 +944,7 @@ dependencies = [ "aws-types", "bytes 1.5.0", "http 0.2.9", - "http-body", + "http-body 0.4.5", "once_cell", "percent-encoding", "regex", @@ -1068,7 +1068,7 @@ dependencies = [ "crc32fast", "hex", "http 0.2.9", - "http-body", + "http-body 0.4.5", "md-5", "pin-project-lite", "sha1", @@ -1100,7 +1100,7 @@ dependencies = [ "bytes-utils", "futures-core", "http 0.2.9", - "http-body", + "http-body 0.4.5", "once_cell", "percent-encoding", "pin-project-lite", @@ -1141,9 +1141,9 @@ dependencies = [ "fastrand 2.0.1", "h2 0.3.24", "http 0.2.9", - "http-body", - "hyper", - "hyper-rustls", + "http-body 0.4.5", + "hyper 0.14.28", + "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", "pin-utils", @@ -1179,7 +1179,7 @@ dependencies = [ "bytes-utils", "futures-core", "http 0.2.9", - "http-body", + "http-body 0.4.5", "itoa", "num-integer", "pin-project-lite", @@ -1225,8 +1225,8 @@ dependencies = [ "bytes 1.5.0", "futures-util", "http 0.2.9", - "http-body", - "hyper", + "http-body 0.4.5", + "hyper 0.14.28", "itoa", "matchit", "memchr", @@ -1252,7 +1252,7 @@ dependencies = [ "bytes 1.5.0", "futures-util", "http 0.2.9", - "http-body", + "http-body 0.4.5", "mime", "rustversion", "tower-layer", @@ -1527,9 +1527,9 @@ dependencies = [ [[package]] name = "bollard" -version = "0.15.0" +version = "0.16.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f03db470b3c0213c47e978da93200259a1eb4dae2e5512cba9955e2b540a6fc6" +checksum = "83545367eb6428eb35c29cdec3a1f350fa8d6d9085d59a7d7bcb637f2e38db5a" dependencies = [ "base64 0.21.7", "bollard-stubs", @@ -1539,16 +1539,19 @@ dependencies = [ "futures-util", "hex", "home", - "http 0.2.9", - "hyper", - "hyper-rustls", - "hyperlocal", + "http 1.0.0", + "http-body-util", + "hyper 1.2.0", + "hyper-named-pipe", + "hyper-rustls 0.26.0", + "hyper-util", + "hyperlocal-next", "log", "pin-project-lite", - "rustls 0.21.8", - "rustls-native-certs 0.6.3", - "rustls-pemfile 1.0.3", - "rustls-webpki 0.101.7", + "rustls 0.22.2", + "rustls-native-certs 0.7.0", + "rustls-pemfile 2.1.0", + "rustls-pki-types", "serde", "serde_derive", "serde_json", @@ -1557,16 +1560,16 @@ dependencies = [ "thiserror", "tokio", "tokio-util", + "tower-service", "url", - "webpki-roots", "winapi", ] [[package]] name = "bollard-stubs" -version = "1.43.0-rc.2" +version = "1.44.0-rc.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b58071e8fd9ec1e930efd28e3a90c1251015872a2ce49f81f36421b86466932e" +checksum = "709d9aa1c37abb89d40f19f5d0ad6f0d88cb1581264e571c9350fc5bb89cf1c5" dependencies = [ "chrono", "serde", @@ -4101,6 +4104,29 @@ dependencies = [ "pin-project-lite", ] +[[package]] +name = "http-body" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" +dependencies = [ + "bytes 1.5.0", + "http 1.0.0", +] + +[[package]] +name = "http-body-util" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" +dependencies = [ + "bytes 1.5.0", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "pin-project-lite", +] + [[package]] name = "http-range-header" version = "0.3.1" @@ -4168,7 +4194,7 @@ dependencies = [ "futures-util", "h2 0.3.24", "http 0.2.9", - "http-body", + "http-body 0.4.5", "httparse", "httpdate", "itoa", @@ -4180,6 +4206,40 @@ dependencies = [ "want", ] +[[package]] +name = "hyper" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" +dependencies = [ + "bytes 1.5.0", + "futures-channel", + "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "httparse", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + +[[package]] +name = "hyper-named-pipe" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73b7d8abf35697b81a825e386fc151e0d503e8cb5fcb93cc8669c376dfd6f278" +dependencies = [ + "hex", + "hyper 1.2.0", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", + "winapi", +] + [[package]] name = "hyper-openssl" version = "0.9.2" @@ -4187,7 +4247,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6ee5d7a8f718585d1c3c61dfde28ef5b0bb14734b4db13f5ada856cdc6c612b" dependencies = [ "http 0.2.9", - "hyper", + "hyper 0.14.28", "linked_hash_set", "once_cell", "openssl", @@ -4208,7 +4268,7 @@ dependencies = [ "futures 0.3.30", "headers", "http 0.2.9", - "hyper", + "hyper 0.14.28", "openssl", "tokio", "tokio-openssl", @@ -4223,7 +4283,7 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.9", - "hyper", + "hyper 0.14.28", "log", "rustls 0.21.8", "rustls-native-certs 0.6.3", @@ -4231,13 +4291,32 @@ dependencies = [ "tokio-rustls 0.24.1", ] +[[package]] +name = "hyper-rustls" +version = "0.26.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0bea761b46ae2b24eb4aef630d8d1c398157b6fc29e6350ecf090a0b70c952c" +dependencies = [ + "futures-util", + "http 1.0.0", + "hyper 1.2.0", + "hyper-util", + "log", + "rustls 0.22.2", + "rustls-native-certs 0.7.0", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.25.0", + "tower-service", +] + [[package]] name = "hyper-timeout" version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" dependencies = [ - "hyper", + "hyper 0.14.28", "pin-project-lite", "tokio", "tokio-io-timeout", @@ -4250,23 +4329,45 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ "bytes 1.5.0", - "hyper", + "hyper 0.14.28", "native-tls", "tokio", "tokio-native-tls", 
] [[package]] -name = "hyperlocal" -version = "0.8.0" +name = "hyper-util" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fafdf7b2b2de7c9784f76e02c0935e65a8117ec3b768644379983ab333ac98c" +checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ + "bytes 1.5.0", + "futures-channel", "futures-util", + "http 1.0.0", + "http-body 1.0.0", + "hyper 1.2.0", + "pin-project-lite", + "socket2 0.5.6", + "tokio", + "tower", + "tower-service", + "tracing 0.1.40", +] + +[[package]] +name = "hyperlocal-next" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "acf569d43fa9848e510358c07b80f4adf34084ddc28c6a4a651ee8474c070dcc" +dependencies = [ "hex", - "hyper", - "pin-project", + "http-body-util", + "hyper 1.2.0", + "hyper-util", + "pin-project-lite", "tokio", + "tower-service", ] [[package]] @@ -4710,8 +4811,8 @@ dependencies = [ "either", "futures 0.3.30", "http 0.2.9", - "http-body", - "hyper", + "http-body 0.4.5", + "hyper 0.14.28", "hyper-openssl", "hyper-timeout", "jsonpath_lib", @@ -7394,9 +7495,9 @@ dependencies = [ "futures-util", "h2 0.3.24", "http 0.2.9", - "http-body", - "hyper", - "hyper-rustls", + "http-body 0.4.5", + "hyper 0.14.28", + "hyper-rustls 0.24.2", "hyper-tls", "ipnet", "js-sys", @@ -9253,8 +9354,8 @@ dependencies = [ "flate2", "h2 0.3.24", "http 0.2.9", - "http-body", - "hyper", + "http-body 0.4.5", + "hyper 0.14.28", "hyper-timeout", "percent-encoding", "pin-project", @@ -9330,7 +9431,7 @@ dependencies = [ "futures-core", "futures-util", "http 0.2.9", - "http-body", + "http-body 0.4.5", "http-range-header", "mime", "pin-project-lite", @@ -9971,9 +10072,9 @@ dependencies = [ "hickory-proto", "hostname", "http 0.2.9", - "http-body", + "http-body 0.4.5", "http-serde", - "hyper", + "hyper 0.14.28", "hyper-openssl", "hyper-proxy", "indexmap 2.2.5", @@ -10589,7 +10690,7 @@ dependencies = [ "futures-util", "headers", "http 0.2.9", - "hyper", + "hyper 0.14.28", "log", "mime", "mime_guess", @@ -11060,7 +11161,7 @@ dependencies = [ "futures 0.3.30", "futures-timer", "http-types", - "hyper", + "hyper 0.14.28", "log", "once_cell", "regex", diff --git a/Cargo.toml b/Cargo.toml index 9bdf1c26d5f39..2497d73f29873 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -256,7 +256,7 @@ apache-avro = { version = "0.16.0", default-features = false, optional = true } axum = { version = "0.6.20", default-features = false } base64 = { version = "0.21.7", default-features = false, optional = true } bloomy = { version = "1.2.0", default-features = false, optional = true } -bollard = { version = "0.15.0", default-features = false, features = ["ssl", "chrono"], optional = true } +bollard = { version = "0.16.0", default-features = false, features = ["ssl", "chrono"], optional = true } bytes = { version = "1.5.0", default-features = false, features = ["serde"] } bytesize = { version = "1.3.0", default-features = false } chrono.workspace = true diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 746d2a7e171f4..001e98fb9f7ec 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -257,12 +257,14 @@ http-types,https://github.com/http-rs/http-types,MIT OR Apache-2.0,Yoshua Wuyts httparse,https://github.com/seanmonstar/httparse,MIT OR Apache-2.0,Sean McArthur httpdate,https://github.com/pyfisch/httpdate,MIT OR Apache-2.0,Pyfisch hyper,https://github.com/hyperium/hyper,MIT,Sean McArthur +hyper-named-pipe,https://github.com/fussybeaver/hyper-named-pipe,Apache-2.0,The 
hyper-named-pipe Authors hyper-openssl,https://github.com/sfackler/hyper-openssl,MIT OR Apache-2.0,Steven Fackler hyper-proxy,https://github.com/tafia/hyper-proxy,MIT,Johann Tuffe hyper-rustls,https://github.com/rustls/hyper-rustls,Apache-2.0 OR ISC OR MIT,The hyper-rustls Authors hyper-timeout,https://github.com/hjr3/hyper-timeout,MIT OR Apache-2.0,Herman J. Radtke III hyper-tls,https://github.com/hyperium/hyper-tls,MIT OR Apache-2.0,Sean McArthur -hyperlocal,https://github.com/softprops/hyperlocal,MIT,softprops +hyper-util,https://github.com/hyperium/hyper-util,MIT,Sean McArthur +hyperlocal-next,https://github.com/softprops/hyperlocal,MIT,softprops iana-time-zone,https://github.com/strawlab/iana-time-zone,MIT OR Apache-2.0,"Andrew Straw , René Kijewski , Ryan Lopopolo " iana-time-zone-haiku,https://github.com/strawlab/iana-time-zone,MIT OR Apache-2.0,René Kijewski ident_case,https://github.com/TedDriggs/ident_case,MIT OR Apache-2.0,Ted Driggs From c7e4e33ca0c479cd9c8b0c5af72f6bc804d287fe Mon Sep 17 00:00:00 2001 From: neuronull Date: Mon, 4 Mar 2024 10:19:24 -0700 Subject: [PATCH 0087/1491] chore(observability): extend component validation framework for more flexible test case building (#19941) * add fix and small refactor * fix compilation errors * 3 ticks * dont compute expected metrics in validator * cleanup * cleanup * clippy * feedback tz: sent_eventssssss * feedback tz: fix telemetry shutdown finishing logic * 3 ticks * small reorg to add sinks * mini refactor of the component spec validators * attempt to set expected values from the resource * feedback tz- from not try_from * back to 3 ticks * fix incorrect expected values * Even more reduction * clippy * add the discarded events total check * workaround the new sync issues * multi config support * cleanup * check events * partial feedback * thought i removed that * use ref * feedback: dont introduce PassThroughFail variant * feedback: adjust enum variant names for clarity * feedback: no idea what I was thinking with `input_codec` * spell check * fr * fix sync issues * remove unused enum variant * feedback- update docs * check_events * touchup * spell checker * merge leftover * chore(observability): extend component validation framework for more flexible test case coverage * feedback: log formating * feedback- better approach to driving shutdown * give a generous timeout * feedback --- src/components/validation/mod.rs | 12 +++++++ src/components/validation/resources/event.rs | 17 ++++++++-- src/components/validation/resources/http.rs | 25 +++++++++++--- src/components/validation/runner/mod.rs | 21 ++++++++++-- .../validators/component_spec/mod.rs | 34 +++++++++++++++++-- src/components/validation/validators/mod.rs | 1 + tests/validation/components/sinks/http.yaml | 9 ++--- .../components/sources/http_client.yaml | 13 +++---- .../components/sources/http_server.yaml | 13 +++---- 9 files changed, 117 insertions(+), 28 deletions(-) diff --git a/src/components/validation/mod.rs b/src/components/validation/mod.rs index 54b3d48238244..3bf8494a3d89b 100644 --- a/src/components/validation/mod.rs +++ b/src/components/validation/mod.rs @@ -8,6 +8,17 @@ mod validators; use crate::config::{BoxedSink, BoxedSource, BoxedTransform}; +/// For components implementing `ValidatableComponent` +pub mod prelude { + pub use super::ComponentTestCaseConfig; + pub use super::ExternalResource; + pub use super::HttpResourceConfig; + pub use super::ResourceDirection; + pub use super::ValidatableComponent; + pub use super::ValidationConfiguration; + pub use 
crate::register_validatable_component;
+}
+
 pub use self::resources::*;
 #[cfg(feature = "component-validation-runner")]
 pub use self::runner::*;
@@ -268,6 +279,7 @@ fn run_validation(configuration: ValidationConfiguration, test_case_data_path: s
         .enable_all()
         .build()
         .unwrap();
+
     rt.block_on(async {
         let mut runner = Runner::from_configuration(
             configuration,
diff --git a/src/components/validation/resources/event.rs b/src/components/validation/resources/event.rs
index 9008cf291ef46..4c03e3b4eed56 100644
--- a/src/components/validation/resources/event.rs
+++ b/src/components/validation/resources/event.rs
@@ -1,3 +1,5 @@
+use std::collections::HashMap;
+
 use bytes::BytesMut;
 use serde::Deserialize;
 use snafu::Snafu;
@@ -36,10 +38,12 @@ pub enum RawTestEvent {
 }
 
 #[derive(Clone, Debug, Deserialize)]
-#[serde(untagged)]
+#[serde(rename_all = "snake_case")]
 pub enum EventData {
-    /// A log event.
+    /// A simple log event.
     Log(String),
+    /// A log event built from key-value pairs
+    LogBuilder(HashMap<String, Value>),
 }
 
 impl EventData {
@@ -47,6 +51,15 @@ impl EventData {
     pub fn into_event(self) -> Event {
         match self {
             Self::Log(message) => Event::Log(LogEvent::from_bytes_legacy(&message.into())),
+            Self::LogBuilder(data) => {
+                let mut log_event = LogEvent::default();
+                for (k, v) in data {
+                    log_event
+                        .parse_path_and_insert(&k, v)
+                        .unwrap_or_else(|_| panic!("Unable to build log event for {}", &k));
+                }
+                Event::Log(log_event)
+            }
         }
     }
 }
diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs
index cb78aa50081ab..8fbe651cb4f8a 100644
--- a/src/components/validation/resources/http.rs
+++ b/src/components/validation/resources/http.rs
@@ -1,5 +1,5 @@
 use std::{
-    collections::VecDeque,
+    collections::{HashMap, VecDeque},
     future::Future,
     net::{IpAddr, SocketAddr},
     str::FromStr,
@@ -33,11 +33,21 @@ use super::{encode_test_event, ResourceCodec, ResourceDirection, TestEvent};
 pub struct HttpResourceConfig {
     uri: Uri,
     method: Option<Method>,
+    headers: Option<HashMap<String, String>>,
 }
 
 impl HttpResourceConfig {
     pub const fn from_parts(uri: Uri, method: Option<Method>) -> Self {
-        Self { uri, method }
+        Self {
+            uri,
+            method,
+            headers: None,
+        }
+    }
+
+    pub fn with_headers(mut self, headers: HashMap<String, String>) -> Self {
+        self.headers = Some(headers);
+        self
     }
 
     pub fn spawn_as_input(
@@ -219,6 +229,7 @@ fn spawn_input_http_client(
         let client = Client::builder().build_http::<Body>();
         let request_uri = config.uri;
         let request_method = config.method.unwrap_or(Method::POST);
+        let headers = config.headers.unwrap_or_default();
 
         while let Some(event) = input_rx.recv().await {
             debug!("Got event to send from runner.");
@@ -226,9 +237,15 @@ fn spawn_input_http_client(
             let mut buffer = BytesMut::new();
             encode_test_event(&mut encoder, &mut buffer, event);
 
-            let request = Request::builder()
+            let mut request_builder = Request::builder()
                 .uri(request_uri.clone())
-                .method(request_method.clone())
+                .method(request_method.clone());
+
+            for (key, value) in &headers {
+                request_builder = request_builder.header(key, value);
+            }
+
+            let request = request_builder
                 .body(buffer.freeze().into())
                 .expect("should not fail to build request");
 
diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs
index 9e2dc82a02831..561f1eeec7533 100644
--- a/src/components/validation/runner/mod.rs
+++ b/src/components/validation/runner/mod.rs
@@ -205,6 +205,14 @@ impl Runner {
         let test_cases = load_component_test_cases(&self.test_case_data_path)?;
 
         for test_case in test_cases {
+            println!("");
+            println!("");
+            info!(
"Running test '{}' case for component '{}' (type: {:?})...", + test_case.name, + self.configuration.component_name, + self.configuration.component_type() + ); // Create a task coordinator for each relevant phase of the test. // // This provides us the granularity to know when the tasks associated with each phase @@ -602,10 +610,17 @@ fn spawn_input_driver( if !failure_case || component_type == ComponentType::Sink { input_runner_metrics.sent_events_total += 1; - // The event is wrapped in a Vec to match the actual event storage in - // the real topology + // This particular metric is tricky because a component can run the + // EstimatedJsonSizeOf calculation on a single event or an array of + // events. If it's an array of events, the size calculation includes + // the size of bracket ('[', ']') characters... But we have no way + // of knowing which case it will be. Indeed, there are even components + // where BOTH scenarios are possible, depending on how the component + // is configured. + // This is handled in the component spec validator code where we compare + // the actual to the expected. input_runner_metrics.sent_event_bytes_total += - vec![event].estimated_json_encoded_size_of().get() as u64; + event.estimated_json_encoded_size_of().get() as u64; } } info!("Input driver sent all events."); diff --git a/src/components/validation/validators/component_spec/mod.rs b/src/components/validation/validators/component_spec/mod.rs index 5cce50c01c195..855a7633a9b71 100644 --- a/src/components/validation/validators/component_spec/mod.rs +++ b/src/components/validation/validators/component_spec/mod.rs @@ -30,6 +30,11 @@ impl Validator for ComponentSpecValidator { telemetry_events: &[Event], runner_metrics: &RunnerMetrics, ) -> Result, Vec> { + let expect_received_events = inputs + .iter() + .filter(|te| !te.should_fail() || te.should_reject()) + .count() as u64; + for input in inputs { info!("Validator observed input event: {:?}", input); } @@ -79,7 +84,12 @@ impl Validator for ComponentSpecValidator { format!("received {} telemetry events", telemetry_events.len()), ]; - let out = validate_telemetry(component_type, telemetry_events, runner_metrics)?; + let out = validate_telemetry( + component_type, + telemetry_events, + runner_metrics, + expect_received_events, + )?; run_out.extend(out); Ok(run_out) @@ -90,6 +100,7 @@ fn validate_telemetry( component_type: ComponentType, telemetry_events: &[Event], runner_metrics: &RunnerMetrics, + expect_received_events: u64, ) -> Result, Vec> { let mut out: Vec = Vec::new(); let mut errs: Vec = Vec::new(); @@ -111,6 +122,7 @@ fn validate_telemetry( runner_metrics, metric_type, component_type, + expect_received_events, ) { Err(e) => errs.extend(e), Ok(m) => out.extend(m), @@ -129,6 +141,7 @@ fn validate_metric( runner_metrics: &RunnerMetrics, metric_type: &ComponentMetricType, component_type: ComponentType, + expect_received_events: u64, ) -> Result, Vec> { let component_id = match component_type { ComponentType::Source => TEST_SOURCE_NAME, @@ -179,7 +192,13 @@ fn validate_metric( ComponentMetricType::DiscardedEventsTotal => runner_metrics.discarded_events_total, }; - compare_actual_to_expected(telemetry_events, metric_type, component_id, expected) + compare_actual_to_expected( + telemetry_events, + metric_type, + component_id, + expected, + expect_received_events, + ) } fn filter_events_by_metric_and_component<'a>( @@ -253,6 +272,7 @@ fn compare_actual_to_expected( metric_type: &ComponentMetricType, component_id: &str, expected: u64, + expect_received_events: 
 ) -> Result<Vec<String>, Vec<String>> {
     let mut errs: Vec<String> = Vec::new();
@@ -263,7 +283,15 @@ fn compare_actual_to_expected(
 
     info!("{}: expected {}, actual {}.", metric_type, expected, actual,);
 
-    if actual != expected {
+    if actual != expected &&
+        // This is a bit messy. The issue is that EstimatedJsonSizeOf can be called by a component
+        // on an event array, or on a single event. And we have no way of knowing which that is.
+        // By default the input driver for the framework is not assuming it is an array, so we
+        // check here if it matches what the array scenario would be, which is to add the size of
+        // the brackets, for each event.
+        (metric_type != &ComponentMetricType::EventsReceivedBytes
+            || (actual != (expected + (expect_received_events * 2))))
+    {
         errs.push(format!(
             "{}: expected {}, but received {}",
             metric_type, expected, actual
diff --git a/src/components/validation/validators/mod.rs b/src/components/validation/validators/mod.rs
index 7941a177a527e..13693eb5ecd0c 100644
--- a/src/components/validation/validators/mod.rs
+++ b/src/components/validation/validators/mod.rs
@@ -52,6 +52,7 @@ impl From<StandardValidators> for Box<dyn Validator> {
     }
 }
 
+#[derive(PartialEq)]
 pub enum ComponentMetricType {
     EventsReceived,
     EventsReceivedBytes,
diff --git a/tests/validation/components/sinks/http.yaml b/tests/validation/components/sinks/http.yaml
index 861e64f5fefc1..cf0f912d160a2 100644
--- a/tests/validation/components/sinks/http.yaml
+++ b/tests/validation/components/sinks/http.yaml
@@ -1,10 +1,11 @@
 - name: happy path
   expectation: success
   events:
-  - simple message 1
-  - simple message 2
-  - simple message 3
+  - log: simple message 1
+  - log: simple message 2
+  - log: simple message 3
 - name: sad path
   expectation: failure
   events:
-  - external_resource_rejects: simple message downstream rejects
+  - external_resource_rejects:
+      log: simple message downstream rejects
diff --git a/tests/validation/components/sources/http_client.yaml b/tests/validation/components/sources/http_client.yaml
index 437a7d680b566..5e1d8bf644038 100644
--- a/tests/validation/components/sources/http_client.yaml
+++ b/tests/validation/components/sources/http_client.yaml
@@ -1,12 +1,13 @@
 - name: happy path
   expectation: success
   events:
-  - simple message 1
-  - simple message 2
-  - simple message 3
+  - log: simple message 1
+  - log: simple message 2
+  - log: simple message 3
 - name: sad path
   expectation: partial_success
   events:
-  - simple message 1
-  - simple message 2
-  - fail_encoding_of: simple message with the wrong encoding
+  - log: simple message 1
+  - log: simple message 2
+  - fail_encoding_of:
+      log: simple message with the wrong encoding
diff --git a/tests/validation/components/sources/http_server.yaml b/tests/validation/components/sources/http_server.yaml
index 437a7d680b566..5e1d8bf644038 100644
--- a/tests/validation/components/sources/http_server.yaml
+++ b/tests/validation/components/sources/http_server.yaml
@@ -1,12 +1,13 @@
 - name: happy path
   expectation: success
   events:
-  - simple message 1
-  - simple message 2
-  - simple message 3
+  - log: simple message 1
+  - log: simple message 2
+  - log: simple message 3
 - name: sad path
   expectation: partial_success
   events:
-  - simple message 1
-  - simple message 2
-  - fail_encoding_of: simple message with the wrong encoding
+  - log: simple message 1
+  - log: simple message 2
+  - fail_encoding_of:
+      log: simple message with the wrong encoding

From 676318aa258e9b211fd6bd8330eb900788f0473f Mon Sep 17 00:00:00 2001
From: neuronull
Date: Mon, 4 Mar 2024 11:08:25 -0700
Subject: [PATCH 0088/1491] chore(dedupe
transform): expose deduping logic (#19992) --- Cargo.toml | 3 +- src/internal_events/mod.rs | 4 +- .../{dedupe.rs => dedupe/config.rs} | 253 ++---------------- src/transforms/dedupe/mod.rs | 111 ++++++++ src/transforms/dedupe/transform.rs | 139 ++++++++++ src/transforms/mod.rs | 3 +- 6 files changed, 270 insertions(+), 243 deletions(-) rename src/transforms/{dedupe.rs => dedupe/config.rs} (67%) create mode 100644 src/transforms/dedupe/mod.rs create mode 100644 src/transforms/dedupe/transform.rs diff --git a/Cargo.toml b/Cargo.toml index 2497d73f29873..6398ae4aad19f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -616,7 +616,7 @@ transforms-metrics = [ transforms-aggregate = [] transforms-aws_ec2_metadata = ["dep:arc-swap"] -transforms-dedupe = ["dep:lru"] +transforms-dedupe = ["transforms-impl-dedupe"] transforms-filter = [] transforms-log_to_metric = [] transforms-lua = ["dep:mlua", "vector-lib/lua"] @@ -631,6 +631,7 @@ transforms-throttle = ["dep:governor"] # Implementations of transforms transforms-impl-sample = [] +transforms-impl-dedupe = ["dep:lru"] # Sinks sinks = ["sinks-logs", "sinks-metrics"] diff --git a/src/internal_events/mod.rs b/src/internal_events/mod.rs index 1ca57638f20f4..0da2383b7eaee 100644 --- a/src/internal_events/mod.rs +++ b/src/internal_events/mod.rs @@ -34,7 +34,7 @@ mod conditions; mod datadog_metrics; #[cfg(feature = "sinks-datadog_traces")] mod datadog_traces; -#[cfg(feature = "transforms-dedupe")] +#[cfg(feature = "transforms-impl-dedupe")] mod dedupe; #[cfg(feature = "sources-demo_logs")] mod demo_logs; @@ -169,7 +169,7 @@ pub(crate) use self::codecs::*; pub(crate) use self::datadog_metrics::*; #[cfg(feature = "sinks-datadog_traces")] pub(crate) use self::datadog_traces::*; -#[cfg(feature = "transforms-dedupe")] +#[cfg(feature = "transforms-impl-dedupe")] pub(crate) use self::dedupe::*; #[cfg(feature = "sources-demo_logs")] pub(crate) use self::demo_logs::*; diff --git a/src/transforms/dedupe.rs b/src/transforms/dedupe/config.rs similarity index 67% rename from src/transforms/dedupe.rs rename to src/transforms/dedupe/config.rs index 722752e74c4ae..5ddf26e23ba52 100644 --- a/src/transforms/dedupe.rs +++ b/src/transforms/dedupe/config.rs @@ -1,74 +1,21 @@ -use std::{future::ready, num::NonZeroUsize, pin::Pin}; - -use bytes::Bytes; -use futures::{Stream, StreamExt}; -use lru::LruCache; -use vector_lib::config::{clone_input_definitions, LogNamespace}; -use vector_lib::configurable::configurable_component; -use vector_lib::lookup::lookup_v2::ConfigTargetPath; -use vrl::path::OwnedTargetPath; +use vector_lib::{ + config::{clone_input_definitions, LogNamespace}, + configurable::configurable_component, +}; use crate::{ config::{ - log_schema, DataType, GenerateConfig, Input, OutputId, TransformConfig, TransformContext, + DataType, GenerateConfig, Input, OutputId, TransformConfig, TransformContext, TransformOutput, }, - event::{Event, Value}, - internal_events::DedupeEventsDropped, schema, - transforms::{TaskTransform, Transform}, + transforms::Transform, }; -/// Options to control what fields to match against. -/// -/// When no field matching configuration is specified, events are matched using the `timestamp`, -/// `host`, and `message` fields from an event. The specific field names used are those set in -/// the global [`log schema`][global_log_schema] configuration. 
-///
-/// [global_log_schema]: https://vector.dev/docs/reference/configuration/global-options/#log_schema
-// TODO: This enum renders correctly in terms of providing equivalent Cue output when using the
-// machine-generated stuff vs the previously-hand-written Cue... but what it _doesn't_ have in the
-// machine-generated output is any sort of blurb that these "fields" (`match` and `ignore`) are
-// actually mutually exclusive.
-//
-// We know that to be the case when we're generating the output from the configuration schema, so we
-// need to emit something in that output to indicate as much, and further, actually use it on the
-// Cue side to add some sort of boilerplate about them being mutually exclusive, etc.
-#[configurable_component]
-#[derive(Clone, Debug)]
-#[serde(deny_unknown_fields)]
-pub enum FieldMatchConfig {
-    /// Matches events using only the specified fields.
-    #[serde(rename = "match")]
-    MatchFields(
-        #[configurable(metadata(
-            docs::examples = "field1",
-            docs::examples = "parent.child_field"
-        ))]
-        Vec<ConfigTargetPath>,
-    ),
-
-    /// Matches events using all fields except for the ignored ones.
-    #[serde(rename = "ignore")]
-    IgnoreFields(
-        #[configurable(metadata(
-            docs::examples = "field1",
-            docs::examples = "parent.child_field",
-            docs::examples = "host",
-            docs::examples = "hostname"
-        ))]
-        Vec<ConfigTargetPath>,
-    ),
-}
-
-/// Caching configuration for deduplication.
-#[configurable_component]
-#[derive(Clone, Debug)]
-#[serde(deny_unknown_fields)]
-pub struct CacheConfig {
-    /// Number of events to cache and use for comparing incoming events to previously seen events.
-    pub num_events: NonZeroUsize,
-}
+use super::{
+    common::{default_cache_config, fill_default_fields_match, CacheConfig, FieldMatchConfig},
+    transform::Dedupe,
+};
 
 /// Configuration for the `dedupe` transform.
 #[configurable_component(transform("dedupe", "Deduplicate logs passing through a topology."))]
@@ -84,59 +31,6 @@ pub struct DedupeConfig {
     pub cache: CacheConfig,
 }
 
-fn default_cache_config() -> CacheConfig {
-    CacheConfig {
-        num_events: NonZeroUsize::new(5000).expect("static non-zero number"),
-    }
-}
-
-// TODO: Add support to the `configurable(metadata(..))` helper attribute for passing an expression
-// that will provide the value for the metadata attribute's value, as well as letting all metadata
-// attributes have whatever value they want, so long as it can be serialized by `serde_json`.
-//
-// Once we have that, we could curry these default values (and others) via a metadata attribute
-// instead of via `serde(default = "...")` to allow for displaying default values in the
-// configuration schema _without_ actually changing how a field is populated during deserialization.
-//
-// See the comment in `fill_default_fields_match` for more information on why this is required.
-//
-// TODO: These values are used even for events with the new "Vector" log namespace.
-// These aren't great defaults in that case, but hard-coding isn't much better since the
-// structure can vary significantly. This should probably either become a required field
-// in the future, or maybe the "semantic meaning" can be utilized here.
-fn default_match_fields() -> Vec<ConfigTargetPath> {
-    let mut fields = Vec::new();
-    if let Some(message_key) = log_schema().message_key_target_path() {
-        fields.push(ConfigTargetPath(message_key.clone()));
-    }
-    if let Some(host_key) = log_schema().host_key_target_path() {
-        fields.push(ConfigTargetPath(host_key.clone()));
-    }
-    if let Some(timestamp_key) = log_schema().timestamp_key_target_path() {
-        fields.push(ConfigTargetPath(timestamp_key.clone()));
-    }
-    fields
-}
-
-impl DedupeConfig {
-    pub fn fill_default_fields_match(&self) -> FieldMatchConfig {
-        // We provide a default value on `fields`, based on `default_match_fields`, in order to
-        // drive the configuration schema and documentation. Since we're getting the values from the
-        // configured log schema, though, the default field values shown in the configuration
-        // schema/documentation may not be the same as an actual user's Vector configuration.
-        match &self.fields {
-            Some(FieldMatchConfig::MatchFields(x)) => FieldMatchConfig::MatchFields(x.clone()),
-            Some(FieldMatchConfig::IgnoreFields(y)) => FieldMatchConfig::IgnoreFields(y.clone()),
-            None => FieldMatchConfig::MatchFields(default_match_fields()),
-        }
-    }
-}
-
-pub struct Dedupe {
-    fields: FieldMatchConfig,
-    cache: LruCache<CacheEntry, bool>,
-}
-
 impl GenerateConfig for DedupeConfig {
     fn generate_config() -> toml::Value {
         toml::Value::try_from(Self {
@@ -151,7 +45,10 @@ impl GenerateConfig for DedupeConfig {
 #[typetag::serde(name = "dedupe")]
 impl TransformConfig for DedupeConfig {
     async fn build(&self, _context: &TransformContext) -> crate::Result<Transform> {
-        Ok(Transform::event_task(Dedupe::new(self.clone())))
+        Ok(Transform::event_task(Dedupe::new(
+            self.cache.num_events,
+            fill_default_fields_match(self.fields.as_ref()),
+        )))
     }
 
     fn input(&self) -> Input {
@@ -171,126 +68,6 @@ impl TransformConfig for DedupeConfig {
     }
 }
 
-type TypeId = u8;
-
-/// A CacheEntry comes in two forms, depending on the FieldMatchConfig in use.
-///
-/// When matching fields, a CacheEntry contains a vector of optional 2-tuples.
-/// Each element in the vector represents one field in the corresponding
-/// LogEvent. Elements in the vector will correspond 1:1 (and in order) to the
-/// fields specified in "fields.match". The tuples each store the TypeId for
-/// this field and the data as Bytes for the field. There is no need to store
-/// the field name because the elements of the vector correspond 1:1 to
-/// "fields.match", so there is never any ambiguity about what field is being
-/// referred to. If a field from "fields.match" does not show up in an incoming
-/// Event, the CacheEntry will have None in the correspond location in the
-/// vector.
-///
-/// When ignoring fields, a CacheEntry contains a vector of 3-tuples. Each
-/// element in the vector represents one field in the corresponding LogEvent.
-/// The tuples will each contain the field name, TypeId, and data as Bytes for
-/// the corresponding field (in that order). Since the set of fields that might
-/// go into CacheEntries is not known at startup, we must store the field names
-/// as part of CacheEntries. Since Event objects store their field in alphabetic
-/// order (as they are backed by a BTreeMap), and we build CacheEntries by
-/// iterating over the fields of the incoming Events, we know that the
-/// CacheEntries for 2 equivalent events will always contain the fields in the
-/// same order.
-#[derive(PartialEq, Eq, Hash)]
-enum CacheEntry {
-    Match(Vec<Option<(TypeId, Bytes)>>),
-    Ignore(Vec<(OwnedTargetPath, TypeId, Bytes)>),
-}
-
-/// Assigns a unique number to each of the types supported by Event::Value.
-const fn type_id_for_value(val: &Value) -> TypeId {
-    match val {
-        Value::Bytes(_) => 0,
-        Value::Timestamp(_) => 1,
-        Value::Integer(_) => 2,
-        Value::Float(_) => 3,
-        Value::Boolean(_) => 4,
-        Value::Object(_) => 5,
-        Value::Array(_) => 6,
-        Value::Null => 7,
-        Value::Regex(_) => 8,
-    }
-}
-
-impl Dedupe {
-    pub fn new(config: DedupeConfig) -> Self {
-        let num_entries = config.cache.num_events;
-        let fields = config.fill_default_fields_match();
-        Self {
-            fields,
-            cache: LruCache::new(num_entries),
-        }
-    }
-
-    fn transform_one(&mut self, event: Event) -> Option<Event> {
-        let cache_entry = build_cache_entry(&event, &self.fields);
-        if self.cache.put(cache_entry, true).is_some() {
-            emit!(DedupeEventsDropped { count: 1 });
-            None
-        } else {
-            Some(event)
-        }
-    }
-}
-
-/// Takes in an Event and returns a CacheEntry to place into the LRU cache
-/// containing all relevant information for the fields that need matching
-/// against according to the specified FieldMatchConfig.
-fn build_cache_entry(event: &Event, fields: &FieldMatchConfig) -> CacheEntry {
-    match &fields {
-        FieldMatchConfig::MatchFields(fields) => {
-            let mut entry = Vec::new();
-            for field_name in fields.iter() {
-                if let Some(value) = event.as_log().get(field_name) {
-                    entry.push(Some((type_id_for_value(value), value.coerce_to_bytes())));
-                } else {
-                    entry.push(None);
-                }
-            }
-            CacheEntry::Match(entry)
-        }
-        FieldMatchConfig::IgnoreFields(fields) => {
-            let mut entry = Vec::new();
-
-            if let Some(event_fields) = event.as_log().all_event_fields() {
-                if let Some(metadata_fields) = event.as_log().all_metadata_fields() {
-                    for (field_name, value) in event_fields.chain(metadata_fields) {
-                        if let Ok(path) = ConfigTargetPath::try_from(field_name) {
-                            if !fields.contains(&path) {
-                                entry.push((
-                                    path.0,
-                                    type_id_for_value(value),
-                                    value.coerce_to_bytes(),
-                                ));
-                            }
-                        }
-                    }
-                }
-            }
-
-            CacheEntry::Ignore(entry)
-        }
-    }
-}
-
-impl TaskTransform<Event> for Dedupe {
-    fn transform(
-        self: Box<Self>,
-        task: Pin<Box<dyn Stream<Item = Event> + Send>>,
-    ) -> Pin<Box<dyn Stream<Item = Event> + Send>>
-    where
-        Self: 'static,
-    {
-        let mut inner = self;
-        Box::pin(task.filter_map(move |v| ready(inner.transform_one(v))))
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use std::sync::Arc;
@@ -306,7 +83,7 @@ mod tests {
         event::{Event, LogEvent, ObjectMap, Value},
         test_util::components::assert_transform_compliance,
         transforms::{
-            dedupe::{CacheConfig, DedupeConfig, FieldMatchConfig},
+            dedupe::config::{CacheConfig, DedupeConfig, FieldMatchConfig},
             test::create_topology,
         },
     };
diff --git a/src/transforms/dedupe/mod.rs b/src/transforms/dedupe/mod.rs
new file mode 100644
index 0000000000000..efd1b75951597
--- /dev/null
+++ b/src/transforms/dedupe/mod.rs
@@ -0,0 +1,111 @@
+#[cfg(feature = "transforms-dedupe")]
+pub mod config;
+
+#[cfg(feature = "transforms-impl-dedupe")]
+pub mod transform;
+
+#[cfg(feature = "transforms-impl-dedupe")]
+pub mod common {
+    use std::num::NonZeroUsize;
+
+    use vector_lib::{configurable::configurable_component, lookup::lookup_v2::ConfigTargetPath};
+
+    use crate::config::log_schema;
+
+    /// Caching configuration for deduplication.
+    #[configurable_component]
+    #[derive(Clone, Debug)]
+    #[serde(deny_unknown_fields)]
+    pub struct CacheConfig {
+        /// Number of events to cache and use for comparing incoming events to previously seen events.
+        pub num_events: NonZeroUsize,
+    }
+
+    pub fn default_cache_config() -> CacheConfig {
+        CacheConfig {
+            num_events: NonZeroUsize::new(5000).expect("static non-zero number"),
+        }
+    }
+
+    /// Options to control what fields to match against.
+    ///
+    /// When no field matching configuration is specified, events are matched using the `timestamp`,
+    /// `host`, and `message` fields from an event. The specific field names used are those set in
+    /// the global [`log schema`][global_log_schema] configuration.
+    ///
+    /// [global_log_schema]: https://vector.dev/docs/reference/configuration/global-options/#log_schema
+    // TODO: This enum renders correctly in terms of providing equivalent Cue output when using the
+    // machine-generated stuff vs the previously-hand-written Cue... but what it _doesn't_ have in the
+    // machine-generated output is any sort of blurb that these "fields" (`match` and `ignore`) are
+    // actually mutually exclusive.
+    //
+    // We know that to be the case when we're generating the output from the configuration schema, so we
+    // need to emit something in that output to indicate as much, and further, actually use it on the
+    // Cue side to add some sort of boilerplate about them being mutually exclusive, etc.
+    #[configurable_component]
+    #[derive(Clone, Debug)]
+    #[serde(deny_unknown_fields)]
+    pub enum FieldMatchConfig {
+        /// Matches events using only the specified fields.
+        #[serde(rename = "match")]
+        MatchFields(
+            #[configurable(metadata(
+                docs::examples = "field1",
+                docs::examples = "parent.child_field"
+            ))]
+            Vec<ConfigTargetPath>,
+        ),
+
+        /// Matches events using all fields except for the ignored ones.
+        #[serde(rename = "ignore")]
+        IgnoreFields(
+            #[configurable(metadata(
+                docs::examples = "field1",
+                docs::examples = "parent.child_field",
+                docs::examples = "host",
+                docs::examples = "hostname"
+            ))]
+            Vec<ConfigTargetPath>,
+        ),
+    }
+
+    pub fn fill_default_fields_match(maybe_fields: Option<&FieldMatchConfig>) -> FieldMatchConfig {
+        // We provide a default value on `fields`, based on `default_match_fields`, in order to
+        // drive the configuration schema and documentation. Since we're getting the values from the
+        // configured log schema, though, the default field values shown in the configuration
+        // schema/documentation may not be the same as an actual user's Vector configuration.
+        match maybe_fields {
+            Some(FieldMatchConfig::MatchFields(x)) => FieldMatchConfig::MatchFields(x.clone()),
+            Some(FieldMatchConfig::IgnoreFields(y)) => FieldMatchConfig::IgnoreFields(y.clone()),
+            None => FieldMatchConfig::MatchFields(default_match_fields()),
+        }
+    }
+
+    // TODO: Add support to the `configurable(metadata(..))` helper attribute for passing an expression
+    // that will provide the value for the metadata attribute's value, as well as letting all metadata
+    // attributes have whatever value they want, so long as it can be serialized by `serde_json`.
+    //
+    // Once we have that, we could curry these default values (and others) via a metadata attribute
+    // instead of via `serde(default = "...")` to allow for displaying default values in the
+    // configuration schema _without_ actually changing how a field is populated during deserialization.
+    //
+    // See the comment in `fill_default_fields_match` for more information on why this is required.
+    //
+    // TODO: These values are used even for events with the new "Vector" log namespace.
+    // These aren't great defaults in that case, but hard-coding isn't much better since the
+    // structure can vary significantly. This should probably either become a required field
+    // This should probably either become a required field in the future, or maybe the
+    // "semantic meaning" can be utilized here.
+    fn default_match_fields() -> Vec<ConfigTargetPath> {
+        let mut fields = Vec::new();
+        if let Some(message_key) = log_schema().message_key_target_path() {
+            fields.push(ConfigTargetPath(message_key.clone()));
+        }
+        if let Some(host_key) = log_schema().host_key_target_path() {
+            fields.push(ConfigTargetPath(host_key.clone()));
+        }
+        if let Some(timestamp_key) = log_schema().timestamp_key_target_path() {
+            fields.push(ConfigTargetPath(timestamp_key.clone()));
+        }
+        fields
+    }
+}
diff --git a/src/transforms/dedupe/transform.rs b/src/transforms/dedupe/transform.rs
new file mode 100644
index 0000000000000..9c1432d2abb12
--- /dev/null
+++ b/src/transforms/dedupe/transform.rs
@@ -0,0 +1,139 @@
+use std::{future::ready, num::NonZeroUsize, pin::Pin};
+
+use bytes::Bytes;
+use futures::{Stream, StreamExt};
+use lru::LruCache;
+use vector_lib::lookup::lookup_v2::ConfigTargetPath;
+use vrl::path::OwnedTargetPath;
+
+use crate::{
+    event::{Event, Value},
+    internal_events::DedupeEventsDropped,
+    transforms::TaskTransform,
+};
+
+use super::common::FieldMatchConfig;
+
+#[derive(Clone)]
+pub struct Dedupe {
+    fields: FieldMatchConfig,
+    cache: LruCache<CacheEntry, bool>,
+}
+
+type TypeId = u8;
+
+/// A CacheEntry comes in two forms, depending on the FieldMatchConfig in use.
+///
+/// When matching fields, a CacheEntry contains a vector of optional 2-tuples.
+/// Each element in the vector represents one field in the corresponding
+/// LogEvent. Elements in the vector will correspond 1:1 (and in order) to the
+/// fields specified in "fields.match". The tuples each store the TypeId for
+/// this field and the data as Bytes for the field. There is no need to store
+/// the field name because the elements of the vector correspond 1:1 to
+/// "fields.match", so there is never any ambiguity about what field is being
+/// referred to. If a field from "fields.match" does not show up in an incoming
+/// Event, the CacheEntry will have None in the corresponding location in the
+/// vector.
+///
+/// When ignoring fields, a CacheEntry contains a vector of 3-tuples. Each
+/// element in the vector represents one field in the corresponding LogEvent.
+/// The tuples will each contain the field name, TypeId, and data as Bytes for
+/// the corresponding field (in that order). Since the set of fields that might
+/// go into CacheEntries is not known at startup, we must store the field names
+/// as part of CacheEntries. Since Event objects store their fields in
+/// alphabetical order (as they are backed by a BTreeMap), and we build
+/// CacheEntries by iterating over the fields of the incoming Events, we know
+/// that the CacheEntries for 2 equivalent events will always contain the
+/// fields in the same order.
+#[derive(Clone, PartialEq, Eq, Hash)]
+enum CacheEntry {
+    Match(Vec<Option<(TypeId, Bytes)>>),
+    Ignore(Vec<(OwnedTargetPath, TypeId, Bytes)>),
+}
+
+/// Assigns a unique number to each of the types supported by Event::Value.
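+///
+/// The type id is hashed alongside the value's byte representation so that values of
+/// different types that coerce to the same bytes (for example, the string "1" and the
+/// integer 1) still produce distinct cache entries.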
+const fn type_id_for_value(val: &Value) -> TypeId {
+    match val {
+        Value::Bytes(_) => 0,
+        Value::Timestamp(_) => 1,
+        Value::Integer(_) => 2,
+        Value::Float(_) => 3,
+        Value::Boolean(_) => 4,
+        Value::Object(_) => 5,
+        Value::Array(_) => 6,
+        Value::Null => 7,
+        Value::Regex(_) => 8,
+    }
+}
+
+impl Dedupe {
+    pub fn new(num_entries: NonZeroUsize, fields: FieldMatchConfig) -> Self {
+        Self {
+            fields,
+            cache: LruCache::new(num_entries),
+        }
+    }
+
+    pub fn transform_one(&mut self, event: Event) -> Option<Event> {
+        let cache_entry = build_cache_entry(&event, &self.fields);
+        if self.cache.put(cache_entry, true).is_some() {
+            emit!(DedupeEventsDropped { count: 1 });
+            None
+        } else {
+            Some(event)
+        }
+    }
+}
+
+/// Takes in an Event and returns a CacheEntry to place into the LRU cache
+/// containing all relevant information for the fields that need matching
+/// against according to the specified FieldMatchConfig.
+fn build_cache_entry(event: &Event, fields: &FieldMatchConfig) -> CacheEntry {
+    match &fields {
+        FieldMatchConfig::MatchFields(fields) => {
+            let mut entry = Vec::new();
+            for field_name in fields.iter() {
+                if let Some(value) = event.as_log().get(field_name) {
+                    entry.push(Some((type_id_for_value(value), value.coerce_to_bytes())));
+                } else {
+                    entry.push(None);
+                }
+            }
+            CacheEntry::Match(entry)
+        }
+        FieldMatchConfig::IgnoreFields(fields) => {
+            let mut entry = Vec::new();
+
+            if let Some(event_fields) = event.as_log().all_event_fields() {
+                if let Some(metadata_fields) = event.as_log().all_metadata_fields() {
+                    for (field_name, value) in event_fields.chain(metadata_fields) {
+                        if let Ok(path) = ConfigTargetPath::try_from(field_name) {
+                            if !fields.contains(&path) {
+                                entry.push((
+                                    path.0,
+                                    type_id_for_value(value),
+                                    value.coerce_to_bytes(),
+                                ));
+                            }
+                        }
+                    }
+                }
+            }
+
+            CacheEntry::Ignore(entry)
+        }
+    }
+}
+
+impl TaskTransform<Event> for Dedupe {
+    fn transform(
+        self: Box<Self>,
+        task: Pin<Box<dyn Stream<Item = Event> + Send>>,
+    ) -> Pin<Box<dyn Stream<Item = Event> + Send>>
+    where
+        Self: 'static,
+    {
+        let mut inner = self;
+        Box::pin(task.filter_map(move |v| ready(inner.transform_one(v))))
+    }
+}
diff --git a/src/transforms/mod.rs b/src/transforms/mod.rs
index e90cb5fa10dcb..3d6b6d7448399 100644
--- a/src/transforms/mod.rs
+++ b/src/transforms/mod.rs
@@ -4,14 +4,13 @@ use std::collections::HashSet;

 use snafu::Snafu;

+pub mod dedupe;
 pub mod sample;

 #[cfg(feature = "transforms-aggregate")]
 pub mod aggregate;
 #[cfg(feature = "transforms-aws_ec2_metadata")]
 pub mod aws_ec2_metadata;
-#[cfg(feature = "transforms-dedupe")]
-pub mod dedupe;
 #[cfg(feature = "transforms-filter")]
 pub mod filter;
 #[cfg(feature = "transforms-log_to_metric")]

From f34738e6737e79f77dc6aa9aecb8d00430f64d99 Mon Sep 17 00:00:00 2001
From: neuronull
Date: Mon, 4 Mar 2024 11:48:15 -0700
Subject: [PATCH 0089/1491] chore(ci): increase timeout for `cross` workflow
 (#20002)

---
 .github/workflows/cross.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/cross.yml b/.github/workflows/cross.yml
index 20878c8c7d9f9..695f79d0a9ab3 100644
--- a/.github/workflows/cross.yml
+++ b/.github/workflows/cross.yml
@@ -7,7 +7,7 @@ jobs:
   cross-linux:
     name: Cross - ${{ matrix.target }}
     runs-on: ubuntu-latest
-    timeout-minutes: 30
+    timeout-minutes: 45
    env:
      CARGO_INCREMENTAL: 0
    strategy:

From a59aeb921bc93bc7590265f9e4335a8d824b95b4 Mon Sep 17 00:00:00 2001
From: Jesse Szwedko
Date: Tue, 5 Mar 2024 06:34:02 -0800
Subject: [PATCH 0090/1491] chore(deps): Update mio (#20005)

Resolves RUSTSEC-2024-0019

Signed-off-by: Jesse Szwedko
---
 Cargo.lock | 4 ++--
 1
file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7fdc7b3879cfd..c9d9fc7b225ff 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5341,9 +5341,9 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.9" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" +checksum = "a4a650543ca06a924e8b371db273b2756685faae30f8487da1b56505a8f78b0c" dependencies = [ "libc", "log", From fa99d6c2cdc6457d6f70f00dccf8e03d57ffce3a Mon Sep 17 00:00:00 2001 From: neuronull Date: Tue, 5 Mar 2024 16:31:54 -0700 Subject: [PATCH 0091/1491] fix(splunk_hec_logs sink): don't remove timestamp for `raw` endpoint (#19975) * add fix and small refactor * fix compilation errors * 3 ticks * dont compute expected metrics in validator * cleanup * cleanup * clippy * feedback tz: sent_eventssssss * feedback tz: fix telemetry shutdown finishing logic * 3 ticks * small reorg to add sinks * mini refactor of the component spec validators * attempt to set expected values from the resource * feedback tz- from not try_from * back to 3 ticks * fix incorrect expected values * Even more reduction * clippy * add the discarded events total check * workaround the new sync issues * multi config support * cleanup * check events * partial feedback * thought i removed that * use ref * feedback: dont introduce PassThroughFail variant * feedback: adjust enum variant names for clarity * feedback: no idea what I was thinking with `input_codec` * spell check * fr * fix sync issues * remove unused enum variant * feedback- update docs * check_events * touchup * spell checker * merge leftover * chore(observability): extend component validation framework for more flexible test case coverage * save * save * feedback: log formating * feedback: add comment about rejected events * feedback- better approach to driving shutdown * give a generous timeout * fix bug in the sink * cleanup * changelog * feedback bg * simplify prelude --- Cargo.toml | 2 +- .../splunk_hec_logs_raw_timestamp.fix.md | 1 + src/components/validation/resources/http.rs | 6 ++ src/components/validation/runner/mod.rs | 2 +- src/sinks/prelude.rs | 6 +- src/sinks/splunk_hec/logs/config.rs | 89 +++++++++++++++---- src/sinks/splunk_hec/logs/sink.rs | 53 +++++------ .../components/sinks/splunk_hec_logs.yaml | 11 +++ 8 files changed, 120 insertions(+), 50 deletions(-) create mode 100644 changelog.d/splunk_hec_logs_raw_timestamp.fix.md create mode 100644 tests/validation/components/sinks/splunk_hec_logs.yaml diff --git a/Cargo.toml b/Cargo.toml index 6398ae4aad19f..6c23f2053c24a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -906,7 +906,7 @@ enterprise-tests = [ component-validation-runner = ["dep:tonic", "sources-internal_logs", "sources-internal_metrics", "sources-vector", "sinks-vector"] # For now, only include components that implement ValidatableComponent. # In the future, this can change to simply reference the targets `sources`, `transforms`, `sinks` -component-validation-tests = ["component-validation-runner", "sources-http_client", "sources-http_server", "sinks-http"] +component-validation-tests = ["component-validation-runner", "sources-http_client", "sources-http_server", "sinks-http", "sinks-splunk_hec"] # Grouping together features for benchmarks. We exclude the API client due to it causing the build process to run out # of memory when those additional dependencies are built in CI. 
diff --git a/changelog.d/splunk_hec_logs_raw_timestamp.fix.md b/changelog.d/splunk_hec_logs_raw_timestamp.fix.md
new file mode 100644
index 0000000000000..b5b553d486380
--- /dev/null
+++ b/changelog.d/splunk_hec_logs_raw_timestamp.fix.md
@@ -0,0 +1 @@
+The `splunk_hec_logs` sink, when configured with the `raw` endpoint target, was removing the timestamp from the event. This was due to a bug in the handling of the `auto_extract_timestamp` configuration option, which is only supposed to apply to the `event` endpoint target.
diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs
index 8fbe651cb4f8a..4932a2ff456c9 100644
--- a/src/components/validation/resources/http.rs
+++ b/src/components/validation/resources/http.rs
@@ -284,6 +284,12 @@ fn spawn_output_http_server(
     // First, we'll build and spawn our HTTP server.
     let decoder = codec.into_decoder()?;

+    // Note that we currently don't differentiate which events should and shouldn't be rejected -
+    // we reject all events in this server if any are marked for rejection.
+    // In the future it might be useful to be able to select which to reject. That will involve
+    // adding logic to the test case which is passed down here, and to the event itself. Since
+    // we can't guarantee the order of events, we'd need a way to flag which ones need to be
+    // rejected.
     let should_reject = input_events.iter().filter(|te| te.should_reject()).count() > 0;

     let (_, http_server_shutdown_tx) = spawn_http_server(
diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs
index 561f1eeec7533..76f88f2c03297 100644
--- a/src/components/validation/runner/mod.rs
+++ b/src/components/validation/runner/mod.rs
@@ -521,7 +521,7 @@ fn spawn_component_topology(
     let mut config = config_builder
         .build()
         .expect("config should not have any errors");
-    config.healthchecks.set_require_healthy(Some(true));
+    config.healthchecks.enabled = false;

     _ = std::thread::spawn(move || {
         let test_runtime = Builder::new_current_thread()
diff --git a/src/sinks/prelude.rs b/src/sinks/prelude.rs
index 1d197d7ccc80d..12ddbd7efd920 100644
--- a/src/sinks/prelude.rs
+++ b/src/sinks/prelude.rs
@@ -25,14 +25,10 @@ pub use vector_lib::{

 pub use crate::{
     codecs::{Encoder, EncodingConfig, Transformer},
-    components::validation::{
-        ExternalResource, HttpResourceConfig, ResourceDirection, ValidatableComponent,
-        ValidationConfiguration,
-    },
+    components::validation::prelude::*,
     config::{DataType, GenerateConfig, SinkConfig, SinkContext},
     event::{Event, LogEvent},
     internal_events::{SinkRequestBuildError, TemplateRenderingError},
-    register_validatable_component,
     sinks::{
         util::{
             builder::SinkBuilderExt,
diff --git a/src/sinks/splunk_hec/logs/config.rs b/src/sinks/splunk_hec/logs/config.rs
index 9670d928ff692..11f2a9d98bd01 100644
--- a/src/sinks/splunk_hec/logs/config.rs
+++ b/src/sinks/splunk_hec/logs/config.rs
@@ -1,36 +1,28 @@
 use std::sync::Arc;

-use futures_util::FutureExt;
-use tower::ServiceBuilder;
-use vector_lib::codecs::TextSerializerConfig;
-use vector_lib::configurable::configurable_component;
-use vector_lib::lookup::lookup_v2::{ConfigValuePath, OptionalTargetPath};
-use vector_lib::sensitive_string::SensitiveString;
-use vector_lib::sink::VectorSink;
+use vector_lib::{
+    codecs::{JsonSerializerConfig, MetricTagValues, TextSerializerConfig},
+    lookup::lookup_v2::{ConfigValuePath, OptionalTargetPath},
+    sensitive_string::SensitiveString,
+};

-use super::{encoder::HecLogsEncoder,
request_builder::HecLogsRequestBuilder, sink::HecLogsSink}; -use crate::sinks::splunk_hec::common::config_timestamp_key_target_path; use crate::{ - codecs::{Encoder, EncodingConfig}, - config::{AcknowledgementsConfig, DataType, GenerateConfig, Input, SinkConfig, SinkContext}, http::HttpClient, sinks::{ + prelude::*, splunk_hec::common::{ acknowledgements::HecClientAcknowledgementsConfig, build_healthcheck, build_http_batch_service, config_host_key_target_path, - create_client, + config_timestamp_key_target_path, create_client, service::{HecService, HttpRequestBuilder}, EndpointTarget, SplunkHecDefaultBatchSettings, }, - util::{ - http::HttpRetryLogic, BatchConfig, Compression, ServiceBuilderExt, TowerRequestConfig, - }, - Healthcheck, + util::http::HttpRetryLogic, }, - template::Template, - tls::TlsConfig, }; +use super::{encoder::HecLogsEncoder, request_builder::HecLogsRequestBuilder, sink::HecLogsSink}; + /// Configuration for the `splunk_hec_logs` sink. #[configurable_component(sink( "splunk_hec_logs", @@ -288,6 +280,67 @@ impl HecLogsSinkConfig { } } +impl ValidatableComponent for HecLogsSinkConfig { + fn validation_configuration() -> ValidationConfiguration { + let endpoint = "http://127.0.0.1:9001".to_string(); + + let mut batch = BatchConfig::default(); + batch.max_events = Some(1); + + let config = Self { + endpoint: endpoint.clone(), + default_token: "i_am_an_island".to_string().into(), + host_key: config_host_key_target_path(), + indexed_fields: vec![], + index: None, + sourcetype: None, + source: None, + encoding: EncodingConfig::new( + JsonSerializerConfig::new(MetricTagValues::Full).into(), + Transformer::default(), + ), + compression: Compression::default(), + batch, + request: TowerRequestConfig { + timeout_secs: 2, + retry_attempts: 0, + ..Default::default() + }, + tls: None, + acknowledgements: HecClientAcknowledgementsConfig { + indexer_acknowledgements_enabled: false, + ..Default::default() + }, + timestamp_nanos_key: None, + timestamp_key: config_timestamp_key_target_path(), + auto_extract_timestamp: None, + endpoint_target: EndpointTarget::Raw, + }; + + let endpoint = format!("{endpoint}/services/collector/raw"); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts( + http::Uri::try_from(&endpoint).expect("should not fail to parse URI"), + None, + ), + config.encoding.clone(), + ); + + ValidationConfiguration::from_sink( + Self::NAME, + vec![ComponentTestCaseConfig::from_sink( + config, + None, + Some(external_resource), + )], + ) + } +} + +register_validatable_component!(HecLogsSinkConfig); + #[cfg(test)] mod tests { use super::HecLogsSinkConfig; diff --git a/src/sinks/splunk_hec/logs/sink.rs b/src/sinks/splunk_hec/logs/sink.rs index ed74b57778fc2..898b4b5d9cb66 100644 --- a/src/sinks/splunk_hec/logs/sink.rs +++ b/src/sinks/splunk_hec/logs/sink.rs @@ -1,7 +1,5 @@ use std::{fmt, sync::Arc}; -use serde::Serialize; - use super::request_builder::HecLogsRequestBuilder; use crate::{ internal_events::SplunkEventTimestampInvalidType, @@ -195,9 +193,8 @@ impl Partitioner for EventPartitioner { } } -#[derive(PartialEq, Default, Clone, Debug, Serialize)] +#[derive(PartialEq, Default, Clone, Debug)] pub struct HecLogsProcessedEventMetadata { - pub event_byte_size: usize, pub sourcetype: Option, pub source: Option, pub index: Option, @@ -220,7 +217,6 @@ impl ByteSizeOf for HecLogsProcessedEventMetadata { pub type HecProcessedEvent = ProcessedEvent; pub fn process_log(event: Event, data: &HecLogData) -> HecProcessedEvent { - 
let event_byte_size = event.size_of(); let mut log = event.into_log(); let sourcetype = data @@ -237,27 +233,35 @@ pub fn process_log(event: Event, data: &HecLogData) -> HecProcessedEvent { let host = data.host_key.as_ref().and_then(|key| log.get(key)).cloned(); - let timestamp = data.timestamp_key.as_ref().and_then(|timestamp_key| { - match log.remove(timestamp_key) { - Some(Value::Timestamp(ts)) => { - // set nanos in log if valid timestamp in event and timestamp_nanos_key is configured - if let Some(key) = data.timestamp_nanos_key { - log.try_insert(event_path!(key), ts.timestamp_subsec_nanos() % 1_000_000); + let timestamp = match data.endpoint_target { + EndpointTarget::Event => { + data.timestamp_key.as_ref().and_then(|timestamp_key| { + match log.remove(timestamp_key) { + Some(Value::Timestamp(ts)) => { + // set nanos in log if valid timestamp in event and timestamp_nanos_key is configured + if let Some(key) = data.timestamp_nanos_key { + log.try_insert( + event_path!(key), + ts.timestamp_subsec_nanos() % 1_000_000, + ); + } + Some((ts.timestamp_millis() as f64) / 1000f64) + } + Some(value) => { + emit!(SplunkEventTimestampInvalidType { + r#type: value.kind_str() + }); + None + } + None => { + emit!(SplunkEventTimestampMissing {}); + None + } } - Some((ts.timestamp_millis() as f64) / 1000f64) - } - Some(value) => { - emit!(SplunkEventTimestampInvalidType { - r#type: value.kind_str() - }); - None - } - None => { - emit!(SplunkEventTimestampMissing {}); - None - } + }) } - }); + EndpointTarget::Raw => None, + }; let fields = data .indexed_fields @@ -269,7 +273,6 @@ pub fn process_log(event: Event, data: &HecLogData) -> HecProcessedEvent { .collect::(); let metadata = HecLogsProcessedEventMetadata { - event_byte_size, sourcetype, source, index, diff --git a/tests/validation/components/sinks/splunk_hec_logs.yaml b/tests/validation/components/sinks/splunk_hec_logs.yaml new file mode 100644 index 0000000000000..cf0f912d160a2 --- /dev/null +++ b/tests/validation/components/sinks/splunk_hec_logs.yaml @@ -0,0 +1,11 @@ +- name: happy path + expectation: success + events: + - log: simple message 1 + - log: simple message 2 + - log: simple message 3 +- name: sad path + expectation: failure + events: + - external_resource_rejects: + log: simple message downstream rejects From 9bf1872fa88ae94e99e01d87696ed294ad4a3da0 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 6 Mar 2024 06:45:13 -0800 Subject: [PATCH 0092/1491] chore(deps): Update lockfree-object-pool to 0.1.5 (#20001) We think this will fix #19627. Signed-off-by: Jesse Szwedko --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c9d9fc7b225ff..74c73348a1bf2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5032,9 +5032,9 @@ dependencies = [ [[package]] name = "lockfree-object-pool" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee33defb27b106378a6efcfcde4dda6226dfdac8ba7a2904f5bc93363cb88557" +checksum = "3a69c0481fc2424cb55795de7da41add33372ea75a94f9b6588ab6a2826dfebc" [[package]] name = "log" From 3b6066d9f93e753c0c4989173eaced46b1d2c519 Mon Sep 17 00:00:00 2001 From: Bruce Guenter Date: Wed, 6 Mar 2024 08:47:55 -0600 Subject: [PATCH 0093/1491] chore(core): Remove optionality from topology controller reload (#20010) The `TopologyController::reload` function took a configuration parameter to load as `Option`. 
However, if the config was `None`, it would immediately return with `ReloadOutcome::NoConfig`, which was also the only place that return value could be generated. Instead, this moves the handling of missing configurations to the two callers, simplifying the reload semantics.

---
 src/app.rs                 | 38 +++++++++++++++++++++++---------------
 src/topology/controller.rs | 13 ++-----------
 2 files changed, 25 insertions(+), 26 deletions(-)

diff --git a/src/app.rs b/src/app.rs
index 8cb968145b1d6..495e3d743c046 100644
--- a/src/app.rs
+++ b/src/app.rs
@@ -7,7 +7,7 @@ use futures::StreamExt;
 use futures_util::future::BoxFuture;
 use once_cell::race::OnceNonZeroUsize;
 use tokio::runtime::{self, Runtime};
-use tokio::sync::broadcast::error::RecvError;
+use tokio::sync::{broadcast::error::RecvError, MutexGuard};
 use tokio_stream::wrappers::UnboundedReceiverStream;

 #[cfg(feature = "enterprise")]
@@ -22,7 +22,7 @@ use crate::{
     cli::{handle_config_errors, LogFormat, Opts, RootOpts},
     config::{self, Config, ConfigPath},
     heartbeat,
-    internal_events::{VectorQuit, VectorStarted, VectorStopped},
+    internal_events::{VectorConfigLoadError, VectorQuit, VectorStarted, VectorStopped},
     signal::{SignalHandler, SignalPair, SignalRx, SignalTo},
     topology::{
         ReloadOutcome, RunningTopology, SharedTopologyController, ShutdownErrorReceiver,
@@ -340,12 +340,8 @@ async fn handle_signal(
 ) -> Option<SignalTo> {
     match signal {
         Ok(SignalTo::ReloadFromConfigBuilder(config_builder)) => {
-            let mut topology_controller = topology_controller.lock().await;
-            let new_config = config_builder.build().map_err(handle_config_errors).ok();
-            match topology_controller.reload(new_config).await {
-                ReloadOutcome::FatalError(error) => Some(SignalTo::Shutdown(Some(error))),
-                _ => None,
-            }
+            let topology_controller = topology_controller.lock().await;
+            reload_config_from_result(topology_controller, config_builder.build()).await
         }
         Ok(SignalTo::ReloadFromDisk) => {
             let mut topology_controller = topology_controller.lock().await;
@@ -361,14 +357,9 @@
                 signal_handler,
                 allow_empty_config,
             )
-            .await
-            .map_err(handle_config_errors)
-            .ok();
+            .await;

-            match topology_controller.reload(new_config).await {
-                ReloadOutcome::FatalError(error) => Some(SignalTo::Shutdown(Some(error))),
-                _ => None,
-            }
+            reload_config_from_result(topology_controller, new_config).await
         }
         Err(RecvError::Lagged(amt)) => {
             warn!("Overflow, dropped {} signals.", amt);
@@ -379,6 +370,23 @@
     }
 }

+async fn reload_config_from_result(
+    mut topology_controller: MutexGuard<'_, TopologyController>,
+    config: Result<Config, Vec<String>>,
+) -> Option<SignalTo> {
+    match config {
+        Ok(new_config) => match topology_controller.reload(new_config).await {
+            ReloadOutcome::FatalError(error) => Some(SignalTo::Shutdown(Some(error))),
+            _ => None,
+        },
+        Err(errors) => {
+            handle_config_errors(errors);
+            emit!(VectorConfigLoadError);
+            None
+        }
+    }
+}
+
 pub struct FinishedApplication {
     pub signal: SignalTo,
     pub signal_rx: SignalRx,
diff --git a/src/topology/controller.rs b/src/topology/controller.rs
index bc8f87c7de331..7468640351380 100644
--- a/src/topology/controller.rs
+++ b/src/topology/controller.rs
@@ -13,9 +13,7 @@ use crate::config::enterprise::{
     report_on_reload, EnterpriseError, EnterpriseMetadata, EnterpriseReporter,
 };
 use crate::extra_context::ExtraContext;
-use crate::internal_events::{
-    VectorConfigLoadError, VectorRecoveryError, VectorReloadError, VectorReloaded,
-};
+use crate::internal_events::{VectorRecoveryError, VectorReloadError, VectorReloaded};
 use crate::{config,
signal::ShutdownError, topology::RunningTopology}; @@ -58,7 +56,6 @@ impl std::fmt::Debug for TopologyController { #[derive(Clone, Debug)] pub enum ReloadOutcome { - NoConfig, MissingApiKey, Success, RolledBack, @@ -66,13 +63,7 @@ pub enum ReloadOutcome { } impl TopologyController { - pub async fn reload(&mut self, new_config: Option) -> ReloadOutcome { - if new_config.is_none() { - emit!(VectorConfigLoadError); - return ReloadOutcome::NoConfig; - } - let mut new_config = new_config.unwrap(); - + pub async fn reload(&mut self, mut new_config: config::Config) -> ReloadOutcome { new_config .healthchecks .set_require_healthy(self.require_healthy); From d75f74cd9f28621f676e5c93aefbdccd279662af Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 14:54:08 +0000 Subject: [PATCH 0094/1491] chore(deps): Bump cargo_toml from 0.19.1 to 0.19.2 (#20007) Bumps [cargo_toml](https://gitlab.com/lib.rs/cargo_toml) from 0.19.1 to 0.19.2. - [Commits](https://gitlab.com/lib.rs/cargo_toml/commits/v0.19.2) --- updated-dependencies: - dependency-name: cargo_toml dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/vector-vrl/web-playground/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 74c73348a1bf2..9878215b2d50d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1748,9 +1748,9 @@ checksum = "ade8366b8bd5ba243f0a58f036cc0ca8a2f069cff1a2351ef1cac6b083e16fc0" [[package]] name = "cargo_toml" -version = "0.19.1" +version = "0.19.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3dc9f7a067415ab5058020f04c60ec7b557084dbec0e021217bbabc7a8d38d14" +checksum = "a98356df42a2eb1bd8f1793ae4ee4de48e384dd974ce5eac8eee802edb7492be" dependencies = [ "serde", "toml", diff --git a/lib/vector-vrl/web-playground/Cargo.toml b/lib/vector-vrl/web-playground/Cargo.toml index 6260a58f0d09e..a613d72cd329d 100644 --- a/lib/vector-vrl/web-playground/Cargo.toml +++ b/lib/vector-vrl/web-playground/Cargo.toml @@ -19,4 +19,4 @@ vector-vrl-functions = { path = "../functions" } enrichment = { path = "../../enrichment" } [build-dependencies] -cargo_toml = "0.19.1" +cargo_toml = "0.19.2" From c1141f9288007ec79c140d551a5ddfef483c40c5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 14:54:33 +0000 Subject: [PATCH 0095/1491] chore(deps): Bump wasm-bindgen from 0.2.91 to 0.2.92 (#20009) Bumps [wasm-bindgen](https://github.com/rustwasm/wasm-bindgen) from 0.2.91 to 0.2.92. - [Release notes](https://github.com/rustwasm/wasm-bindgen/releases) - [Changelog](https://github.com/rustwasm/wasm-bindgen/blob/main/CHANGELOG.md) - [Commits](https://github.com/rustwasm/wasm-bindgen/compare/0.2.91...0.2.92) --- updated-dependencies: - dependency-name: wasm-bindgen dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9878215b2d50d..60342fddcba43 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10723,9 +10723,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1e124130aee3fb58c5bdd6b639a0509486b0338acaaae0c84a5124b0f588b7f" +checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -10733,9 +10733,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e7e1900c352b609c8488ad12639a311045f40a35491fb69ba8c12f758af70b" +checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" dependencies = [ "bumpalo", "log", @@ -10760,9 +10760,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b30af9e2d358182b5c7449424f017eba305ed32a7010509ede96cdc4696c46ed" +checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" dependencies = [ "quote 1.0.35", "wasm-bindgen-macro-support", @@ -10770,9 +10770,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "642f325be6301eb8107a83d12a8ac6c1e1c54345a7ef1a9261962dfefda09e66" +checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", @@ -10783,9 +10783,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.91" +version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f186bd2dcf04330886ce82d6f33dd75a7bfcf69ecf5763b89fcde53b6ac9838" +checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" [[package]] name = "wasm-streams" From cbebdb2689600b8515dc34430703c8281cf7caa0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Mar 2024 14:54:43 +0000 Subject: [PATCH 0096/1491] chore(deps): Bump pin-project from 1.1.4 to 1.1.5 (#20015) Bumps [pin-project](https://github.com/taiki-e/pin-project) from 1.1.4 to 1.1.5. - [Release notes](https://github.com/taiki-e/pin-project/releases) - [Changelog](https://github.com/taiki-e/pin-project/blob/main/CHANGELOG.md) - [Commits](https://github.com/taiki-e/pin-project/compare/v1.1.4...v1.1.5) --- updated-dependencies: - dependency-name: pin-project dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- Cargo.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 60342fddcba43..4cb200070130a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6397,18 +6397,18 @@ dependencies = [ [[package]] name = "pin-project" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0302c4a0442c456bd56f841aee5c3bfd17967563f6fadc9ceb9f9c23cf3807e0" +checksum = "b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.4" +version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "266c042b60c9c76b8d53061e52b2e0d1116abc57cefc8c5cd671619a56ac3690" +checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", diff --git a/Cargo.toml b/Cargo.toml index 6c23f2053c24a..3a137adb1bf47 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -129,7 +129,7 @@ members = [ chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } clap = { version = "4.5.1", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } indexmap = { version = "2.2.5", default-features = false, features = ["serde", "std"] } -pin-project = { version = "1.1.4", default-features = false } +pin-project = { version = "1.1.5", default-features = false } proptest = "1.4" proptest-derive = "0.4.0" serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } From 8db6288b4cc2ecf070649e0dc53879f267f41c32 Mon Sep 17 00:00:00 2001 From: neuronull Date: Wed, 6 Mar 2024 09:02:25 -0700 Subject: [PATCH 0097/1491] fix(splunk_hec source): calculate `EstimatedJsonSizeOf` for `component_received_event_bytes_total` before enrichment (#19942) * add fix and small refactor * fix compilation errors * 3 ticks * dont compute expected metrics in validator * cleanup * cleanup * clippy * feedback tz: sent_eventssssss * feedback tz: fix telemetry shutdown finishing logic * 3 ticks * small reorg to add sinks * mini refactor of the component spec validators * attempt to set expected values from the resource * feedback tz- from not try_from * back to 3 ticks * fix incorrect expected values * Even more reduction * clippy * add the discarded events total check * workaround the new sync issues * multi config support * cleanup * check events * partial feedback * thought i removed that * use ref * feedback: dont introduce PassThroughFail variant * feedback: adjust enum variant names for clarity * feedback: no idea what I was thinking with `input_codec` * spell check * fr * fix sync issues * remove unused enum variant * feedback- update docs * check_events * touchup * spell checker * merge leftover * chore(observability): extend component validation framework for more flexible test case coverage * fix(splunk_hec source): calculate EstimatedJsonSizeOf for component_received_event_bytes_total before enrichment * feedback bg * feedback: log formating * feedback- better approach to driving shutdown * give a generous timeout * feedback --- Cargo.toml | 9 +- .../splunk_hec_received_event_bytes.fix.md | 3 + src/sources/splunk_hec/mod.rs | 150 +++++++++++++----- .../components/sources/splunk_hec.yaml | 19 +++ 4 files 
changed, 137 insertions(+), 44 deletions(-) create mode 100644 changelog.d/splunk_hec_received_event_bytes.fix.md create mode 100644 tests/validation/components/sources/splunk_hec.yaml diff --git a/Cargo.toml b/Cargo.toml index 3a137adb1bf47..805cc1ecefb48 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -906,7 +906,14 @@ enterprise-tests = [ component-validation-runner = ["dep:tonic", "sources-internal_logs", "sources-internal_metrics", "sources-vector", "sinks-vector"] # For now, only include components that implement ValidatableComponent. # In the future, this can change to simply reference the targets `sources`, `transforms`, `sinks` -component-validation-tests = ["component-validation-runner", "sources-http_client", "sources-http_server", "sinks-http", "sinks-splunk_hec"] +component-validation-tests = [ + "component-validation-runner", + "sources-http_client", + "sources-http_server", + "sinks-http", + "sinks-splunk_hec", + "sources-splunk_hec" +] # Grouping together features for benchmarks. We exclude the API client due to it causing the build process to run out # of memory when those additional dependencies are built in CI. diff --git a/changelog.d/splunk_hec_received_event_bytes.fix.md b/changelog.d/splunk_hec_received_event_bytes.fix.md new file mode 100644 index 0000000000000..842b2abefd507 --- /dev/null +++ b/changelog.d/splunk_hec_received_event_bytes.fix.md @@ -0,0 +1,3 @@ +We now correctly calculate the estimated JSON size in bytes for the metric `component_received_event_bytes_total` for the `splunk_hec` source. + +Previously this was being calculated after event enrichment. It is now calculated before enrichment, for both `raw` and `event` endpoints. diff --git a/src/sources/splunk_hec/mod.rs b/src/sources/splunk_hec/mod.rs index b5db6414e3c27..6fe435fc97a47 100644 --- a/src/sources/splunk_hec/mod.rs +++ b/src/sources/splunk_hec/mod.rs @@ -11,18 +11,27 @@ use bytes::{Buf, Bytes}; use chrono::{DateTime, TimeZone, Utc}; use flate2::read::MultiGzDecoder; use futures::FutureExt; -use http::StatusCode; +use http::{StatusCode, Uri}; use hyper::{service::make_service_fn, Server}; use serde::Serialize; -use serde_json::{de::Read as JsonRead, Deserializer, Value as JsonValue}; +use serde_json::{ + de::{Read as JsonRead, StrRead}, + Deserializer, Value as JsonValue, +}; use snafu::Snafu; use tokio::net::TcpStream; use tower::ServiceBuilder; use tracing::Span; -use vector_lib::internal_event::{CountByteSize, InternalEventHandle as _, Registered}; use vector_lib::lookup::lookup_v2::OptionalValuePath; -use vector_lib::lookup::{self, event_path, owned_value_path}; use vector_lib::sensitive_string::SensitiveString; +use vector_lib::{ + codecs::decoding::DeserializerConfig, + lookup::{self, event_path, owned_value_path}, +}; +use vector_lib::{ + codecs::BytesDecoderConfig, + internal_event::{CountByteSize, InternalEventHandle as _, Registered}, +}; use vector_lib::{ config::{LegacyKey, LogNamespace}, event::BatchNotifier, @@ -41,6 +50,8 @@ use self::{ splunk_response::{HecResponse, HecResponseMetadata, HecStatusCode}, }; use crate::{ + codecs::DecodingConfig, + components::validation::prelude::*, config::{log_schema, DataType, Resource, SourceConfig, SourceContext, SourceOutput}, event::{Event, LogEvent, Value}, http::{build_http_trace_layer, KeepaliveConfig, MaxConnectionAgeLayer}, @@ -61,6 +72,8 @@ pub const INDEX: &str = "splunk_index"; pub const SOURCE: &str = "splunk_source"; pub const SOURCETYPE: &str = "splunk_sourcetype"; +const X_SPLUNK_REQUEST_CHANNEL: &str = 
"x-splunk-request-channel"; + /// Configuration for the `splunk_hec` source. #[configurable_component(source("splunk_hec", "Receive logs from Splunk."))] #[derive(Clone, Debug)] @@ -319,7 +332,7 @@ impl SplunkSource { fn event_service(&self, out: SourceSender) -> BoxedFilter<(Response,)> { let splunk_channel_query_param = warp::query::>() .map(|qs: HashMap| qs.get("channel").map(|v| v.to_owned())); - let splunk_channel_header = warp::header::optional::("x-splunk-request-channel"); + let splunk_channel_header = warp::header::optional::(X_SPLUNK_REQUEST_CHANNEL); let splunk_channel = splunk_channel_header .and(splunk_channel_query_param) @@ -349,7 +362,7 @@ impl SplunkSource { token: Option, channel: Option, remote: Option, - xff: Option, + remote_addr: Option, gzip: bool, body: Bytes, path: warp::path::FullPath| { @@ -391,15 +404,19 @@ impl SplunkSource { let mut error = None; let mut events = Vec::new(); - let iter = EventIterator::new( - Deserializer::from_str(&body).into_iter::(), + + let iter: EventIterator<'_, StrRead<'_>> = EventIteratorGenerator { + deserializer: Deserializer::from_str(&body).into_iter::(), channel, remote, - xff, + remote_addr, batch, - token.filter(|_| store_hec_token).map(Into::into), + token: token.filter(|_| store_hec_token).map(Into::into), log_namespace, - ); + events_received, + } + .into(); + for result in iter { match result { Ok(event) => events.push(event), @@ -411,11 +428,6 @@ impl SplunkSource { } if !events.is_empty() { - events_received.emit(CountByteSize( - events.len(), - events.estimated_json_encoded_size_of(), - )); - if let Err(ClosedError) = out.send_batch(events).await { return Err(Rejection::from(ApiError::ServerShutdown)); } @@ -607,7 +619,7 @@ impl SplunkSource { fn required_channel() -> BoxedFilter<(String,)> { let splunk_channel_query_param = warp::query::>() .map(|qs: HashMap| qs.get("channel").map(|v| v.to_owned())); - let splunk_channel_header = warp::header::optional::("x-splunk-request-channel"); + let splunk_channel_header = warp::header::optional::(X_SPLUNK_REQUEST_CHANNEL); splunk_channel_header .and(splunk_channel_query_param) @@ -638,22 +650,28 @@ struct EventIterator<'de, R: JsonRead<'de>> { token: Option>, /// Lognamespace to put the events in log_namespace: LogNamespace, + /// handle to EventsReceived registry + events_received: Registered, } -impl<'de, R: JsonRead<'de>> EventIterator<'de, R> { - fn new( - deserializer: serde_json::StreamDeserializer<'de, R, JsonValue>, - channel: Option, - remote: Option, - remote_addr: Option, - batch: Option, - token: Option>, - log_namespace: LogNamespace, - ) -> Self { - EventIterator { - deserializer, +/// Intermediate struct to generate an `EventIterator` +struct EventIteratorGenerator<'de, R: JsonRead<'de>> { + deserializer: serde_json::StreamDeserializer<'de, R, JsonValue>, + channel: Option, + batch: Option, + token: Option>, + log_namespace: LogNamespace, + events_received: Registered, + remote: Option, + remote_addr: Option, +} + +impl<'de, R: JsonRead<'de>> From> for EventIterator<'de, R> { + fn from(f: EventIteratorGenerator<'de, R>) -> Self { + Self { + deserializer: f.deserializer, events: 0, - channel: channel.map(Value::from), + channel: f.channel.map(Value::from), time: Time::Now(Utc::now()), extractors: [ // Extract the host field with the given priority: @@ -663,25 +681,28 @@ impl<'de, R: JsonRead<'de>> EventIterator<'de, R> { DefaultExtractor::new_with( "host", log_schema().host_key().cloned().into(), - remote_addr - .or_else(|| remote.map(|addr| addr.to_string())) + 
f.remote_addr + .or_else(|| f.remote.map(|addr| addr.to_string())) .map(Value::from), - log_namespace, + f.log_namespace, ), - DefaultExtractor::new("index", OptionalValuePath::new(INDEX), log_namespace), - DefaultExtractor::new("source", OptionalValuePath::new(SOURCE), log_namespace), + DefaultExtractor::new("index", OptionalValuePath::new(INDEX), f.log_namespace), + DefaultExtractor::new("source", OptionalValuePath::new(SOURCE), f.log_namespace), DefaultExtractor::new( "sourcetype", OptionalValuePath::new(SOURCETYPE), - log_namespace, + f.log_namespace, ), ], - batch, - token, - log_namespace, + batch: f.batch, + token: f.token, + log_namespace: f.log_namespace, + events_received: f.events_received, } } +} +impl<'de, R: JsonRead<'de>> EventIterator<'de, R> { fn build_event(&mut self, mut json: JsonValue) -> Result { // Construct Event from parsed json event let mut log = match self.log_namespace { @@ -803,6 +824,10 @@ impl<'de, R: JsonRead<'de>> EventIterator<'de, R> { let event: Value = event.into(); let mut log = LogEvent::from(event); + // EstimatedJsonSizeOf must be calculated before enrichment + self.events_received + .emit(CountByteSize(1, log.estimated_json_encoded_size_of())); + // The timestamp is extracted from the message for the Legacy namespace. self.log_namespace.insert_vector_metadata( &mut log, @@ -857,6 +882,11 @@ impl<'de, R: JsonRead<'de>> EventIterator<'de, R> { }, None => return Err(ApiError::MissingEventField { event: self.events }.into()), }; + + // EstimatedJsonSizeOf must be calculated before enrichment + self.events_received + .emit(CountByteSize(1, log.estimated_json_encoded_size_of())); + Ok(log) } } @@ -1022,6 +1052,8 @@ fn raw_event( log } }; + // We need to calculate the estimated json size of the event BEFORE enrichment. 
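+    // Enrichment below adds fields such as the channel, host, and timestamp, so measuring
+    // after those insertions would inflate `component_received_event_bytes_total`.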
+ events_received.emit(CountByteSize(1, log.estimated_json_encoded_size_of())); // Add channel log_namespace.insert_source_metadata( @@ -1057,10 +1089,7 @@ fn raw_event( log = log.with_batch_notifier(&batch); } - let event = Event::from(log); - events_received.emit(CountByteSize(1, event.estimated_json_encoded_size_of())); - - Ok(event) + Ok(Event::from(log)) } #[derive(Clone, Copy, Debug, Snafu)] @@ -1220,6 +1249,41 @@ fn response_json(code: StatusCode, body: impl Serialize) -> Response { warp::reply::with_status(warp::reply::json(&body), code).into_response() } +impl ValidatableComponent for SplunkConfig { + fn validation_configuration() -> ValidationConfiguration { + let config = Self { + address: default_socket_address(), + ..Default::default() + }; + + let listen_addr_http = format!("http://{}/services/collector/event", config.address); + let uri = Uri::try_from(&listen_addr_http).expect("should not fail to parse URI"); + + let framing = BytesDecoderConfig::new().into(); + let decoding = DeserializerConfig::Json(Default::default()); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts(uri, None).with_headers(HashMap::from([( + X_SPLUNK_REQUEST_CHANNEL.to_string(), + "channel".to_string(), + )])), + DecodingConfig::new(framing, decoding, false.into()), + ); + + ValidationConfiguration::from_source( + Self::NAME, + vec![ComponentTestCaseConfig::from_source( + config, + None, + Some(external_resource), + )], + ) + } +} + +register_validatable_component!(SplunkConfig); + #[cfg(feature = "sinks-splunk_hec")] #[cfg(test)] mod tests { diff --git a/tests/validation/components/sources/splunk_hec.yaml b/tests/validation/components/sources/splunk_hec.yaml new file mode 100644 index 0000000000000..2aa081aa4b58c --- /dev/null +++ b/tests/validation/components/sources/splunk_hec.yaml @@ -0,0 +1,19 @@ +- name: happy path + expectation: success + events: + - log_builder: + event: simple message 1 + - log_builder: + event: simple message 2 + - log_builder: + event: simple message 3 +- name: sad path + expectation: partial_success + events: + - log_builder: + event: simple message 1 + - log_builder: + event: simple message 2 + - fail_encoding_of: + log_builder: + event: "simple message with wrong encoding" From eb3099657f53c8de5584b20fbb68f05c342f93c7 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 6 Mar 2024 09:13:03 -0800 Subject: [PATCH 0098/1491] chore(ci): Use gzip compression for datadog_logs regression tests (#20020) This seems to be closer to what real-world usage would look like given customers would likely want to compress egress traffic to reduce bandwidth costs. 
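For reference, a minimal sketch of the updated sink block (the sink name here is
illustrative; values are taken from the regression case diff below, with all other
settings elided):

    sinks:
      datadog_logs:
        type: datadog_logs
        inputs: ["parse_message"]
        default_api_key: "DEADBEEF"
        compression: "gzip"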
Signed-off-by: Jesse Szwedko --- .../cases/datadog_agent_remap_datadog_logs/vector/vector.yaml | 1 + .../datadog_agent_remap_datadog_logs_acks/vector/vector.yaml | 1 + 2 files changed, 2 insertions(+) diff --git a/regression/cases/datadog_agent_remap_datadog_logs/vector/vector.yaml b/regression/cases/datadog_agent_remap_datadog_logs/vector/vector.yaml index 4691ea851f14c..11e10dfb2f902 100644 --- a/regression/cases/datadog_agent_remap_datadog_logs/vector/vector.yaml +++ b/regression/cases/datadog_agent_remap_datadog_logs/vector/vector.yaml @@ -42,6 +42,7 @@ sinks: inputs: [ "parse_message" ] endpoint: "http://localhost:8080" default_api_key: "DEADBEEF" + compression: "gzip" healthcheck: enabled: false buffer: diff --git a/regression/cases/datadog_agent_remap_datadog_logs_acks/vector/vector.yaml b/regression/cases/datadog_agent_remap_datadog_logs_acks/vector/vector.yaml index 2e9d42fd6db20..e55ba16bcf509 100644 --- a/regression/cases/datadog_agent_remap_datadog_logs_acks/vector/vector.yaml +++ b/regression/cases/datadog_agent_remap_datadog_logs_acks/vector/vector.yaml @@ -42,6 +42,7 @@ sinks: inputs: [ "parse_message" ] endpoint: "http://localhost:8080" default_api_key: "DEADBEEF" + compression: "gzip" healthcheck: enabled: false buffer: From ea377f007e0657d65915f90b46e602ad6a149708 Mon Sep 17 00:00:00 2001 From: neuronull Date: Wed, 6 Mar 2024 11:54:40 -0700 Subject: [PATCH 0099/1491] chore(observability): add component spec validation tests for `datadog_logs` sink (#19887) * chore(observability): add happy path component spec validation tests for `datadog_logs` sink * add sad path case --- Cargo.toml | 3 +- src/components/validation/runner/mod.rs | 4 ++ src/sinks/datadog/logs/config.rs | 66 +++++++++++++++---- src/sources/http_client/client.rs | 3 +- src/sources/http_server.rs | 3 +- .../components/sinks/datadog_logs.yaml | 11 ++++ 6 files changed, 74 insertions(+), 16 deletions(-) create mode 100644 tests/validation/components/sinks/datadog_logs.yaml diff --git a/Cargo.toml b/Cargo.toml index 805cc1ecefb48..a0567530629cf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -912,7 +912,8 @@ component-validation-tests = [ "sources-http_server", "sinks-http", "sinks-splunk_hec", - "sources-splunk_hec" + "sources-splunk_hec", + "sinks-datadog_logs", ] # Grouping together features for benchmarks. We exclude the API client due to it causing the build process to run out diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs index 76f88f2c03297..704444ded4c21 100644 --- a/src/components/validation/runner/mod.rs +++ b/src/components/validation/runner/mod.rs @@ -521,6 +521,10 @@ fn spawn_component_topology( let mut config = config_builder .build() .expect("config should not have any errors"); + + // It's possible we could extend the framework to allow specifying logic to + // handle that, but I don't see much value currently since the healthcheck is + // not enforced for components, and it doesn't impact the internal telemetry. 
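+    // Note that `enabled = false` skips the sink healthchecks entirely for the assembled
+    // test topology, rather than merely not requiring them to pass at startup as the
+    // previous `set_require_healthy(Some(true))` call did.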
config.healthchecks.enabled = false; _ = std::thread::spawn(move || { diff --git a/src/sinks/datadog/logs/config.rs b/src/sinks/datadog/logs/config.rs index 0163b9f6633a0..fe2f5f8306f44 100644 --- a/src/sinks/datadog/logs/config.rs +++ b/src/sinks/datadog/logs/config.rs @@ -2,28 +2,30 @@ use std::{convert::TryFrom, sync::Arc}; use indoc::indoc; use tower::ServiceBuilder; -use vector_lib::configurable::configurable_component; -use vector_lib::{config::proxy::ProxyConfig, schema::meaning}; + +use vector_lib::{ + codecs::{JsonSerializerConfig, MetricTagValues}, + config::proxy::ProxyConfig, + configurable::configurable_component, + schema::meaning, +}; use vrl::value::Kind; -use super::{service::LogApiRetry, sink::LogSinkBuilder}; -use crate::common::datadog; use crate::{ - codecs::Transformer, - config::{AcknowledgementsConfig, GenerateConfig, Input, SinkConfig, SinkContext}, + codecs::EncodingConfigWithFraming, + common::datadog, http::HttpClient, schema, sinks::{ datadog::{logs::service::LogApiService, DatadogCommonConfig, LocalDatadogCommonConfig}, - util::{ - http::RequestConfig, service::ServiceBuilderExt, BatchConfig, Compression, - SinkBatchSettings, - }, - Healthcheck, VectorSink, + prelude::*, + util::http::RequestConfig, }, tls::{MaybeTlsSettings, TlsEnableableConfig}, }; +use super::{service::LogApiRetry, sink::LogSinkBuilder}; + // The Datadog API has a hard limit of 5MB for uncompressed payloads. Above this // threshold the API will toss results. We previously serialized Events as they // came in -- a very CPU intensive process -- and to avoid that we only batch up @@ -182,6 +184,48 @@ impl SinkConfig for DatadogLogsConfig { } } +impl ValidatableComponent for DatadogLogsConfig { + fn validation_configuration() -> ValidationConfiguration { + let endpoint = "http://127.0.0.1:9005".to_string(); + let config = Self { + local_dd_common: LocalDatadogCommonConfig { + endpoint: Some(endpoint.clone()), + default_api_key: Some("unused".to_string().into()), + ..Default::default() + }, + ..Default::default() + }; + + let encoding = EncodingConfigWithFraming::new( + None, + JsonSerializerConfig::new(MetricTagValues::Full).into(), + config.encoding.clone(), + ); + + let logs_endpoint = format!("{endpoint}/api/v2/logs"); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts( + http::Uri::try_from(&logs_endpoint).expect("should not fail to parse URI"), + None, + ), + encoding, + ); + + ValidationConfiguration::from_sink( + Self::NAME, + vec![ComponentTestCaseConfig::from_sink( + config, + None, + Some(external_resource), + )], + ) + } +} + +register_validatable_component!(DatadogLogsConfig); + #[cfg(test)] mod test { use super::super::config::DatadogLogsConfig; diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs index 0466917cbdd1b..ae399b6e9601b 100644 --- a/src/sources/http_client/client.rs +++ b/src/sources/http_client/client.rs @@ -14,7 +14,6 @@ use crate::{ codecs::{Decoder, DecodingConfig}, config::{SourceConfig, SourceContext}, http::Auth, - register_validatable_component, serde::{default_decoding, default_framing_message_based}, sources, sources::util::{ @@ -27,7 +26,7 @@ use crate::{ tls::{TlsConfig, TlsSettings}, Result, }; -use crate::{components::validation::*, sources::util::http_client}; +use crate::{components::validation::prelude::*, sources::util::http_client}; use vector_lib::codecs::{ decoding::{DeserializerConfig, FramingConfig}, StreamDecodingError, diff --git 
a/src/sources/http_server.rs b/src/sources/http_server.rs index 255c4231d7bfd..32a1961348376 100644 --- a/src/sources/http_server.rs +++ b/src/sources/http_server.rs @@ -22,14 +22,13 @@ use vector_lib::{ use crate::{ codecs::{Decoder, DecodingConfig}, - components::validation::*, + components::validation::prelude::*, config::{ GenerateConfig, Resource, SourceAcknowledgementsConfig, SourceConfig, SourceContext, SourceOutput, }, event::{Event, Value}, http::KeepaliveConfig, - register_validatable_component, serde::{bool_or_struct, default_decoding}, sources::util::{ http::{add_query_parameters, HttpMethod}, diff --git a/tests/validation/components/sinks/datadog_logs.yaml b/tests/validation/components/sinks/datadog_logs.yaml new file mode 100644 index 0000000000000..cf0f912d160a2 --- /dev/null +++ b/tests/validation/components/sinks/datadog_logs.yaml @@ -0,0 +1,11 @@ +- name: happy path + expectation: success + events: + - log: simple message 1 + - log: simple message 2 + - log: simple message 3 +- name: sad path + expectation: failure + events: + - external_resource_rejects: + log: simple message downstream rejects From 44ed0d146e274c9593db17f8e9fe74de3833e58f Mon Sep 17 00:00:00 2001 From: neuronull Date: Wed, 6 Mar 2024 13:40:46 -0700 Subject: [PATCH 0100/1491] chore(tests): caller resolves the component validation framework test case path (#20021) --- src/components/validation/mod.rs | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/components/validation/mod.rs b/src/components/validation/mod.rs index 3bf8494a3d89b..e8126ba4159be 100644 --- a/src/components/validation/mod.rs +++ b/src/components/validation/mod.rs @@ -421,8 +421,7 @@ fn get_validation_configuration_from_test_case_path( } #[cfg(feature = "component-validation-runner")] -pub fn validate_component(test_case_data_path: &str) { - let test_case_data_path = std::path::PathBuf::from(test_case_data_path.to_string()); +pub fn validate_component(test_case_data_path: std::path::PathBuf) { if !test_case_data_path.exists() { panic!("Component validation test invoked with path to test case data that could not be found: {}", test_case_data_path.to_string_lossy()); } @@ -438,6 +437,9 @@ mod tests { #[test_generator::test_resources("tests/validation/components/**/*.yaml")] pub fn validate_component(test_case_data_path: &str) { crate::test_util::trace_init(); + + let test_case_data_path = std::path::PathBuf::from(test_case_data_path.to_string()); + super::validate_component(test_case_data_path); } } From 0f472db2b153566df47caec0c50b2f26ba0a2197 Mon Sep 17 00:00:00 2001 From: Bruce Guenter Date: Wed, 6 Mar 2024 15:53:51 -0600 Subject: [PATCH 0101/1491] chore(core): Add missing `TraceEvent::remove` function (#20023) --- lib/vector-core/src/event/trace.rs | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/lib/vector-core/src/event/trace.rs b/lib/vector-core/src/event/trace.rs index d52295822e323..3956a9532315b 100644 --- a/lib/vector-core/src/event/trace.rs +++ b/lib/vector-core/src/event/trace.rs @@ -112,6 +112,10 @@ impl TraceEvent { } None } + + pub fn remove<'a>(&mut self, key: impl TargetPath<'a>) -> Option { + self.0.remove(key) + } } impl From for TraceEvent { From a3bedbd70b6b297e3d7cf9868a7c82f87a86d548 Mon Sep 17 00:00:00 2001 From: neuronull Date: Wed, 6 Mar 2024 15:05:05 -0700 Subject: [PATCH 0102/1491] chore(testing): only compile ValidatableComponent in test runs (#20024) chore(testing): only compile ValidatableComponent in test builds --- src/sinks/datadog/logs/config.rs | 95 ++++++++++----------- 
src/sinks/http/config.rs | 89 ++++++++++--------- src/sinks/prelude.rs | 1 - src/sinks/splunk_hec/logs/config.rs | 128 ++++++++++++++-------------- src/sources/http_client/client.rs | 33 +------ src/sources/http_client/tests.rs | 35 +++++++- src/sources/http_server.rs | 68 +++++++-------- src/sources/splunk_hec/mod.rs | 93 ++++++++++---------- 8 files changed, 273 insertions(+), 269 deletions(-) diff --git a/src/sinks/datadog/logs/config.rs b/src/sinks/datadog/logs/config.rs index fe2f5f8306f44..376c24f18cf7d 100644 --- a/src/sinks/datadog/logs/config.rs +++ b/src/sinks/datadog/logs/config.rs @@ -4,15 +4,11 @@ use indoc::indoc; use tower::ServiceBuilder; use vector_lib::{ - codecs::{JsonSerializerConfig, MetricTagValues}, - config::proxy::ProxyConfig, - configurable::configurable_component, - schema::meaning, + config::proxy::ProxyConfig, configurable::configurable_component, schema::meaning, }; use vrl::value::Kind; use crate::{ - codecs::EncodingConfigWithFraming, common::datadog, http::HttpClient, schema, @@ -184,54 +180,57 @@ impl SinkConfig for DatadogLogsConfig { } } -impl ValidatableComponent for DatadogLogsConfig { - fn validation_configuration() -> ValidationConfiguration { - let endpoint = "http://127.0.0.1:9005".to_string(); - let config = Self { - local_dd_common: LocalDatadogCommonConfig { - endpoint: Some(endpoint.clone()), - default_api_key: Some("unused".to_string().into()), - ..Default::default() - }, - ..Default::default() - }; - - let encoding = EncodingConfigWithFraming::new( - None, - JsonSerializerConfig::new(MetricTagValues::Full).into(), - config.encoding.clone(), - ); - - let logs_endpoint = format!("{endpoint}/api/v2/logs"); - - let external_resource = ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts( - http::Uri::try_from(&logs_endpoint).expect("should not fail to parse URI"), - None, - ), - encoding, - ); - - ValidationConfiguration::from_sink( - Self::NAME, - vec![ComponentTestCaseConfig::from_sink( - config, - None, - Some(external_resource), - )], - ) - } -} - -register_validatable_component!(DatadogLogsConfig); - #[cfg(test)] mod test { - use super::super::config::DatadogLogsConfig; + use super::*; + use crate::codecs::EncodingConfigWithFraming; + use crate::components::validation::prelude::*; + use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; #[test] fn generate_config() { crate::test_util::test_generate_config::(); } + + impl ValidatableComponent for DatadogLogsConfig { + fn validation_configuration() -> ValidationConfiguration { + let endpoint = "http://127.0.0.1:9005".to_string(); + let config = Self { + local_dd_common: LocalDatadogCommonConfig { + endpoint: Some(endpoint.clone()), + default_api_key: Some("unused".to_string().into()), + ..Default::default() + }, + ..Default::default() + }; + + let encoding = EncodingConfigWithFraming::new( + None, + JsonSerializerConfig::new(MetricTagValues::Full).into(), + config.encoding.clone(), + ); + + let logs_endpoint = format!("{endpoint}/api/v2/logs"); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts( + http::Uri::try_from(&logs_endpoint).expect("should not fail to parse URI"), + None, + ), + encoding, + ); + + ValidationConfiguration::from_sink( + Self::NAME, + vec![ComponentTestCaseConfig::from_sink( + config, + None, + Some(external_resource), + )], + ) + } + } + + register_validatable_component!(DatadogLogsConfig); } diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index 
a6313c1f1ae5b..6c00c82ea05f0 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -10,7 +10,6 @@ use vector_lib::codecs::{ use crate::{ codecs::{EncodingConfigWithFraming, SinkType}, - components::validation::ComponentTestCaseConfig, http::{Auth, HttpClient, MaybeAuth}, sinks::{ prelude::*, @@ -307,46 +306,52 @@ impl SinkConfig for HttpSinkConfig { } } -impl ValidatableComponent for HttpSinkConfig { - fn validation_configuration() -> ValidationConfiguration { - use std::str::FromStr; - use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; - - let config = HttpSinkConfig { - uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") - .expect("should never fail to parse"), - method: HttpMethod::Post, - encoding: EncodingConfigWithFraming::new( - None, - JsonSerializerConfig::new(MetricTagValues::Full).into(), - Transformer::default(), - ), - auth: None, - headers: None, - compression: Compression::default(), - batch: BatchConfig::default(), - request: RequestConfig::default(), - tls: None, - acknowledgements: AcknowledgementsConfig::default(), - payload_prefix: String::new(), - payload_suffix: String::new(), - }; - - let external_resource = ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts(config.uri.uri.clone(), Some(config.method.into())), - config.encoding.clone(), - ); - - ValidationConfiguration::from_sink( - Self::NAME, - vec![ComponentTestCaseConfig::from_sink( - config, - None, - Some(external_resource), - )], - ) +#[cfg(test)] +mod tests { + use super::*; + use crate::components::validation::prelude::*; + + impl ValidatableComponent for HttpSinkConfig { + fn validation_configuration() -> ValidationConfiguration { + use std::str::FromStr; + use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; + + let config = HttpSinkConfig { + uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") + .expect("should never fail to parse"), + method: HttpMethod::Post, + encoding: EncodingConfigWithFraming::new( + None, + JsonSerializerConfig::new(MetricTagValues::Full).into(), + Transformer::default(), + ), + auth: None, + headers: None, + compression: Compression::default(), + batch: BatchConfig::default(), + request: RequestConfig::default(), + tls: None, + acknowledgements: AcknowledgementsConfig::default(), + payload_prefix: String::new(), + payload_suffix: String::new(), + }; + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts(config.uri.uri.clone(), Some(config.method.into())), + config.encoding.clone(), + ); + + ValidationConfiguration::from_sink( + Self::NAME, + vec![ComponentTestCaseConfig::from_sink( + config, + None, + Some(external_resource), + )], + ) + } } -} -register_validatable_component!(HttpSinkConfig); + register_validatable_component!(HttpSinkConfig); +} diff --git a/src/sinks/prelude.rs b/src/sinks/prelude.rs index 12ddbd7efd920..bdfcfb21db195 100644 --- a/src/sinks/prelude.rs +++ b/src/sinks/prelude.rs @@ -25,7 +25,6 @@ pub use vector_lib::{ pub use crate::{ codecs::{Encoder, EncodingConfig, Transformer}, - components::validation::prelude::*, config::{DataType, GenerateConfig, SinkConfig, SinkContext}, event::{Event, LogEvent}, internal_events::{SinkRequestBuildError, TemplateRenderingError}, diff --git a/src/sinks/splunk_hec/logs/config.rs b/src/sinks/splunk_hec/logs/config.rs index 11f2a9d98bd01..9735ef9382984 100644 --- a/src/sinks/splunk_hec/logs/config.rs +++ b/src/sinks/splunk_hec/logs/config.rs @@ -1,7 +1,7 @@ use std::sync::Arc; use 
vector_lib::{ - codecs::{JsonSerializerConfig, MetricTagValues, TextSerializerConfig}, + codecs::TextSerializerConfig, lookup::lookup_v2::{ConfigValuePath, OptionalTargetPath}, sensitive_string::SensitiveString, }; @@ -280,73 +280,75 @@ impl HecLogsSinkConfig { } } -impl ValidatableComponent for HecLogsSinkConfig { - fn validation_configuration() -> ValidationConfiguration { - let endpoint = "http://127.0.0.1:9001".to_string(); - - let mut batch = BatchConfig::default(); - batch.max_events = Some(1); - - let config = Self { - endpoint: endpoint.clone(), - default_token: "i_am_an_island".to_string().into(), - host_key: config_host_key_target_path(), - indexed_fields: vec![], - index: None, - sourcetype: None, - source: None, - encoding: EncodingConfig::new( - JsonSerializerConfig::new(MetricTagValues::Full).into(), - Transformer::default(), - ), - compression: Compression::default(), - batch, - request: TowerRequestConfig { - timeout_secs: 2, - retry_attempts: 0, - ..Default::default() - }, - tls: None, - acknowledgements: HecClientAcknowledgementsConfig { - indexer_acknowledgements_enabled: false, - ..Default::default() - }, - timestamp_nanos_key: None, - timestamp_key: config_timestamp_key_target_path(), - auto_extract_timestamp: None, - endpoint_target: EndpointTarget::Raw, - }; - - let endpoint = format!("{endpoint}/services/collector/raw"); - - let external_resource = ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts( - http::Uri::try_from(&endpoint).expect("should not fail to parse URI"), - None, - ), - config.encoding.clone(), - ); - - ValidationConfiguration::from_sink( - Self::NAME, - vec![ComponentTestCaseConfig::from_sink( - config, - None, - Some(external_resource), - )], - ) - } -} - -register_validatable_component!(HecLogsSinkConfig); - #[cfg(test)] mod tests { - use super::HecLogsSinkConfig; + use super::*; + use crate::components::validation::prelude::*; + use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; #[test] fn generate_config() { crate::test_util::test_generate_config::(); } + + impl ValidatableComponent for HecLogsSinkConfig { + fn validation_configuration() -> ValidationConfiguration { + let endpoint = "http://127.0.0.1:9001".to_string(); + + let mut batch = BatchConfig::default(); + batch.max_events = Some(1); + + let config = Self { + endpoint: endpoint.clone(), + default_token: "i_am_an_island".to_string().into(), + host_key: config_host_key_target_path(), + indexed_fields: vec![], + index: None, + sourcetype: None, + source: None, + encoding: EncodingConfig::new( + JsonSerializerConfig::new(MetricTagValues::Full).into(), + Transformer::default(), + ), + compression: Compression::default(), + batch, + request: TowerRequestConfig { + timeout_secs: 2, + retry_attempts: 0, + ..Default::default() + }, + tls: None, + acknowledgements: HecClientAcknowledgementsConfig { + indexer_acknowledgements_enabled: false, + ..Default::default() + }, + timestamp_nanos_key: None, + timestamp_key: config_timestamp_key_target_path(), + auto_extract_timestamp: None, + endpoint_target: EndpointTarget::Raw, + }; + + let endpoint = format!("{endpoint}/services/collector/raw"); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts( + http::Uri::try_from(&endpoint).expect("should not fail to parse URI"), + None, + ), + config.encoding.clone(), + ); + + ValidationConfiguration::from_sink( + Self::NAME, + vec![ComponentTestCaseConfig::from_sink( + config, + None, + Some(external_resource), 
+ )], + ) + } + } + + register_validatable_component!(HecLogsSinkConfig); } diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs index ae399b6e9601b..67fa609980f93 100644 --- a/src/sources/http_client/client.rs +++ b/src/sources/http_client/client.rs @@ -10,6 +10,7 @@ use snafu::ResultExt; use std::{collections::HashMap, time::Duration}; use tokio_util::codec::Decoder as _; +use crate::sources::util::http_client; use crate::{ codecs::{Decoder, DecodingConfig}, config::{SourceConfig, SourceContext}, @@ -26,7 +27,6 @@ use crate::{ tls::{TlsConfig, TlsSettings}, Result, }; -use crate::{components::validation::prelude::*, sources::util::http_client}; use vector_lib::codecs::{ decoding::{DeserializerConfig, FramingConfig}, StreamDecodingError, @@ -235,37 +235,6 @@ impl SourceConfig for HttpClientConfig { } } -impl ValidatableComponent for HttpClientConfig { - fn validation_configuration() -> ValidationConfiguration { - let uri = Uri::from_static("http://127.0.0.1:9898"); - - let config = Self { - endpoint: uri.to_string(), - interval: Duration::from_secs(1), - timeout: Duration::from_secs(1), - decoding: DeserializerConfig::Json(Default::default()), - ..Default::default() - }; - - let external_resource = ExternalResource::new( - ResourceDirection::Pull, - HttpResourceConfig::from_parts(uri, Some(config.method.into())), - config.get_decoding_config(None), - ); - - ValidationConfiguration::from_source( - Self::NAME, - vec![ComponentTestCaseConfig::from_source( - config, - None, - Some(external_resource), - )], - ) - } -} - -register_validatable_component!(HttpClientConfig); - impl HttpClientConfig { pub fn get_decoding_config(&self, log_namespace: Option) -> DecodingConfig { let decoding = self.decoding.clone(); diff --git a/src/sources/http_client/tests.rs b/src/sources/http_client/tests.rs index 751bd407d72e3..b834089a6d96c 100644 --- a/src/sources/http_client/tests.rs +++ b/src/sources/http_client/tests.rs @@ -1,13 +1,15 @@ +use http::Uri; use std::collections::HashMap; use tokio::time::Duration; -use vector_lib::codecs::CharacterDelimitedDecoderConfig; use warp::{http::HeaderMap, Filter}; +use crate::components::validation::prelude::*; use crate::sources::util::http::HttpMethod; use crate::{serde::default_decoding, serde::default_framing_message_based}; use vector_lib::codecs::decoding::{ CharacterDelimitedDecoderOptions, DeserializerConfig, FramingConfig, }; +use vector_lib::codecs::CharacterDelimitedDecoderConfig; use vector_lib::event::Event; use super::HttpClientConfig; @@ -36,6 +38,37 @@ fn http_client_generate_config() { test_generate_config::(); } +impl ValidatableComponent for HttpClientConfig { + fn validation_configuration() -> ValidationConfiguration { + let uri = Uri::from_static("http://127.0.0.1:9898"); + + let config = Self { + endpoint: uri.to_string(), + interval: Duration::from_secs(1), + timeout: Duration::from_secs(1), + decoding: DeserializerConfig::Json(Default::default()), + ..Default::default() + }; + + let external_resource = ExternalResource::new( + ResourceDirection::Pull, + HttpResourceConfig::from_parts(uri, Some(config.method.into())), + config.get_decoding_config(None), + ); + + ValidationConfiguration::from_source( + Self::NAME, + vec![ComponentTestCaseConfig::from_source( + config, + None, + Some(external_resource), + )], + ) + } +} + +register_validatable_component!(HttpClientConfig); + /// Bytes should be decoded and HTTP header set to text/plain. 
#[tokio::test] async fn bytes_decoding() { diff --git a/src/sources/http_server.rs b/src/sources/http_server.rs index 32a1961348376..467dd20a97a7b 100644 --- a/src/sources/http_server.rs +++ b/src/sources/http_server.rs @@ -2,7 +2,7 @@ use std::{collections::HashMap, net::SocketAddr}; use bytes::{Bytes, BytesMut}; use chrono::Utc; -use http::{StatusCode, Uri}; +use http::StatusCode; use http_serde; use tokio_util::codec::Decoder as _; use vrl::value::{kind::Collection, Kind}; @@ -22,7 +22,6 @@ use vector_lib::{ use crate::{ codecs::{Decoder, DecodingConfig}, - components::validation::prelude::*, config::{ GenerateConfig, Resource, SourceAcknowledgementsConfig, SourceConfig, SourceContext, SourceOutput, @@ -273,37 +272,6 @@ impl Default for SimpleHttpConfig { impl_generate_config_from_default!(SimpleHttpConfig); -impl ValidatableComponent for SimpleHttpConfig { - fn validation_configuration() -> ValidationConfiguration { - let config = Self { - decoding: Some(DeserializerConfig::Json(Default::default())), - ..Default::default() - }; - - let listen_addr_http = format!("http://{}/", config.address); - let uri = Uri::try_from(&listen_addr_http).expect("should not fail to parse URI"); - - let external_resource = ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts(uri, Some(config.method.into())), - config - .get_decoding_config() - .expect("should not fail to get decoding config"), - ); - - ValidationConfiguration::from_source( - Self::NAME, - vec![ComponentTestCaseConfig::from_source( - config, - None, - Some(external_resource), - )], - ) - } -} - -register_validatable_component!(SimpleHttpConfig); - const fn default_http_method() -> HttpMethod { HttpMethod::Post } @@ -549,7 +517,7 @@ mod tests { Compression, }; use futures::Stream; - use http::{HeaderMap, Method, StatusCode}; + use http::{HeaderMap, Method, StatusCode, Uri}; use similar_asserts::assert_eq; use vector_lib::codecs::{ decoding::{DeserializerConfig, FramingConfig}, @@ -564,6 +532,7 @@ mod tests { use crate::sources::http_server::HttpMethod; use crate::{ + components::validation::prelude::*, config::{log_schema, SourceConfig, SourceContext}, event::{Event, EventStatus, Value}, test_util::{ @@ -1552,4 +1521,35 @@ mod tests { ); } } + + impl ValidatableComponent for SimpleHttpConfig { + fn validation_configuration() -> ValidationConfiguration { + let config = Self { + decoding: Some(DeserializerConfig::Json(Default::default())), + ..Default::default() + }; + + let listen_addr_http = format!("http://{}/", config.address); + let uri = Uri::try_from(&listen_addr_http).expect("should not fail to parse URI"); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts(uri, Some(config.method.into())), + config + .get_decoding_config() + .expect("should not fail to get decoding config"), + ); + + ValidationConfiguration::from_source( + Self::NAME, + vec![ComponentTestCaseConfig::from_source( + config, + None, + Some(external_resource), + )], + ) + } + } + + register_validatable_component!(SimpleHttpConfig); } diff --git a/src/sources/splunk_hec/mod.rs b/src/sources/splunk_hec/mod.rs index 6fe435fc97a47..31493a2d9eb85 100644 --- a/src/sources/splunk_hec/mod.rs +++ b/src/sources/splunk_hec/mod.rs @@ -11,7 +11,7 @@ use bytes::{Buf, Bytes}; use chrono::{DateTime, TimeZone, Utc}; use flate2::read::MultiGzDecoder; use futures::FutureExt; -use http::{StatusCode, Uri}; +use http::StatusCode; use hyper::{service::make_service_fn, Server}; use serde::Serialize; use 
serde_json::{ @@ -22,16 +22,10 @@ use snafu::Snafu; use tokio::net::TcpStream; use tower::ServiceBuilder; use tracing::Span; +use vector_lib::internal_event::{CountByteSize, InternalEventHandle as _, Registered}; use vector_lib::lookup::lookup_v2::OptionalValuePath; +use vector_lib::lookup::{self, event_path, owned_value_path}; use vector_lib::sensitive_string::SensitiveString; -use vector_lib::{ - codecs::decoding::DeserializerConfig, - lookup::{self, event_path, owned_value_path}, -}; -use vector_lib::{ - codecs::BytesDecoderConfig, - internal_event::{CountByteSize, InternalEventHandle as _, Registered}, -}; use vector_lib::{ config::{LegacyKey, LogNamespace}, event::BatchNotifier, @@ -50,8 +44,6 @@ use self::{ splunk_response::{HecResponse, HecResponseMetadata, HecStatusCode}, }; use crate::{ - codecs::DecodingConfig, - components::validation::prelude::*, config::{log_schema, DataType, Resource, SourceConfig, SourceContext, SourceOutput}, event::{Event, LogEvent, Value}, http::{build_http_trace_layer, KeepaliveConfig, MaxConnectionAgeLayer}, @@ -1249,41 +1241,6 @@ fn response_json(code: StatusCode, body: impl Serialize) -> Response { warp::reply::with_status(warp::reply::json(&body), code).into_response() } -impl ValidatableComponent for SplunkConfig { - fn validation_configuration() -> ValidationConfiguration { - let config = Self { - address: default_socket_address(), - ..Default::default() - }; - - let listen_addr_http = format!("http://{}/services/collector/event", config.address); - let uri = Uri::try_from(&listen_addr_http).expect("should not fail to parse URI"); - - let framing = BytesDecoderConfig::new().into(); - let decoding = DeserializerConfig::Json(Default::default()); - - let external_resource = ExternalResource::new( - ResourceDirection::Push, - HttpResourceConfig::from_parts(uri, None).with_headers(HashMap::from([( - X_SPLUNK_REQUEST_CHANNEL.to_string(), - "channel".to_string(), - )])), - DecodingConfig::new(framing, decoding, false.into()), - ); - - ValidationConfiguration::from_source( - Self::NAME, - vec![ComponentTestCaseConfig::from_source( - config, - None, - Some(external_resource), - )], - ) - } -} - -register_validatable_component!(SplunkConfig); - #[cfg(feature = "sinks-splunk_hec")] #[cfg(test)] mod tests { @@ -1291,9 +1248,13 @@ mod tests { use chrono::{TimeZone, Utc}; use futures_util::Stream; + use http::Uri; use reqwest::{RequestBuilder, Response}; use serde::Deserialize; - use vector_lib::codecs::{JsonSerializerConfig, TextSerializerConfig}; + use vector_lib::codecs::{ + decoding::DeserializerConfig, BytesDecoderConfig, JsonSerializerConfig, + TextSerializerConfig, + }; use vector_lib::sensitive_string::SensitiveString; use vector_lib::{event::EventStatus, schema::Definition}; use vrl::path::PathPrefix; @@ -1303,7 +1264,8 @@ mod tests { config_host_key_target_path, config_timestamp_key_target_path, }; use crate::{ - codecs::EncodingConfig, + codecs::{DecodingConfig, EncodingConfig}, + components::validation::prelude::*, config::{log_schema, SinkConfig, SinkContext, SourceConfig, SourceContext}, event::{Event, LogEvent}, sinks::{ @@ -2650,4 +2612,39 @@ mod tests { assert_eq!(definitions, Some(expected_definition)); } + + impl ValidatableComponent for SplunkConfig { + fn validation_configuration() -> ValidationConfiguration { + let config = Self { + address: default_socket_address(), + ..Default::default() + }; + + let listen_addr_http = format!("http://{}/services/collector/event", config.address); + let uri = 
Uri::try_from(&listen_addr_http).expect("should not fail to parse URI");
+
+            let framing = BytesDecoderConfig::new().into();
+            let decoding = DeserializerConfig::Json(Default::default());
+
+            let external_resource = ExternalResource::new(
+                ResourceDirection::Push,
+                HttpResourceConfig::from_parts(uri, None).with_headers(HashMap::from([(
+                    X_SPLUNK_REQUEST_CHANNEL.to_string(),
+                    "channel".to_string(),
+                )])),
+                DecodingConfig::new(framing, decoding, false.into()),
+            );
+
+            ValidationConfiguration::from_source(
+                Self::NAME,
+                vec![ComponentTestCaseConfig::from_source(
+                    config,
+                    None,
+                    Some(external_resource),
+                )],
+            )
+        }
+    }
+
+    register_validatable_component!(SplunkConfig);
 }

From eb690d4343e74078e4debd9f9984bcf0e89ad8a5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?=
Date: Thu, 7 Mar 2024 12:26:37 +0100
Subject: [PATCH 0103/1491] feat(sources)!: add TCP mode to DNSTAP source
 (#19892)

* feat(sources): add TCP mode to DNSTAP source

This adds TCP mode to the DNSTAP source, while leaving the current one
("unix") as the default. This required a minor hack
(https://stackoverflow.com/questions/61216723/how-can-i-deserialize-an-enum-with-an-optional-internal-tag/61219284#61219284)
in serde deserialization.

Unix is the default when `cfg!(unix)` is true, otherwise TCP is the default
on port 9000. This can be changed to never default to TCP without explicitly
setting `mode` to TCP.

This is implemented as a different framestream source, reusing some parts of
the TCP source code and some parts of the framestream unix source.

Fixes: #19735

* Add tests for TCP framestream
* Add documentation for TCP mode
* Add missing configuration options for DNSTAP TCP mode
* Fix dnstap unix integration test
* Support tls client metadata for dnstap TCP source
* Add changelog entry
* Fix formatting in DNSTAP TCP address docs

Co-authored-by: Ursula Chen <58821586+urseberry@users.noreply.github.com>

* Remove @ from authors entry in changelog
* Fix warnings in framestream.rs
* Run generate-component-docs
* Fix broken generated logs

Since the DNSTAP config is a bit different from other configs, in that it
defaults to `unix` mode (so it had to be untagged for that to work), it does
not generate proper docs. This adds overrides to properly flag different
fields for `unix` or `tcp` mode.
* Fix `TcpConfig` serde tag attribute condition * Add origin permit config options to DNSTAP in TCP mode * Set `permit_origin` option to be relevant only in tcp mode * Add test case for permit_origin * Generate component docs * Add `ipallowlist` to spellcheck allowed words * Reduce duplication in DNSTAP source * Make `permit_origin` optional * Update component docs for dnstap * Make `mode` required for `dnstap` source * Update changelog entry * Add must_use to `with_allowlist` * Fix issues in tests * Fix clippy warnings * Enable std for `ipnet` for tests * Add ipnet dependency to vector-core Co-authored-by: Stephen Wakely * Make `tcp` depend on `ipnet` and `dnstap` depend on `tcp` --------- Co-authored-by: Ursula Chen <58821586+urseberry@users.noreply.github.com> Co-authored-by: Stephen Wakely --- .github/actions/spelling/allow.txt | 1 + Cargo.lock | 5 + Cargo.toml | 5 +- changelog.d/19892_dnstap_over_tcp.breaking.md | 3 + lib/vector-core/Cargo.toml | 1 + lib/vector-core/src/ipallowlist.rs | 40 + lib/vector-core/src/lib.rs | 1 + lib/vector-core/src/tls/incoming.rs | 54 +- lib/vector-lib/src/lib.rs | 4 +- src/internal_events/tcp.rs | 24 + src/sources/dnstap/mod.rs | 208 ++-- src/sources/dnstap/tcp.rs | 279 ++++++ src/sources/dnstap/unix.rs | 153 +++ src/sources/fluent/mod.rs | 1 + src/sources/logstash.rs | 1 + src/sources/mod.rs | 2 +- src/sources/socket/mod.rs | 3 + src/sources/socket/tcp.rs | 7 + src/sources/statsd/mod.rs | 1 + src/sources/syslog.rs | 1 + src/sources/util/framestream.rs | 885 ++++++++++++++---- src/sources/util/net/mod.rs | 5 +- src/sources/util/net/tcp/mod.rs | 12 +- .../components/sources/base/dnstap.cue | 197 +++- .../components/sources/base/socket.cue | 10 + .../reference/components/sources/dnstap.cue | 54 +- 26 files changed, 1658 insertions(+), 299 deletions(-) create mode 100644 changelog.d/19892_dnstap_over_tcp.breaking.md create mode 100644 lib/vector-core/src/ipallowlist.rs create mode 100644 src/sources/dnstap/tcp.rs create mode 100644 src/sources/dnstap/unix.rs diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index 1943e682bcc80..b258ab9021bb6 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -344,6 +344,7 @@ ifneq imobile influxd ionik +ipallowlist ipod ircd jemalloc diff --git a/Cargo.lock b/Cargo.lock index 4cb200070130a..b95fe9655c6b4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4566,6 +4566,9 @@ name = "ipnet" version = "2.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +dependencies = [ + "serde", +] [[package]] name = "ipnetwork" @@ -10081,6 +10084,7 @@ dependencies = [ "indoc", "infer 0.15.0", "inventory", + "ipnet", "itertools 0.12.1", "k8s-openapi 0.18.0", "kube", @@ -10364,6 +10368,7 @@ dependencies = [ "http 0.2.9", "hyper-proxy", "indexmap 2.2.5", + "ipnet", "metrics", "metrics-tracing-context", "metrics-util", diff --git a/Cargo.toml b/Cargo.toml index a0567530629cf..758c4979fe84a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -290,6 +290,7 @@ indexmap.workspace = true infer = { version = "0.15.0", default-features = false, optional = true} indoc = { version = "2.0.4", default-features = false } inventory = { version = "0.3.15", default-features = false } +ipnet = { version = "2", default-features = false, optional = true, features = ["serde", "std"] } itertools = { version = "0.12.1", default-features = false, optional = false, features = ["use_alloc"] } k8s-openapi = { 
version = "0.18.0", default-features = false, features = ["api", "v1_26"], optional = true } kube = { version = "0.82.0", default-features = false, features = ["client", "openssl-tls", "runtime"], optional = true } @@ -538,7 +539,7 @@ sources-aws_s3 = ["aws-core", "dep:aws-sdk-sqs", "dep:aws-sdk-s3", "dep:semver", sources-aws_sqs = ["aws-core", "dep:aws-sdk-sqs"] sources-datadog_agent = ["sources-utils-http-error", "protobuf-build"] sources-demo_logs = ["dep:fakedata"] -sources-dnstap = ["dep:base64", "dep:hickory-proto", "dep:dnsmsg-parser", "protobuf-build"] +sources-dnstap = ["sources-utils-net-tcp", "dep:base64", "dep:hickory-proto", "dep:dnsmsg-parser", "protobuf-build"] sources-docker_logs = ["docker"] sources-eventstoredb_metrics = [] sources-exec = [] @@ -580,7 +581,7 @@ sources-utils-http-prelude = ["sources-utils-http", "sources-utils-http-auth", " sources-utils-http-query = [] sources-utils-http-client = ["sources-utils-http", "sources-http_server"] sources-utils-net = ["sources-utils-net-tcp", "sources-utils-net-udp", "sources-utils-net-unix"] -sources-utils-net-tcp = ["listenfd"] +sources-utils-net-tcp = ["listenfd", "dep:ipnet"] sources-utils-net-udp = ["listenfd"] sources-utils-net-unix = [] diff --git a/changelog.d/19892_dnstap_over_tcp.breaking.md b/changelog.d/19892_dnstap_over_tcp.breaking.md new file mode 100644 index 0000000000000..bb9e1dca6353b --- /dev/null +++ b/changelog.d/19892_dnstap_over_tcp.breaking.md @@ -0,0 +1,3 @@ +Added support for TCP mode for DNSTAP source. As the `dnstap` source now supports multiple socket types, you will need to update your configuration to specify which type - either `mode: unix` for the existing unix sockets mode or `mode: tcp` for the new tcp mode. + +authors: esensar diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index c6995e3bce72a..2e909658b50b3 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -23,6 +23,7 @@ headers = { version = "0.3.9", default-features = false } http = { version = "0.2.9", default-features = false } hyper-proxy = { version = "0.9.1", default-features = false, features = ["openssl-tls"] } indexmap.workspace = true +ipnet = { version = "2", default-features = false, features = ["serde", "std"] } lookup = { package = "vector-lookup", path = "../vector-lookup" } metrics = "0.21.1" metrics-tracing-context = { version = "0.14.0", default-features = false } diff --git a/lib/vector-core/src/ipallowlist.rs b/lib/vector-core/src/ipallowlist.rs new file mode 100644 index 0000000000000..7475e92fd2e67 --- /dev/null +++ b/lib/vector-core/src/ipallowlist.rs @@ -0,0 +1,40 @@ +use serde::{Deserialize, Serialize}; +use std::cell::RefCell; +use vector_config::GenerateError; + +use ipnet::IpNet; +use vector_config::{configurable_component, Configurable, Metadata, ToValue}; +use vector_config_common::schema::{InstanceType, SchemaGenerator, SchemaObject}; + +/// IP network allowlist settings for network components +#[configurable_component] +#[derive(Clone, Debug, PartialEq, Eq)] +#[serde(deny_unknown_fields, transparent)] +#[configurable(metadata(docs::human_name = "Allowed IP network origins"))] +pub struct IpAllowlistConfig(pub Vec); + +/// IP network +#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] +#[serde(deny_unknown_fields, transparent)] +pub struct IpNetConfig(pub IpNet); + +impl ToValue for IpNetConfig { + fn to_value(&self) -> serde_json::Value { + serde_json::Value::String(self.0.to_string()) + } +} + +impl Configurable for IpNetConfig { + fn 
generate_schema( + _: &RefCell, + ) -> std::result::Result { + Ok(SchemaObject { + instance_type: Some(InstanceType::String.into()), + ..Default::default() + }) + } + + fn metadata() -> Metadata { + Metadata::with_description("IP network") + } +} diff --git a/lib/vector-core/src/lib.rs b/lib/vector-core/src/lib.rs index be60e0f03a128..2e71d42d8a1c3 100644 --- a/lib/vector-core/src/lib.rs +++ b/lib/vector-core/src/lib.rs @@ -30,6 +30,7 @@ pub mod config; pub mod event; pub mod fanout; +pub mod ipallowlist; pub mod metrics; pub mod partition; pub mod schema; diff --git a/lib/vector-core/src/tls/incoming.rs b/lib/vector-core/src/tls/incoming.rs index 2f988fef2427c..ca1d1f7dbc185 100644 --- a/lib/vector-core/src/tls/incoming.rs +++ b/lib/vector-core/src/tls/incoming.rs @@ -1,3 +1,4 @@ +use ipnet::IpNet; use std::{ collections::HashMap, future::Future, @@ -48,24 +49,64 @@ impl MaybeTlsSettings { Self::Raw(()) => None, }; - Ok(MaybeTlsListener { listener, acceptor }) + Ok(MaybeTlsListener { + listener, + acceptor, + origin_filter: None, + }) + } + + pub async fn bind_with_allowlist( + &self, + addr: &SocketAddr, + allow_origin: Vec, + ) -> crate::tls::Result { + let listener = TcpListener::bind(addr).await.context(TcpBindSnafu)?; + + let acceptor = match self { + Self::Tls(tls) => Some(tls.acceptor()?), + Self::Raw(()) => None, + }; + + Ok(MaybeTlsListener { + listener, + acceptor, + origin_filter: Some(allow_origin), + }) } } pub struct MaybeTlsListener { listener: TcpListener, acceptor: Option, + origin_filter: Option>, } impl MaybeTlsListener { pub async fn accept(&mut self) -> crate::tls::Result> { - self.listener + let listener = self + .listener .accept() .await .map(|(stream, peer_addr)| { MaybeTlsIncomingStream::new(stream, peer_addr, self.acceptor.clone()) }) - .context(IncomingListenerSnafu) + .context(IncomingListenerSnafu)?; + + if let Some(origin_filter) = &self.origin_filter { + if origin_filter + .iter() + .any(|net| net.contains(&listener.peer_addr().ip())) + { + Ok(listener) + } else { + Err(TlsError::Connect { + source: std::io::ErrorKind::ConnectionRefused.into(), + }) + } + } else { + Ok(listener) + } } async fn into_accept( @@ -127,6 +168,12 @@ impl MaybeTlsListener { pub fn local_addr(&self) -> Result { self.listener.local_addr() } + + #[must_use] + pub fn with_allowlist(mut self, allowlist: Option>) -> Self { + self.origin_filter = allowlist; + self + } } impl From for MaybeTlsListener { @@ -134,6 +181,7 @@ impl From for MaybeTlsListener { Self { listener, acceptor: None, + origin_filter: None, } } } diff --git a/lib/vector-lib/src/lib.rs b/lib/vector-lib/src/lib.rs index 9ffa72947f373..74e041ec7c0c7 100644 --- a/lib/vector-lib/src/lib.rs +++ b/lib/vector-lib/src/lib.rs @@ -18,8 +18,8 @@ pub use vector_config::impl_generate_config_from_default; #[cfg(feature = "vrl")] pub use vector_core::compile_vrl; pub use vector_core::{ - buckets, default_data_dir, emit, event, fanout, metric_tags, metrics, partition, quantiles, - register, samples, schema, serde, sink, source, tcp, tls, transform, update_counter, + buckets, default_data_dir, emit, event, fanout, ipallowlist, metric_tags, metrics, partition, + quantiles, register, samples, schema, serde, sink, source, tcp, tls, transform, update_counter, EstimatedJsonEncodedSizeOf, }; pub use vector_lookup as lookup; diff --git a/src/internal_events/tcp.rs b/src/internal_events/tcp.rs index 8372e5fd45082..2f73a97be6181 100644 --- a/src/internal_events/tcp.rs +++ b/src/internal_events/tcp.rs @@ -44,6 +44,30 @@ impl InternalEvent for 
TcpSocketConnectionShutdown { } } +#[derive(Debug)] +pub struct TcpSocketError<'a, E> { + pub(crate) error: &'a E, + pub peer_addr: SocketAddr, +} + +impl InternalEvent for TcpSocketError<'_, E> { + fn emit(self) { + error!( + message = "TCP socket error.", + error = %self.error, + peer_addr = ?self.peer_addr, + error_type = error_type::CONNECTION_FAILED, + stage = error_stage::PROCESSING, + internal_log_rate_limit = true, + ); + counter!( + "component_errors_total", 1, + "error_type" => error_type::CONNECTION_FAILED, + "stage" => error_stage::PROCESSING, + ); + } +} + #[derive(Debug)] pub struct TcpSocketTlsConnectionError { pub error: TlsError, diff --git a/src/sources/dnstap/mod.rs b/src/sources/dnstap/mod.rs index 8973ca8bac9db..60d109cd11290 100644 --- a/src/sources/dnstap/mod.rs +++ b/src/sources/dnstap/mod.rs @@ -1,39 +1,44 @@ use std::path::PathBuf; use base64::prelude::{Engine as _, BASE64_STANDARD}; -use bytes::Bytes; -use vector_lib::configurable::configurable_component; +use vector_lib::event::{Event, LogEvent}; use vector_lib::internal_event::{ - ByteSize, BytesReceived, InternalEventHandle as _, Protocol, Registered, + ByteSize, BytesReceived, InternalEventHandle, Protocol, Registered, }; -use vector_lib::lookup::{owned_value_path, path, OwnedValuePath}; -use vrl::path::PathPrefix; +use vector_lib::lookup::{owned_value_path, path}; +use vector_lib::{configurable::configurable_component, tls::MaybeTlsSettings}; +use vrl::path::{OwnedValuePath, PathPrefix}; use vrl::value::{kind::Collection, Kind}; -use super::util::framestream::{build_framestream_unix_source, FrameHandler}; +use self::parser::DnstapParser; + +use super::util::framestream::{ + build_framestream_tcp_source, build_framestream_unix_source, FrameHandler, +}; +use crate::internal_events::DnstapParseError; +use crate::sources::dnstap::schema::DNSTAP_VALUE_PATHS; use crate::{ config::{log_schema, DataType, SourceConfig, SourceContext, SourceOutput}, - event::{Event, LogEvent}, - internal_events::{DnstapParseError, SocketEventsReceived, SocketMode}, Result, }; pub mod parser; pub mod schema; -use crate::sources::dnstap::parser::DnstapParser; -use crate::sources::dnstap::schema::DNSTAP_VALUE_PATHS; +pub mod tcp; +#[cfg(unix)] +pub mod unix; use dnsmsg_parser::{dns_message, dns_message_parser}; pub use schema::DnstapEventSchema; +use vector_lib::config::{LegacyKey, LogNamespace}; use vector_lib::lookup::lookup_v2::OptionalValuePath; -use vector_lib::{ - config::{LegacyKey, LogNamespace}, - EstimatedJsonEncodedSizeOf, -}; /// Configuration for the `dnstap` source. #[configurable_component(source("dnstap", "Collect DNS logs from a dnstap-compatible server."))] #[derive(Clone, Debug)] pub struct DnstapConfig { + #[serde(flatten)] + pub mode: Mode, + /// Maximum DNSTAP frame length that the source accepts. /// /// If any frame is longer than this, it is discarded. @@ -50,17 +55,11 @@ pub struct DnstapConfig { /// [global_host_key]: https://vector.dev/docs/reference/configuration/global-options/#log_schema.host_key pub host_key: Option, - /// Absolute path to the socket file to read DNSTAP data from. - /// - /// The DNS server must be configured to send its DNSTAP data to this socket file. The socket file is created - /// if it doesn't already exist when the source first starts. - pub socket_path: PathBuf, - /// Whether or not to skip parsing or decoding of DNSTAP frames. /// /// If set to `true`, frames are not parsed or decoded. 
The raw frame data is set as a field on the event /// (called `rawData`) and encoded as a base64 string. - raw_data_only: Option, + pub raw_data_only: Option, /// Whether or not to concurrently process DNSTAP frames. pub multithreaded: Option, @@ -68,55 +67,55 @@ pub struct DnstapConfig { /// Maximum number of frames that can be processed concurrently. pub max_frame_handling_tasks: Option, - /// Unix file mode bits to be applied to the unix socket file as its designated file permissions. - /// - /// Note: The file mode value can be specified in any numeric format supported by your configuration - /// language, but it is most intuitive to use an octal number. - pub socket_file_mode: Option, - - /// The size, in bytes, of the receive buffer used for the socket. - /// - /// This should not typically needed to be changed. - #[configurable(metadata(docs::type_unit = "bytes"))] - pub socket_receive_buffer_size: Option, - - /// The size, in bytes, of the send buffer used for the socket. - /// - /// This should not typically needed to be changed. - #[configurable(metadata(docs::type_unit = "bytes"))] - pub socket_send_buffer_size: Option, - /// The namespace to use for logs. This overrides the global settings. #[configurable(metadata(docs::hidden))] #[serde(default)] - log_namespace: Option, + pub log_namespace: Option, } fn default_max_frame_length() -> usize { bytesize::kib(100u64) as usize } +/// Listening mode for the `dnstap` source. +#[configurable_component] +#[derive(Clone, Debug)] +#[serde(tag = "mode", rename_all = "snake_case")] +#[configurable(metadata(docs::enum_tag_description = "The type of dnstap socket to use."))] +#[allow(clippy::large_enum_variant)] // just used for configuration +pub enum Mode { + /// Listen on TCP. + Tcp(tcp::TcpConfig), + + /// Listen on a Unix domain socket + #[cfg(unix)] + Unix(unix::UnixConfig), +} + impl DnstapConfig { pub fn new(socket_path: PathBuf) -> Self { Self { - host_key: None, - socket_path, - ..Self::default() + mode: Mode::Unix(unix::UnixConfig::new(socket_path)), + ..Default::default() } } - fn content_type(&self) -> String { - "protobuf:dnstap.Dnstap".to_string() //content-type for framestream + fn log_namespace(&self) -> LogNamespace { + self.log_namespace.unwrap_or(false).into() + } + + fn raw_data_only(&self) -> bool { + self.raw_data_only.unwrap_or(false) } pub fn schema_definition(&self, log_namespace: LogNamespace) -> vector_lib::schema::Definition { let event_schema = DnstapEventSchema; - match log_namespace { + match self.log_namespace() { LogNamespace::Legacy => { let schema = vector_lib::schema::Definition::empty_legacy_namespace(); - if self.raw_data_only.unwrap_or(false) { + if self.raw_data_only() { if let Some(message_key) = log_schema().message_key() { return schema.with_event_field( message_key, @@ -134,7 +133,7 @@ impl DnstapConfig { ) .with_standard_vector_source_metadata(); - if self.raw_data_only.unwrap_or(false) { + if self.raw_data_only() { schema.with_event_field( &owned_value_path!("message"), Kind::bytes(), @@ -151,15 +150,18 @@ impl DnstapConfig { impl Default for DnstapConfig { fn default() -> Self { Self { - host_key: None, + #[cfg(unix)] + mode: Mode::Unix(unix::UnixConfig::default()), + #[cfg(not(unix))] + mode: Mode::Tcp(tcp::TcpConfig::from_address(std::net::SocketAddr::new( + std::net::IpAddr::V4(std::net::Ipv4Addr::new(0, 0, 0, 0)), + 9000, + ))), max_frame_length: default_max_frame_length(), - socket_path: PathBuf::from("/run/bind/dnstap.sock"), + host_key: None, raw_data_only: None, multithreaded: None, 
max_frame_handling_tasks: None, - socket_file_mode: None, - socket_receive_buffer_size: None, - socket_send_buffer_size: None, log_namespace: None, } } @@ -172,12 +174,32 @@ impl_generate_config_from_default!(DnstapConfig); impl SourceConfig for DnstapConfig { async fn build(&self, cx: SourceContext) -> Result { let log_namespace = cx.log_namespace(self.log_namespace); - let frame_handler = DnstapFrameHandler::new(self, log_namespace); - build_framestream_unix_source(frame_handler, cx.shutdown, cx.out) + let common_frame_handler = CommonFrameHandler::new(self, log_namespace); + match &self.mode { + Mode::Tcp(config) => { + let tls_config = config.tls().as_ref().map(|tls| tls.tls_config.clone()); + + let tls = MaybeTlsSettings::from_config(&tls_config, true)?; + let frame_handler = tcp::DnstapFrameHandler::new( + config.clone(), + tls, + common_frame_handler, + log_namespace, + ); + + build_framestream_tcp_source(frame_handler, cx.shutdown, cx.out) + } + #[cfg(unix)] + Mode::Unix(config) => { + let frame_handler = + unix::DnstapFrameHandler::new(config.clone(), common_frame_handler); + build_framestream_unix_source(frame_handler, cx.shutdown, cx.out) + } + } } fn outputs(&self, global_log_namespace: LogNamespace) -> Vec { - let log_namespace = global_log_namespace.merge(self.log_namespace); + let log_namespace = global_log_namespace.merge(Some(self.log_namespace())); let schema_definition = self .schema_definition(log_namespace) .with_standard_vector_source_metadata(); @@ -190,16 +212,12 @@ impl SourceConfig for DnstapConfig { } #[derive(Clone)] -pub struct DnstapFrameHandler { +struct CommonFrameHandler { max_frame_length: usize, - socket_path: PathBuf, content_type: String, raw_data_only: bool, multithreaded: bool, max_frame_handling_tasks: u32, - socket_file_mode: Option, - socket_receive_buffer_size: Option, - socket_send_buffer_size: Option, host_key: Option, timestamp_key: Option, source_type_key: Option, @@ -207,7 +225,7 @@ pub struct DnstapFrameHandler { log_namespace: LogNamespace, } -impl DnstapFrameHandler { +impl CommonFrameHandler { pub fn new(config: &DnstapConfig, log_namespace: LogNamespace) -> Self { let source_type_key = log_schema().source_type_key(); let timestamp_key = log_schema().timestamp_key(); @@ -219,14 +237,10 @@ impl DnstapFrameHandler { Self { max_frame_length: config.max_frame_length, - socket_path: config.socket_path.clone(), - content_type: config.content_type(), + content_type: "protobuf:dnstap.Dnstap".to_string(), raw_data_only: config.raw_data_only.unwrap_or(false), multithreaded: config.multithreaded.unwrap_or(false), max_frame_handling_tasks: config.max_frame_handling_tasks.unwrap_or(1000), - socket_file_mode: config.socket_file_mode, - socket_receive_buffer_size: config.socket_receive_buffer_size, - socket_send_buffer_size: config.socket_send_buffer_size, host_key, timestamp_key: timestamp_key.cloned(), source_type_key: source_type_key.cloned(), @@ -236,7 +250,7 @@ impl DnstapFrameHandler { } } -impl FrameHandler for DnstapFrameHandler { +impl FrameHandler for CommonFrameHandler { fn content_type(&self) -> String { self.content_type.clone() } @@ -245,11 +259,11 @@ impl FrameHandler for DnstapFrameHandler { self.max_frame_length } - /** - * Function to pass into util::framestream::build_framestream_unix_source - * Takes a data frame from the unix socket and turns it into a Vector Event. 
- **/ - fn handle_event(&self, received_from: Option, frame: Bytes) -> Option { + fn handle_event( + &self, + received_from: Option, + frame: vrl::prelude::Bytes, + ) -> Option { self.bytes_received.emit(ByteSize(frame.len())); let mut log_event = LogEvent::default(); @@ -276,12 +290,6 @@ impl FrameHandler for DnstapFrameHandler { return None; } - emit!(SocketEventsReceived { - mode: SocketMode::Unix, - byte_size: log_event.estimated_json_encoded_size_of(), - count: 1 - }); - if self.log_namespace == LogNamespace::Vector { // The timestamp is inserted by the parser which caters for the Legacy namespace. self.log_namespace.insert_vector_metadata( @@ -302,10 +310,6 @@ impl FrameHandler for DnstapFrameHandler { Some(Event::from(log_event)) } - fn socket_path(&self) -> PathBuf { - self.socket_path.clone() - } - fn multithreaded(&self) -> bool { self.multithreaded } @@ -314,33 +318,23 @@ impl FrameHandler for DnstapFrameHandler { self.max_frame_handling_tasks } - fn socket_file_mode(&self) -> Option { - self.socket_file_mode - } - - fn socket_receive_buffer_size(&self) -> Option { - self.socket_receive_buffer_size - } - - fn socket_send_buffer_size(&self) -> Option { - self.socket_send_buffer_size - } - - fn host_key(&self) -> &Option { + fn host_key(&self) -> &Option { &self.host_key } - fn source_type_key(&self) -> Option<&OwnedValuePath> { - self.source_type_key.as_ref() + fn timestamp_key(&self) -> Option<&vrl::path::OwnedValuePath> { + self.timestamp_key.as_ref() } - fn timestamp_key(&self) -> Option<&OwnedValuePath> { - self.timestamp_key.as_ref() + fn source_type_key(&self) -> Option<&vrl::path::OwnedValuePath> { + self.source_type_key.as_ref() } } #[cfg(test)] mod tests { + use vector_lib::event::{Event, LogEvent}; + use super::*; #[test] @@ -412,6 +406,10 @@ mod integration_tests { use futures::StreamExt; use serde_json::json; use tokio::time; + use vector_lib::event::Event; + use vector_lib::lookup::lookup_v2::OptionalValuePath; + + use self::unix::UnixConfig; use super::*; use crate::{ @@ -431,15 +429,17 @@ mod integration_tests { let socket = get_socket(raw_data, query_type); DnstapConfig { + mode: Mode::Unix(UnixConfig { + socket_path: socket, + socket_file_mode: Some(511), + socket_receive_buffer_size: Some(10485760), + socket_send_buffer_size: Some(10485760), + }), max_frame_length: 102400, host_key: Some(OptionalValuePath::from(owned_value_path!("key"))), - socket_path: socket, raw_data_only: Some(raw_data), multithreaded: Some(false), max_frame_handling_tasks: Some(100000), - socket_file_mode: Some(511), - socket_receive_buffer_size: Some(10485760), - socket_send_buffer_size: Some(10485760), log_namespace: None, } .build(SourceContext::new_test(sender, None)) diff --git a/src/sources/dnstap/tcp.rs b/src/sources/dnstap/tcp.rs new file mode 100644 index 0000000000000..f45dab66f1749 --- /dev/null +++ b/src/sources/dnstap/tcp.rs @@ -0,0 +1,279 @@ +use ipnet::IpNet; +use std::time::Duration; + +use bytes::Bytes; +use serde_with::serde_as; +use vector_lib::configurable::configurable_component; +use vector_lib::ipallowlist::IpAllowlistConfig; +use vector_lib::lookup::{owned_value_path, path}; +use vector_lib::tcp::TcpKeepaliveConfig; +use vector_lib::tls::{CertificateMetadata, MaybeTlsSettings, TlsSourceConfig}; +use vector_lib::EstimatedJsonEncodedSizeOf; +use vrl::path::OwnedValuePath; +use vrl::value::ObjectMap; + +use crate::internal_events::{SocketEventsReceived, SocketMode}; +use crate::sources::util::framestream::{FrameHandler, TcpFrameHandler}; +use crate::{event::Event, 
sources::util::net::SocketListenAddr};
+
+use vector_lib::config::{LegacyKey, LogNamespace};
+use vector_lib::lookup::lookup_v2::OptionalValuePath;
+
+/// TCP configuration for the `dnstap` source.
+#[serde_as]
+#[configurable_component]
+#[derive(Clone, Debug)]
+pub struct TcpConfig {
+    #[configurable(derived)]
+    address: SocketListenAddr,
+
+    #[configurable(derived)]
+    keepalive: Option<TcpKeepaliveConfig>,
+
+    /// The timeout before a connection is forcefully closed during shutdown.
+    #[serde(default = "default_shutdown_timeout_secs")]
+    #[serde_as(as = "serde_with::DurationSeconds<u64>")]
+    #[configurable(metadata(docs::human_name = "Shutdown Timeout"))]
+    shutdown_timeout_secs: Duration,
+
+    /// Overrides the name of the log field used to add the peer host's port to each event.
+    ///
+    /// The value will be the peer host's port, i.e. `9000`.
+    ///
+    /// By default, `"port"` is used.
+    ///
+    /// Set to `""` to suppress this key.
+    #[serde(default = "default_port_key")]
+    pub port_key: OptionalValuePath,
+
+    /// List of allowed origin IP networks.
+    ///
+    /// By default, all origins are allowed.
+    permit_origin: Option<IpAllowlistConfig>,
+
+    #[configurable(derived)]
+    tls: Option<TlsSourceConfig>,
+
+    /// The size of the receive buffer used for each connection.
+    #[configurable(metadata(docs::type_unit = "bytes"))]
+    receive_buffer_bytes: Option<usize>,
+
+    /// Maximum duration to keep each connection open. Connections open for longer than this duration are closed.
+    ///
+    /// This is helpful for load balancing long-lived connections.
+    #[configurable(metadata(docs::type_unit = "seconds"))]
+    max_connection_duration_secs: Option<u64>,
+
+    /// The maximum number of TCP connections that are allowed at any given time.
+    #[configurable(metadata(docs::type_unit = "connections"))]
+    pub connection_limit: Option<u32>,
+}
+
+const fn default_shutdown_timeout_secs() -> Duration {
+    Duration::from_secs(30)
+}
+
+fn default_port_key() -> OptionalValuePath {
+    OptionalValuePath::from(owned_value_path!("port"))
+}
+
+impl TcpConfig {
+    pub fn from_address(address: SocketListenAddr) -> Self {
+        Self {
+            address,
+            keepalive: None,
+            shutdown_timeout_secs: default_shutdown_timeout_secs(),
+            port_key: default_port_key(),
+            permit_origin: None,
+            tls: None,
+            receive_buffer_bytes: None,
+            max_connection_duration_secs: None,
+            connection_limit: None,
+        }
+    }
+
+    pub const fn port_key(&self) -> &OptionalValuePath {
+        &self.port_key
+    }
+
+    pub const fn tls(&self) -> &Option<TlsSourceConfig> {
+        &self.tls
+    }
+
+    pub const fn address(&self) -> SocketListenAddr {
+        self.address
+    }
+
+    pub const fn keepalive(&self) -> Option<TcpKeepaliveConfig> {
+        self.keepalive
+    }
+
+    pub const fn shutdown_timeout_secs(&self) -> Duration {
+        self.shutdown_timeout_secs
+    }
+
+    pub const fn receive_buffer_bytes(&self) -> Option<usize> {
+        self.receive_buffer_bytes
+    }
+
+    pub const fn max_connection_duration_secs(&self) -> Option<u64> {
+        self.max_connection_duration_secs
+    }
+}
+
+#[derive(Clone)]
+pub struct DnstapFrameHandler<T: FrameHandler + Clone> {
+    frame_handler: T,
+    address: SocketListenAddr,
+    keepalive: Option<TcpKeepaliveConfig>,
+    shutdown_timeout_secs: Duration,
+    tls: MaybeTlsSettings,
+    tls_client_metadata_key: Option<OwnedValuePath>,
+    tls_client_metadata: Option<ObjectMap>,
+    receive_buffer_bytes: Option<usize>,
+    max_connection_duration_secs: Option<u64>,
+    max_connections: Option<u32>,
+    allowlist: Option<Vec<IpNet>>,
+    log_namespace: LogNamespace,
+}
+
+impl<T: FrameHandler + Clone> DnstapFrameHandler<T> {
+    pub fn new(
+        config: TcpConfig,
+        tls: MaybeTlsSettings,
+        frame_handler: T,
+        log_namespace: LogNamespace,
+    ) -> Self {
+        let tls_client_metadata_key = config
+            .tls()
+            .as_ref()
+            .and_then(|tls| tls.client_metadata_key.clone())
+            .and_then(|k| k.path);
+
+        Self {
+            frame_handler,
+            address: config.address,
+            keepalive: config.keepalive,
+            shutdown_timeout_secs: config.shutdown_timeout_secs,
+            tls,
+            tls_client_metadata_key,
+            tls_client_metadata: None,
+            receive_buffer_bytes: config.receive_buffer_bytes,
+            max_connection_duration_secs: config.max_connection_duration_secs,
+            max_connections: config.connection_limit,
+            allowlist: config
+                .permit_origin
+                .map(|p| p.0.iter().map(|net| net.0).collect()),
+            log_namespace,
+        }
+    }
+}
+
+impl<T: FrameHandler + Clone> FrameHandler for DnstapFrameHandler<T> {
+    fn content_type(&self) -> String {
+        self.frame_handler.content_type()
+    }
+
+    fn max_frame_length(&self) -> usize {
+        self.frame_handler.max_frame_length()
+    }
+
+    /**
+     * Function to pass into util::framestream::build_framestream_tcp_source
+     * Takes a data frame from the TCP socket and turns it into a Vector Event.
+     **/
+    fn handle_event(&self, received_from: Option<Bytes>, frame: Bytes) -> Option<Event> {
+        self.frame_handler
+            .handle_event(received_from, frame)
+            .map(|mut event| {
+                if let Event::Log(mut log_event) = event {
+                    if let Some(tls_client_metadata) = &self.tls_client_metadata {
+                        self.log_namespace.insert_source_metadata(
+                            super::DnstapConfig::NAME,
+                            &mut log_event,
+                            self.tls_client_metadata_key
+                                .as_ref()
+                                .map(LegacyKey::Overwrite),
+                            path!("tls_client_metadata"),
+                            tls_client_metadata.clone(),
+                        );
+                    }
+
+                    emit!(SocketEventsReceived {
+                        mode: SocketMode::Tcp,
+                        byte_size: log_event.estimated_json_encoded_size_of(),
+                        count: 1
+                    });
+
+                    event = Event::from(log_event);
+                }
+                event
+            })
+    }
+
+    fn multithreaded(&self) -> bool {
+        self.frame_handler.multithreaded()
+    }
+
+    fn max_frame_handling_tasks(&self) -> u32 {
+        self.frame_handler.max_frame_handling_tasks()
+    }
+
+    fn host_key(&self) -> &Option<OwnedValuePath> {
+        self.frame_handler.host_key()
+    }
+
+    fn source_type_key(&self) -> Option<&OwnedValuePath> {
+        self.frame_handler.source_type_key()
+    }
+
+    fn timestamp_key(&self) -> Option<&OwnedValuePath> {
+        self.frame_handler.timestamp_key()
+    }
+}
+
+impl<T: FrameHandler + Clone> TcpFrameHandler for DnstapFrameHandler<T> {
+    fn address(&self) -> SocketListenAddr {
+        self.address
+    }
+
+    fn keepalive(&self) -> Option<TcpKeepaliveConfig> {
+        self.keepalive
+    }
+
+    fn shutdown_timeout_secs(&self) -> Duration {
+        self.shutdown_timeout_secs
+    }
+
+    fn tls(&self) -> MaybeTlsSettings {
+        self.tls.clone()
+    }
+
+    fn tls_client_metadata_key(&self) -> Option<OwnedValuePath> {
+        self.tls_client_metadata_key.clone()
+    }
+
+    fn receive_buffer_bytes(&self) -> Option<usize> {
+        self.receive_buffer_bytes
+    }
+
+    fn max_connection_duration_secs(&self) -> Option<u64> {
+        self.max_connection_duration_secs
+    }
+
+    fn max_connections(&self) -> Option<u32> {
+        self.max_connections
+    }
+
+    fn insert_tls_client_metadata(&mut self, metadata: Option<CertificateMetadata>) {
+        self.tls_client_metadata = metadata.map(|c| {
+            let mut metadata = ObjectMap::new();
+            metadata.insert("subject".into(), c.subject().into());
+            metadata
+        });
+    }
+
+    fn allowed_origins(&self) -> Option<&[IpNet]> {
+        self.allowlist.as_deref()
+    }
+}
diff --git a/src/sources/dnstap/unix.rs b/src/sources/dnstap/unix.rs
new file mode 100644
index 0000000000000..80229f715bd5b
--- /dev/null
+++ b/src/sources/dnstap/unix.rs
@@ -0,0 +1,153 @@
+use std::path::PathBuf;
+
+use bytes::Bytes;
+use vector_lib::configurable::configurable_component;
+use vector_lib::lookup::OwnedValuePath;
+
+use crate::sources::util::framestream::FrameHandler;
+use crate::{
+    event::Event,
+    internal_events::{SocketEventsReceived, SocketMode},
+    sources::util::framestream::UnixFrameHandler,
+};
+
+pub use super::schema::DnstapEventSchema;
+use vector_lib::EstimatedJsonEncodedSizeOf;
+
+/// Unix domain socket configuration for the `dnstap` source.
+#[configurable_component]
+#[derive(Clone, Debug)]
+#[serde(deny_unknown_fields)]
+pub struct UnixConfig {
+    /// Absolute path to the socket file to read DNSTAP data from.
+    ///
+    /// The DNS server must be configured to send its DNSTAP data to this socket file. The socket file is created
+    /// if it doesn't already exist when the source first starts.
+    pub socket_path: PathBuf,
+
+    /// Unix file mode bits to be applied to the unix socket file as its designated file permissions.
+    ///
+    /// Note: The file mode value can be specified in any numeric format supported by your configuration
+    /// language, but it is most intuitive to use an octal number.
+    pub socket_file_mode: Option<u32>,
+
+    /// The size, in bytes, of the receive buffer used for the socket.
+    ///
+    /// This should not typically need to be changed.
+    #[configurable(metadata(docs::type_unit = "bytes"))]
+    pub socket_receive_buffer_size: Option<usize>,
+
+    /// The size, in bytes, of the send buffer used for the socket.
+    ///
+    /// This should not typically need to be changed.
+    #[configurable(metadata(docs::type_unit = "bytes"))]
+    pub socket_send_buffer_size: Option<usize>,
+}
+
+impl UnixConfig {
+    pub fn new(socket_path: PathBuf) -> Self {
+        Self {
+            socket_path,
+            ..Self::default()
+        }
+    }
+}
+
+impl Default for UnixConfig {
+    fn default() -> Self {
+        Self {
+            socket_path: PathBuf::from("/run/bind/dnstap.sock"),
+            socket_file_mode: None,
+            socket_receive_buffer_size: None,
+            socket_send_buffer_size: None,
+        }
+    }
+}
+
+#[derive(Clone)]
+pub struct DnstapFrameHandler<T: FrameHandler + Clone> {
+    frame_handler: T,
+    socket_path: PathBuf,
+    socket_file_mode: Option<u32>,
+    socket_receive_buffer_size: Option<usize>,
+    socket_send_buffer_size: Option<usize>,
+}
+
+impl<T: FrameHandler + Clone> DnstapFrameHandler<T> {
+    pub fn new(config: UnixConfig, frame_handler: T) -> Self {
+        Self {
+            frame_handler,
+            socket_path: config.socket_path.clone(),
+            socket_file_mode: config.socket_file_mode,
+            socket_receive_buffer_size: config.socket_receive_buffer_size,
+            socket_send_buffer_size: config.socket_send_buffer_size,
+        }
+    }
+}
+
+impl<T: FrameHandler + Clone> FrameHandler for DnstapFrameHandler<T> {
+    fn content_type(&self) -> String {
+        self.frame_handler.content_type()
+    }
+
+    fn max_frame_length(&self) -> usize {
+        self.frame_handler.max_frame_length()
+    }
+
+    /**
+     * Function to pass into util::framestream::build_framestream_unix_source
+     * Takes a data frame from the unix socket and turns it into a Vector Event.
+ **/ + fn handle_event(&self, received_from: Option, frame: Bytes) -> Option { + self.frame_handler + .handle_event(received_from, frame) + .map(|event| { + if let Event::Log(ref log_event) = event { + emit!(SocketEventsReceived { + mode: SocketMode::Unix, + byte_size: log_event.estimated_json_encoded_size_of(), + count: 1 + }) + } + event + }) + } + + fn multithreaded(&self) -> bool { + self.frame_handler.multithreaded() + } + + fn max_frame_handling_tasks(&self) -> u32 { + self.frame_handler.max_frame_handling_tasks() + } + + fn host_key(&self) -> &Option { + self.frame_handler.host_key() + } + + fn source_type_key(&self) -> Option<&OwnedValuePath> { + self.frame_handler.source_type_key() + } + + fn timestamp_key(&self) -> Option<&OwnedValuePath> { + self.frame_handler.timestamp_key() + } +} + +impl UnixFrameHandler for DnstapFrameHandler { + fn socket_path(&self) -> PathBuf { + self.socket_path.clone() + } + + fn socket_file_mode(&self) -> Option { + self.socket_file_mode + } + + fn socket_receive_buffer_size(&self) -> Option { + self.socket_receive_buffer_size + } + + fn socket_send_buffer_size(&self) -> Option { + self.socket_send_buffer_size + } +} diff --git a/src/sources/fluent/mod.rs b/src/sources/fluent/mod.rs index a5d4432565813..023b7cc17e0a1 100644 --- a/src/sources/fluent/mod.rs +++ b/src/sources/fluent/mod.rs @@ -110,6 +110,7 @@ impl SourceConfig for FluentConfig { cx, self.acknowledgements, self.connection_limit, + None, FluentConfig::NAME, log_namespace, ) diff --git a/src/sources/logstash.rs b/src/sources/logstash.rs index 7545af923c3f5..37a0bdc9a9849 100644 --- a/src/sources/logstash.rs +++ b/src/sources/logstash.rs @@ -162,6 +162,7 @@ impl SourceConfig for LogstashConfig { cx, self.acknowledgements, self.connection_limit, + None, LogstashConfig::NAME, log_namespace, ) diff --git a/src/sources/mod.rs b/src/sources/mod.rs index 6526666d827b8..03d67ef3970fc 100644 --- a/src/sources/mod.rs +++ b/src/sources/mod.rs @@ -17,7 +17,7 @@ pub mod aws_sqs; pub mod datadog_agent; #[cfg(feature = "sources-demo_logs")] pub mod demo_logs; -#[cfg(all(unix, feature = "sources-dnstap"))] +#[cfg(feature = "sources-dnstap")] pub mod dnstap; #[cfg(feature = "sources-docker_logs")] pub mod docker_logs; diff --git a/src/sources/socket/mod.rs b/src/sources/socket/mod.rs index d1133dfd82a30..03cae75acd390 100644 --- a/src/sources/socket/mod.rs +++ b/src/sources/socket/mod.rs @@ -145,6 +145,9 @@ impl SourceConfig for SocketConfig { cx, false.into(), config.connection_limit, + config + .permit_origin + .map(|p| p.0.iter().map(|net| net.0).collect()), SocketConfig::NAME, log_namespace, ) diff --git a/src/sources/socket/tcp.rs b/src/sources/socket/tcp.rs index fe05930342872..1b917df06533a 100644 --- a/src/sources/socket/tcp.rs +++ b/src/sources/socket/tcp.rs @@ -1,4 +1,5 @@ use std::time::Duration; +use vector_lib::ipallowlist::IpAllowlistConfig; use chrono::Utc; use serde_with::serde_as; @@ -58,6 +59,11 @@ pub struct TcpConfig { #[serde(default = "default_port_key")] port_key: OptionalValuePath, + /// List of allowed origin IP networks + /// + /// By default, all origins are allowed + pub permit_origin: Option, + #[configurable(derived)] tls: Option, @@ -104,6 +110,7 @@ impl TcpConfig { shutdown_timeout_secs: default_shutdown_timeout_secs(), host_key: default_host_key(), port_key: default_port_key(), + permit_origin: None, tls: None, receive_buffer_bytes: None, max_connection_duration_secs: None, diff --git a/src/sources/statsd/mod.rs b/src/sources/statsd/mod.rs index 70b72eb941f79..e00b944bd4731 
100644 --- a/src/sources/statsd/mod.rs +++ b/src/sources/statsd/mod.rs @@ -168,6 +168,7 @@ impl SourceConfig for StatsdConfig { cx, false.into(), config.connection_limit, + None, StatsdConfig::NAME, LogNamespace::Legacy, ) diff --git a/src/sources/syslog.rs b/src/sources/syslog.rs index 645be9efdfd03..0667571d10814 100644 --- a/src/sources/syslog.rs +++ b/src/sources/syslog.rs @@ -200,6 +200,7 @@ impl SourceConfig for SyslogConfig { cx, false.into(), connection_limit, + None, SyslogConfig::NAME, log_namespace, ) diff --git a/src/sources/util/framestream.rs b/src/sources/util/framestream.rs index 8571f2d1a4458..da1c395d84c85 100644 --- a/src/sources/util/framestream.rs +++ b/src/sources/util/framestream.rs @@ -1,9 +1,11 @@ +use ipnet::IpNet; #[cfg(unix)] use std::os::unix::{fs::PermissionsExt, io::AsRawFd}; use std::{ convert::TryInto, fs, marker::{Send, Sync}, + net::SocketAddr, path::PathBuf, sync::{ atomic::{AtomicU32, Ordering}, @@ -20,26 +22,50 @@ use futures::{ sink::{Sink, SinkExt}, stream::{self, StreamExt, TryStreamExt}, }; -use tokio::{self, net::UnixListener, task::JoinHandle}; +use futures_util::{future::BoxFuture, Future, FutureExt}; +use listenfd::ListenFd; +use tokio::{ + self, + io::{AsyncRead, AsyncWrite}, + net::{TcpStream, UnixListener}, + task::JoinHandle, + time::sleep, +}; use tokio_stream::wrappers::UnixListenerStream; use tokio_util::codec::{length_delimited, Framed}; -use tracing::{field, Instrument}; -use vector_lib::lookup::OwnedValuePath; +use tracing::{field, Instrument, Span}; +use vector_lib::{ + lookup::OwnedValuePath, + tcp::TcpKeepaliveConfig, + tls::{CertificateMetadata, MaybeTlsIncomingStream, MaybeTlsSettings}, +}; use crate::{ event::Event, - internal_events::{UnixSocketError, UnixSocketFileDeleteError}, + internal_events::{ + ConnectionOpen, OpenGauge, SocketBindError, SocketMode, SocketReceiveError, + TcpBytesReceived, TcpSocketError, TcpSocketTlsConnectionError, UnixSocketError, + UnixSocketFileDeleteError, + }, shutdown::ShutdownSignal, - sources::Source, + sources::{ + util::{ + net::{try_bind_tcp_listener, MAX_IN_FLIGHT_EVENTS_TARGET}, + AfterReadExt, + }, + Source, + }, SourceSender, }; +use super::net::{RequestLimiter, SocketListenAddr}; + const FSTRM_CONTROL_FRAME_LENGTH_MAX: usize = 512; const FSTRM_CONTROL_FIELD_CONTENT_TYPE_LENGTH_MAX: usize = 256; pub type FrameStreamSink = Box + Send + Unpin>; -struct FrameStreamReader { +pub struct FrameStreamReader { response_sink: Mutex, expected_content_type: String, state: FrameStreamState, @@ -342,24 +368,231 @@ pub trait FrameHandler { fn content_type(&self) -> String; fn max_frame_length(&self) -> usize; fn handle_event(&self, received_from: Option, frame: Bytes) -> Option; - fn socket_path(&self) -> PathBuf; fn multithreaded(&self) -> bool; fn max_frame_handling_tasks(&self) -> u32; - fn socket_file_mode(&self) -> Option; - fn socket_receive_buffer_size(&self) -> Option; - fn socket_send_buffer_size(&self) -> Option; fn host_key(&self) -> &Option; fn timestamp_key(&self) -> Option<&OwnedValuePath>; fn source_type_key(&self) -> Option<&OwnedValuePath>; } +pub trait UnixFrameHandler: FrameHandler { + fn socket_path(&self) -> PathBuf; + fn socket_file_mode(&self) -> Option; + fn socket_receive_buffer_size(&self) -> Option; + fn socket_send_buffer_size(&self) -> Option; +} + +pub trait TcpFrameHandler: FrameHandler { + fn address(&self) -> SocketListenAddr; + fn keepalive(&self) -> Option; + fn shutdown_timeout_secs(&self) -> Duration; + fn tls(&self) -> MaybeTlsSettings; + fn 
tls_client_metadata_key(&self) -> Option; + fn receive_buffer_bytes(&self) -> Option; + fn max_connection_duration_secs(&self) -> Option; + fn max_connections(&self) -> Option; + fn allowed_origins(&self) -> Option<&[IpNet]>; + fn insert_tls_client_metadata(&mut self, metadata: Option); +} + +/** + * Based off of the build_framestream_unix_source function. + * Functions similarly, just uses TCP socket instead of unix socket + **/ +pub fn build_framestream_tcp_source( + frame_handler: impl TcpFrameHandler + Send + Sync + Clone + 'static, + shutdown: ShutdownSignal, + out: SourceSender, +) -> crate::Result { + let addr = frame_handler.address(); + let tls = frame_handler.tls(); + let shutdown = shutdown.clone(); + let out = out.clone(); + + Ok(Box::pin(async move { + let listenfd = ListenFd::from_env(); + let listener = try_bind_tcp_listener( + addr, + listenfd, + &tls, + frame_handler + .allowed_origins() + .map(|origins| origins.to_vec()), + ) + .await + .map_err(|error| { + emit!(SocketBindError { + mode: SocketMode::Tcp, + error: &error, + }) + })?; + + info!( + message = "Listening.", + addr = %listener + .local_addr() + .map(SocketListenAddr::SocketAddr) + .unwrap_or(addr) + ); + + let tripwire = shutdown.clone(); + let shutdown_timeout_secs = frame_handler.shutdown_timeout_secs(); + let tripwire = async move { + _ = tripwire.await; + sleep(shutdown_timeout_secs).await; + } + .shared(); + + let connection_gauge = OpenGauge::new(); + let shutdown_clone = shutdown.clone(); + + let request_limiter = + RequestLimiter::new(MAX_IN_FLIGHT_EVENTS_TARGET, crate::num_threads()); + + listener + .accept_stream_limited(frame_handler.max_connections()) + .take_until(shutdown_clone) + .for_each(move |(connection, tcp_connection_permit)| { + let shutdown_signal = shutdown.clone(); + let tripwire = tripwire.clone(); + let out = out.clone(); + let connection_gauge = connection_gauge.clone(); + let request_limiter = request_limiter.clone(); + let frame_handler_clone = frame_handler.clone(); + + async move { + let socket = match connection { + Ok(socket) => socket, + Err(error) => { + emit!(SocketReceiveError { + mode: SocketMode::Tcp, + error: &error + }); + return; + } + }; + + let peer_addr = socket.peer_addr(); + let span = info_span!("connection", %peer_addr); + + let tripwire = tripwire + .map(move |_| { + info!( + message = "Resetting connection (still open after seconds).", + seconds = ?shutdown_timeout_secs + ); + }) + .boxed(); + + span.clone().in_scope(|| { + debug!(message = "Accepted a new connection.", peer_addr = %peer_addr); + + let open_token = + connection_gauge.open(|count| emit!(ConnectionOpen { count })); + + let fut = handle_stream( + frame_handler_clone, + shutdown_signal, + socket, + tripwire, + peer_addr, + out, + request_limiter, + ); + + tokio::spawn( + fut.map(move |()| { + drop(open_token); + drop(tcp_connection_permit); + }) + .instrument(span.or_current()), + ); + }); + } + }) + .map(Ok) + .await + })) +} + +#[allow(clippy::too_many_arguments)] +async fn handle_stream( + mut frame_handler: impl TcpFrameHandler + Send + Sync + Clone + 'static, + mut shutdown_signal: ShutdownSignal, + mut socket: MaybeTlsIncomingStream, + _tripwire: BoxFuture<'static, ()>, + peer_addr: SocketAddr, + out: SourceSender, + _request_limiter: RequestLimiter, +) { + tokio::select! 
{ + result = socket.handshake() => { + if let Err(error) = result { + emit!(TcpSocketTlsConnectionError { error }); + return; + } + }, + _ = &mut shutdown_signal => { + return; + } + }; + + if let Some(keepalive) = frame_handler.keepalive() { + if let Err(error) = socket.set_keepalive(keepalive) { + warn!(message = "Failed configuring TCP keepalive.", %error); + } + } + + if let Some(receive_buffer_bytes) = frame_handler.receive_buffer_bytes() { + if let Err(error) = socket.set_receive_buffer_bytes(receive_buffer_bytes) { + warn!(message = "Failed configuring receive buffer size on TCP socket.", %error); + } + } + + let socket = socket.after_read(move |byte_size| { + emit!(TcpBytesReceived { + byte_size, + peer_addr + }); + }); + + let certificate_metadata = socket + .get_ref() + .ssl_stream() + .and_then(|stream| stream.ssl().peer_certificate()) + .map(CertificateMetadata::from); + + frame_handler.insert_tls_client_metadata(certificate_metadata); + + let span = info_span!("connection"); + span.record("peer_addr", &field::debug(&peer_addr)); + let received_from: Option = Some(peer_addr.to_string().into()); + let active_parsing_task_nums = Arc::new(AtomicU32::new(0)); + + build_framestream_source( + frame_handler, + socket, + received_from, + out, + shutdown_signal, + span, + active_parsing_task_nums, + move |error| { + emit!(TcpSocketError { + error: &error, + peer_addr, + }); + }, + ); +} + /** * Based off of the build_unix_source function. * Functions similarly, but uses the FrameStreamReader to deal with * framestream control packets, and responds appropriately. **/ pub fn build_framestream_unix_source( - frame_handler: impl FrameHandler + Send + Sync + Clone + 'static, + frame_handler: impl UnixFrameHandler + Send + Sync + Clone + 'static, shutdown: ShutdownSignal, out: SourceSender, ) -> crate::Result { @@ -449,9 +682,7 @@ pub fn build_framestream_unix_source( Ok(s) => s, }; let peer_addr = socket.peer_addr().ok(); - let content_type = frame_handler.content_type(); let listen_path = path.clone(); - let mut event_sink = out.clone(); let active_task_nums_ = Arc::clone(&active_parsing_task_nums); let span = info_span!("connection"); @@ -468,69 +699,21 @@ pub fn build_framestream_unix_source( let received_from: Option = path.map(|p| p.to_string_lossy().into_owned().into()); - let (sock_sink, sock_stream) = Framed::new( + build_framestream_source( + frame_handler.clone(), socket, - length_delimited::Builder::new() - .max_frame_length(frame_handler.max_frame_length()) - .new_codec(), - ) - .split(); - let mut fs_reader = FrameStreamReader::new(Box::new(sock_sink), content_type); - let frame_handler_copy = frame_handler.clone(); - let frames = sock_stream - .take_until(shutdown.clone()) - .map_err(move |error| { + received_from, + out.clone(), + shutdown.clone(), + span, + active_task_nums_, + move |error| { emit!(UnixSocketError { error: &error, path: &listen_path, }); - }) - .filter_map(move |frame| { - future::ready(match frame { - Ok(f) => fs_reader.handle_frame(Bytes::from(f)), - Err(_) => None, - }) - }); - if !frame_handler.multithreaded() { - let mut events = frames.filter_map(move |f| { - future::ready(frame_handler_copy.handle_event(received_from.clone(), f)) - }); - - let handler = async move { - if let Err(e) = event_sink.send_event_stream(&mut events).await { - error!("Error sending event: {:?}.", e); - } - - info!("Finished sending."); - }; - tokio::spawn(handler.instrument(span.or_current())); - } else { - let handler = async move { - frames - .for_each(move |f| { - 
future::ready({ - let max_frame_handling_tasks = - frame_handler_copy.max_frame_handling_tasks(); - let f_handler = frame_handler_copy.clone(); - let received_from_copy = received_from.clone(); - let event_sink_copy = event_sink.clone(); - let active_task_nums_copy = Arc::clone(&active_task_nums_); - - spawn_event_handling_tasks( - f, - f_handler, - event_sink_copy, - received_from_copy, - active_task_nums_copy, - max_frame_handling_tasks, - ); - }) - }) - .await; - info!("Finished sending."); - }; - tokio::spawn(handler.instrument(span.or_current())); - } + }, + ); } // Cleanup @@ -547,6 +730,79 @@ pub fn build_framestream_unix_source( Ok(Box::pin(fut)) } +#[allow(clippy::too_many_arguments)] +fn build_framestream_source( + frame_handler: impl FrameHandler + Send + Sync + Clone + 'static, + socket: impl AsyncRead + AsyncWrite + Send + 'static, + received_from: Option, + out: SourceSender, + shutdown: impl Future + Unpin + Send + 'static, + span: Span, + active_task_nums: Arc, + error_mapper: impl FnMut(std::io::Error) + Send + 'static, +) { + let content_type = frame_handler.content_type(); + let mut event_sink = out.clone(); + let (sock_sink, sock_stream) = Framed::new( + socket, + length_delimited::Builder::new() + .max_frame_length(frame_handler.max_frame_length()) + .new_codec(), + ) + .split(); + let mut fs_reader = FrameStreamReader::new(Box::new(sock_sink), content_type); + let frame_handler_copy = frame_handler.clone(); + let frames = sock_stream + .take_until(shutdown) + .map_err(error_mapper) + .filter_map(move |frame| { + future::ready(match frame { + Ok(f) => fs_reader.handle_frame(Bytes::from(f)), + Err(_) => None, + }) + }); + if !frame_handler.multithreaded() { + let mut events = frames.filter_map(move |f| { + future::ready(frame_handler_copy.handle_event(received_from.clone(), f)) + }); + + let handler = async move { + if let Err(e) = event_sink.send_event_stream(&mut events).await { + error!("Error sending event: {:?}.", e); + } + + info!("Finished sending."); + }; + tokio::spawn(handler.instrument(span.or_current())); + } else { + let handler = async move { + frames + .for_each(move |f| { + future::ready({ + let max_frame_handling_tasks = + frame_handler_copy.max_frame_handling_tasks(); + let f_handler = frame_handler_copy.clone(); + let received_from_copy = received_from.clone(); + let event_sink_copy = event_sink.clone(); + let active_task_nums_copy = Arc::clone(&active_task_nums); + + spawn_event_handling_tasks( + f, + f_handler, + event_sink_copy, + received_from_copy, + active_task_nums_copy, + max_frame_handling_tasks, + ); + }) + }) + .await; + info!("Finished sending."); + }; + tokio::spawn(handler.instrument(span.or_current())); + } +} + fn spawn_event_handling_tasks( event_data: Bytes, event_handler: impl FrameHandler + Send + Sync + 'static, @@ -579,6 +835,8 @@ fn wait_for_task_quota(active_task_nums: &Arc, max_tasks: u32) { #[cfg(test)] mod test { + use futures_util::Stream; + use std::net::SocketAddr; #[cfg(unix)] use std::{ path::PathBuf, @@ -588,6 +846,7 @@ mod test { }, thread, }; + use tokio::net::TcpStream; use bytes::{buf::Buf, Bytes, BytesMut}; use futures::{ @@ -595,6 +854,7 @@ mod test { sink::{Sink, SinkExt}, stream::{self, StreamExt}, }; + use ipnet::IpNet; use tokio::{ self, net::UnixStream, @@ -602,18 +862,26 @@ mod test { time::{Duration, Instant}, }; use tokio_util::codec::{length_delimited, Framed}; - use vector_lib::config::{LegacyKey, LogNamespace}; - use vector_lib::lookup::{owned_value_path, path, OwnedValuePath}; + use vector_lib::{ + 
config::{LegacyKey, LogNamespace}, + tcp::TcpKeepaliveConfig, + tls::{CertificateMetadata, MaybeTls}, + }; + use vector_lib::{ + lookup::{owned_value_path, path, OwnedValuePath}, + tls::MaybeTlsSettings, + }; use super::{ - build_framestream_unix_source, spawn_event_handling_tasks, ControlField, ControlHeader, - FrameHandler, + build_framestream_tcp_source, build_framestream_unix_source, spawn_event_handling_tasks, + ControlField, ControlHeader, FrameHandler, TcpFrameHandler, UnixFrameHandler, }; use crate::{ config::{log_schema, ComponentKey}, event::{Event, LogEvent}, shutdown::SourceShutdownCoordinator, - test_util::{collect_n, collect_n_stream}, + sources::util::net::SocketListenAddr, + test_util::{collect_n, collect_n_stream, next_addr}, SourceSender, }; @@ -621,12 +889,8 @@ mod test { struct MockFrameHandler { content_type: String, max_frame_length: usize, - socket_path: PathBuf, multithreaded: bool, max_frame_handling_tasks: u32, - socket_file_mode: Option, - socket_receive_buffer_size: Option, - socket_send_buffer_size: Option, extra_task_handling_routine: F, host_key: Option, timestamp_key: Option, @@ -634,17 +898,71 @@ mod test { log_namespace: LogNamespace, } + #[derive(Clone)] + struct MockUnixFrameHandler { + frame_handler: MockFrameHandler, + socket_path: PathBuf, + socket_file_mode: Option, + socket_receive_buffer_size: Option, + socket_send_buffer_size: Option, + } + + #[derive(Clone)] + struct MockTcpFrameHandler { + frame_handler: MockFrameHandler, + address: SocketListenAddr, + keepalive: Option, + shutdown_timeout_secs: Duration, + tls: MaybeTlsSettings, + tls_client_metadata_key: Option, + receive_buffer_bytes: Option, + max_connection_duration_secs: Option, + max_connections: Option, + permit_origin: Option>, + } + + impl MockTcpFrameHandler { + pub fn new( + addr: SocketAddr, + content_type: String, + multithreaded: bool, + extra_routine: F, + permit_origin: Option>, + ) -> Self { + Self { + frame_handler: MockFrameHandler::new(content_type, multithreaded, extra_routine), + address: addr.into(), + keepalive: None, + shutdown_timeout_secs: Duration::from_secs(30), + tls: MaybeTls::Raw(()), + tls_client_metadata_key: None, + receive_buffer_bytes: None, + max_connection_duration_secs: None, + max_connections: None, + permit_origin, + } + } + } + + impl MockUnixFrameHandler { + pub fn new(content_type: String, multithreaded: bool, extra_routine: F) -> Self { + Self { + frame_handler: MockFrameHandler::new(content_type, multithreaded, extra_routine), + socket_path: tempfile::tempdir().unwrap().into_path().join("unix_test"), + socket_file_mode: None, + socket_receive_buffer_size: None, + socket_send_buffer_size: None, + } + } + } + impl MockFrameHandler { pub fn new(content_type: String, multithreaded: bool, extra_routine: F) -> Self { Self { content_type, max_frame_length: bytesize::kib(100u64) as usize, - socket_path: tempfile::tempdir().unwrap().into_path().join("unix_test"), multithreaded, max_frame_handling_tasks: 0, - socket_file_mode: None, - socket_receive_buffer_size: None, - socket_send_buffer_size: None, extra_task_handling_routine: extra_routine, host_key: Some(owned_value_path!("test_framestream")), timestamp_key: Some(owned_value_path!("my_timestamp")), @@ -684,9 +1002,6 @@ mod test { Some(log_event.into()) } - fn socket_path(&self) -> PathBuf { - self.socket_path.clone() - } fn multithreaded(&self) -> bool { self.multithreaded } @@ -694,6 +1009,58 @@ mod test { self.max_frame_handling_tasks } + fn host_key(&self) -> &Option { + &self.host_key + } + + fn 
timestamp_key(&self) -> Option<&OwnedValuePath> { + self.timestamp_key.as_ref() + } + + fn source_type_key(&self) -> Option<&OwnedValuePath> { + self.source_type_key.as_ref() + } + } + + impl FrameHandler for MockUnixFrameHandler { + fn content_type(&self) -> String { + self.frame_handler.content_type() + } + + fn max_frame_length(&self) -> usize { + self.frame_handler.max_frame_length() + } + + fn handle_event(&self, received_from: Option, frame: Bytes) -> Option { + self.frame_handler.handle_event(received_from, frame) + } + + fn multithreaded(&self) -> bool { + self.frame_handler.multithreaded() + } + + fn max_frame_handling_tasks(&self) -> u32 { + self.frame_handler.max_frame_handling_tasks() + } + + fn host_key(&self) -> &Option { + self.frame_handler.host_key() + } + + fn timestamp_key(&self) -> Option<&OwnedValuePath> { + self.frame_handler.timestamp_key() + } + + fn source_type_key(&self) -> Option<&OwnedValuePath> { + self.frame_handler.source_type_key() + } + } + + impl UnixFrameHandler for MockUnixFrameHandler { + fn socket_path(&self) -> PathBuf { + self.socket_path.clone() + } + fn socket_file_mode(&self) -> Option { self.socket_file_mode } @@ -705,23 +1072,106 @@ mod test { fn socket_send_buffer_size(&self) -> Option { self.socket_send_buffer_size } + } + + impl FrameHandler for MockTcpFrameHandler { + fn content_type(&self) -> String { + self.frame_handler.content_type() + } + + fn max_frame_length(&self) -> usize { + self.frame_handler.max_frame_length() + } + + fn handle_event(&self, received_from: Option, frame: Bytes) -> Option { + self.frame_handler.handle_event(received_from, frame) + } + + fn multithreaded(&self) -> bool { + self.frame_handler.multithreaded() + } + + fn max_frame_handling_tasks(&self) -> u32 { + self.frame_handler.max_frame_handling_tasks() + } fn host_key(&self) -> &Option { - &self.host_key + self.frame_handler.host_key() } fn timestamp_key(&self) -> Option<&OwnedValuePath> { - self.timestamp_key.as_ref() + self.frame_handler.timestamp_key() } fn source_type_key(&self) -> Option<&OwnedValuePath> { - self.source_type_key.as_ref() + self.frame_handler.source_type_key() } } + impl TcpFrameHandler for MockTcpFrameHandler { + fn address(&self) -> SocketListenAddr { + self.address + } + + fn keepalive(&self) -> Option { + self.keepalive + } + + fn shutdown_timeout_secs(&self) -> Duration { + self.shutdown_timeout_secs + } + + fn tls(&self) -> MaybeTlsSettings { + self.tls.clone() + } + + fn tls_client_metadata_key(&self) -> Option { + self.tls_client_metadata_key.clone() + } + + fn receive_buffer_bytes(&self) -> Option { + self.receive_buffer_bytes + } + + fn max_connection_duration_secs(&self) -> Option { + self.max_connection_duration_secs + } + + fn max_connections(&self) -> Option { + self.max_connections + } + + fn insert_tls_client_metadata(&mut self, _: Option) {} + + fn allowed_origins(&self) -> Option<&[IpNet]> { + self.permit_origin.as_deref() + } + } + + fn init_framestream_tcp( + source_id: &str, + addr: &SocketAddr, + frame_handler: impl TcpFrameHandler + Send + Sync + Clone + 'static, + pipeline: SourceSender, + ) -> (JoinHandle>, SourceShutdownCoordinator) { + let source_id = ComponentKey::from(source_id); + let mut shutdown = SourceShutdownCoordinator::default(); + let (shutdown_signal, _) = shutdown.register_source(&source_id, false); + let server = build_framestream_tcp_source(frame_handler, shutdown_signal, pipeline) + .expect("Failed to build framestream tcp source."); + + let join_handle = tokio::spawn(server); + + while 
std::net::TcpStream::connect(addr).is_err() { + thread::sleep(Duration::from_millis(2)); + } + + (join_handle, shutdown) + } + fn init_framestream_unix( source_id: &str, - frame_handler: impl FrameHandler + Send + Sync + Clone + 'static, + frame_handler: impl UnixFrameHandler + Send + Sync + Clone + 'static, pipeline: SourceSender, ) -> ( PathBuf, @@ -745,6 +1195,13 @@ mod test { (socket_path, join_handle, shutdown) } + async fn make_tcp_stream( + addr: SocketAddr, + ) -> Framed { + let socket = TcpStream::connect(&addr).await.unwrap(); + Framed::new(socket, length_delimited::Builder::new().new_codec()) + } + async fn make_unix_stream( path: PathBuf, ) -> Framed { @@ -806,8 +1263,22 @@ mod test { assert_eq!(&frame[..], &expected_content_type[..]); } - fn create_frame_handler(multithreaded: bool) -> impl FrameHandler + Send + Sync + Clone { - MockFrameHandler::new("test_content".to_string(), multithreaded, move || {}) + fn create_frame_handler(multithreaded: bool) -> impl UnixFrameHandler + Send + Sync + Clone { + MockUnixFrameHandler::new("test_content".to_string(), multithreaded, move || {}) + } + + fn create_tcp_frame_handler( + addr: SocketAddr, + multithreaded: bool, + permit_origin: Option>, + ) -> impl TcpFrameHandler + Send + Sync + Clone { + MockTcpFrameHandler::new( + addr, + "test_content".to_string(), + multithreaded, + move || {}, + permit_origin, + ) } async fn signal_shutdown(source_name: &str, shutdown: &mut SourceShutdownCoordinator) { @@ -819,64 +1290,18 @@ mod test { assert!(shutdown_success); } - #[tokio::test(flavor = "multi_thread")] - async fn normal_framestream_singlethreaded() { - let source_name = "test_source"; - let (tx, rx) = SourceSender::new_test(); - let (path, source_handle, mut shutdown) = - init_framestream_unix(source_name, create_frame_handler(false), tx); - let (mut sock_sink, mut sock_stream) = make_unix_stream(path).await.split(); - - //1 - send READY frame (with content_type) - let content_type = Bytes::from(&b"test_content"[..]); - let ready_msg = - create_control_frame_with_content(ControlHeader::Ready, vec![content_type.clone()]); - send_control_frame(&mut sock_sink, ready_msg).await; - - //2 - wait for ACCEPT frame - let mut frame_vec = collect_n_stream(&mut sock_stream, 2).await; - //take second element, because first will be empty (signifying control frame) - assert_eq!(frame_vec[0].as_ref().unwrap().len(), 0); - assert_accept_frame(frame_vec[1].as_mut().unwrap(), content_type); - - //3 - send START frame - send_control_frame(&mut sock_sink, create_control_frame(ControlHeader::Start)).await; - - //4 - send data - send_data_frames( - &mut sock_sink, - vec![Ok(Bytes::from("hello")), Ok(Bytes::from("world"))], - ) - .await; - let events = collect_n(rx, 2).await; - - //5 - send STOP frame - send_control_frame(&mut sock_sink, create_control_frame(ControlHeader::Stop)).await; - - assert_eq!( - events[0].as_log()[log_schema().message_key().unwrap().to_string()], - "hello".into(), - ); - assert_eq!( - events[1].as_log()[log_schema().message_key().unwrap().to_string()], - "world".into(), - ); - - drop(sock_stream); //explicitly drop the stream so we don't get warnings about not using it - - // Ensure source actually shut down successfully. 
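A pattern worth noting in `init_framestream_tcp` above: the helper polls with plain `std::net::TcpStream::connect` until the spawned source is actually accepting connections, so the test cannot race the listener's bind. Isolated as a sketch, using the same imports the test module already pulls in:

```rust
use std::{net::SocketAddr, thread, time::Duration};

// Sketch of the readiness poll used by `init_framestream_tcp`: retry cheap
// synchronous connects until the source's TCP listener is bound and accepting.
fn wait_until_listening(addr: SocketAddr) {
    while std::net::TcpStream::connect(addr).is_err() {
        thread::sleep(Duration::from_millis(2));
    }
}
```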
- signal_shutdown(source_name, &mut shutdown).await; - _ = source_handle.await.unwrap(); - } - - #[tokio::test(flavor = "multi_thread")] - async fn normal_framestream_multithreaded() { - let source_name = "test_source"; - let (tx, rx) = SourceSender::new_test(); - let (path, source_handle, mut shutdown) = - init_framestream_unix(source_name, create_frame_handler(true), tx); - let (mut sock_sink, mut sock_stream) = make_unix_stream(path).await.split(); - + async fn test_normal_framestream< + T: Sink + Unpin, + U: Stream> + Unpin, + V: Stream + Unpin, + >( + source_name: &str, + mut sock_sink: T, + mut sock_stream: U, + rx: V, + mut shutdown: SourceShutdownCoordinator, + source_handle: JoinHandle>, + ) { //1 - send READY frame (with content_type) let content_type = Bytes::from(&b"test_content"[..]); let ready_msg = @@ -918,14 +1343,16 @@ mod test { _ = source_handle.await.unwrap(); } - #[tokio::test(flavor = "multi_thread")] - async fn multiple_content_types() { - let source_name = "test_source"; - let (tx, _) = SourceSender::new_test(); - let (path, source_handle, mut shutdown) = - init_framestream_unix(source_name, create_frame_handler(false), tx); - let (mut sock_sink, mut sock_stream) = make_unix_stream(path).await.split(); - + async fn test_multiple_content_types< + T: Sink + Unpin, + U: Stream> + Unpin, + >( + source_name: &str, + mut sock_sink: T, + mut sock_stream: U, + mut shutdown: SourceShutdownCoordinator, + source_handle: JoinHandle>, + ) { //1 - send READY frame (with content_type) let content_type = Bytes::from(&b"test_content"[..]); let ready_msg = create_control_frame_with_content( @@ -948,6 +1375,146 @@ mod test { _ = source_handle.await.unwrap(); } + #[tokio::test(flavor = "multi_thread")] + #[should_panic] + async fn blocked_framestream_tcp() { + let source_name = "test_source"; + let (tx, rx) = SourceSender::new_test(); + let addr = next_addr(); + let (source_handle, shutdown) = init_framestream_tcp( + source_name, + &addr, + create_tcp_frame_handler(addr, false, Some(vec![])), + tx, + ); + let (sock_sink, sock_stream) = make_tcp_stream(addr).await.split(); + + test_normal_framestream( + source_name, + sock_sink, + sock_stream, + rx, + shutdown, + source_handle, + ) + .await; + } + + #[tokio::test(flavor = "multi_thread")] + async fn normal_framestream_singlethreaded_tcp() { + let source_name = "test_source"; + let (tx, rx) = SourceSender::new_test(); + let addr = next_addr(); + let (source_handle, shutdown) = init_framestream_tcp( + source_name, + &addr, + create_tcp_frame_handler(addr, false, None), + tx, + ); + let (sock_sink, sock_stream) = make_tcp_stream(addr).await.split(); + + test_normal_framestream( + source_name, + sock_sink, + sock_stream, + rx, + shutdown, + source_handle, + ) + .await; + } + + #[tokio::test(flavor = "multi_thread")] + async fn normal_framestream_singlethreaded_unix() { + let source_name = "test_source"; + let (tx, rx) = SourceSender::new_test(); + let (path, source_handle, shutdown) = + init_framestream_unix(source_name, create_frame_handler(false), tx); + let (sock_sink, sock_stream) = make_unix_stream(path).await.split(); + + test_normal_framestream( + source_name, + sock_sink, + sock_stream, + rx, + shutdown, + source_handle, + ) + .await; + } + + #[tokio::test(flavor = "multi_thread")] + async fn normal_framestream_multithreaded_tcp() { + let source_name = "test_source"; + let (tx, rx) = SourceSender::new_test(); + let addr = next_addr(); + let (source_handle, shutdown) = init_framestream_tcp( + source_name, + &addr, + 
create_tcp_frame_handler(addr, true, None), + tx, + ); + let (sock_sink, sock_stream) = make_tcp_stream(addr).await.split(); + + test_normal_framestream( + source_name, + sock_sink, + sock_stream, + rx, + shutdown, + source_handle, + ) + .await; + } + + #[tokio::test(flavor = "multi_thread")] + async fn normal_framestream_multithreaded_unix() { + let source_name = "test_source"; + let (tx, rx) = SourceSender::new_test(); + let (path, source_handle, shutdown) = + init_framestream_unix(source_name, create_frame_handler(true), tx); + let (sock_sink, sock_stream) = make_unix_stream(path).await.split(); + + test_normal_framestream( + source_name, + sock_sink, + sock_stream, + rx, + shutdown, + source_handle, + ) + .await; + } + + #[tokio::test(flavor = "multi_thread")] + async fn multiple_content_types_tcp() { + let source_name = "test_source"; + let (tx, _) = SourceSender::new_test(); + let addr = next_addr(); + let (source_handle, shutdown) = init_framestream_tcp( + source_name, + &addr, + create_tcp_frame_handler(addr, false, None), + tx, + ); + let (sock_sink, sock_stream) = make_tcp_stream(addr).await.split(); + + test_multiple_content_types(source_name, sock_sink, sock_stream, shutdown, source_handle) + .await; + } + + #[tokio::test(flavor = "multi_thread")] + async fn multiple_content_types_unix() { + let source_name = "test_source"; + let (tx, _) = SourceSender::new_test(); + let (path, source_handle, shutdown) = + init_framestream_unix(source_name, create_frame_handler(false), tx); + let (sock_sink, sock_stream) = make_unix_stream(path).await.split(); + + test_multiple_content_types(source_name, sock_sink, sock_stream, shutdown, source_handle) + .await; + } + #[tokio::test(flavor = "multi_thread")] async fn wrong_content_type() { let source_name = "test_source"; diff --git a/src/sources/util/net/mod.rs b/src/sources/util/net/mod.rs index 6a3329c88e0ff..a1a83662ccebb 100644 --- a/src/sources/util/net/mod.rs +++ b/src/sources/util/net/mod.rs @@ -11,7 +11,10 @@ use vector_lib::configurable::configurable_component; use crate::config::{Protocol, Resource}; #[cfg(feature = "sources-utils-net-tcp")] -pub use self::tcp::{TcpNullAcker, TcpSource, TcpSourceAck, TcpSourceAcker}; +pub use self::tcp::{ + request_limiter::RequestLimiter, try_bind_tcp_listener, TcpNullAcker, TcpSource, TcpSourceAck, + TcpSourceAcker, MAX_IN_FLIGHT_EVENTS_TARGET, +}; #[cfg(feature = "sources-utils-net-udp")] pub use self::udp::try_bind_udp_socket; diff --git a/src/sources/util/net/tcp/mod.rs b/src/sources/util/net/tcp/mod.rs index 3b8fe135b6081..f29a8df4e6b32 100644 --- a/src/sources/util/net/tcp/mod.rs +++ b/src/sources/util/net/tcp/mod.rs @@ -1,10 +1,11 @@ -mod request_limiter; +pub mod request_limiter; use std::{io, mem::drop, net::SocketAddr, time::Duration}; use bytes::Bytes; use futures::{future::BoxFuture, FutureExt, StreamExt}; use futures_util::future::OptionFuture; +use ipnet::IpNet; use listenfd::ListenFd; use smallvec::SmallVec; use socket2::SockRef; @@ -42,12 +43,13 @@ use crate::{ SourceSender, }; -const MAX_IN_FLIGHT_EVENTS_TARGET: usize = 100_000; +pub const MAX_IN_FLIGHT_EVENTS_TARGET: usize = 100_000; -async fn try_bind_tcp_listener( +pub async fn try_bind_tcp_listener( addr: SocketListenAddr, mut listenfd: ListenFd, tls: &MaybeTlsSettings, + allowlist: Option>, ) -> crate::Result { match addr { SocketListenAddr::SocketAddr(addr) => tls.bind(&addr).await.map_err(Into::into), @@ -60,6 +62,7 @@ async fn try_bind_tcp_listener( } }, } + .map(|listener| listener.with_allowlist(allowlist)) } 
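`with_allowlist` above is the hook through which `permit_origin` reaches the bound listener. A minimal sketch of the admission rule this implies — `is_origin_allowed` is a hypothetical helper for illustration, not Vector's actual API:

```rust
use std::net::IpAddr;

use ipnet::IpNet;

// Hypothetical helper illustrating the allowlist semantics assumed here:
// no configured allowlist admits every origin; otherwise the peer address
// must fall inside at least one of the configured networks.
fn is_origin_allowed(allowlist: Option<&[IpNet]>, peer: IpAddr) -> bool {
    allowlist.map_or(true, |nets| nets.iter().any(|net| net.contains(&peer)))
}
```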
#[derive(Clone, Copy, Eq, PartialEq)] @@ -118,6 +121,7 @@ where cx: SourceContext, acknowledgements: SourceAcknowledgementsConfig, max_connections: Option, + allowlist: Option>, source_name: &'static str, log_namespace: LogNamespace, ) -> crate::Result { @@ -125,7 +129,7 @@ where Ok(Box::pin(async move { let listenfd = ListenFd::from_env(); - let listener = try_bind_tcp_listener(addr, listenfd, &tls) + let listener = try_bind_tcp_listener(addr, listenfd, &tls, allowlist) .await .map_err(|error| { emit!(SocketBindError { diff --git a/website/cue/reference/components/sources/base/dnstap.cue b/website/cue/reference/components/sources/base/dnstap.cue index f6a31486c5b8d..83b40963f4379 100644 --- a/website/cue/reference/components/sources/base/dnstap.cue +++ b/website/cue/reference/components/sources/base/dnstap.cue @@ -1,6 +1,23 @@ package metadata base: components: sources: dnstap: configuration: { + address: { + description: """ + The socket address to listen for connections on, or `systemd{#N}` to use the Nth socket passed by + systemd socket activation. + + If a socket address is used, it _must_ include a port. + """ + relevant_when: "mode = \"tcp\"" + required: true + type: string: examples: ["0.0.0.0:9000", "systemd", "systemd#3"] + } + connection_limit: { + description: "The maximum number of TCP connections that are allowed at any given time." + relevant_when: "mode = \"tcp\"" + required: false + type: uint: unit: "connections" + } host_key: { description: """ Overrides the name of the log field used to add the source path to each event. @@ -14,6 +31,26 @@ base: components: sources: dnstap: configuration: { required: false type: string: {} } + keepalive: { + description: "TCP keepalive settings for socket-based components." + relevant_when: "mode = \"tcp\"" + required: false + type: object: options: time_secs: { + description: "The time to wait before starting to send TCP keepalive probes on an idle connection." + required: false + type: uint: unit: "seconds" + } + } + max_connection_duration_secs: { + description: """ + Maximum duration to keep each connection open. Connections open for longer than this duration are closed. + + This is helpful for load balancing long-lived connections. + """ + relevant_when: "mode = \"tcp\"" + required: false + type: uint: unit: "seconds" + } max_frame_handling_tasks: { description: "Maximum number of frames that can be processed concurrently." required: false @@ -31,11 +68,43 @@ base: components: sources: dnstap: configuration: { unit: "bytes" } } + mode: { + description: "The type of dnstap socket to use." + required: true + type: string: enum: { + tcp: "Listen on TCP." + unix: "Listen on a Unix domain socket" + } + } multithreaded: { description: "Whether or not to concurrently process DNSTAP frames." required: false type: bool: {} } + permit_origin: { + description: """ + List of allowed origin IP networks + + By default, all origins are allowed + """ + relevant_when: "mode = \"tcp\"" + required: false + type: array: items: type: string: {} + } + port_key: { + description: """ + Overrides the name of the log field used to add the peer host's port to each event. + + The value will be the peer host's port i.e. `9000`. + + By default, `"port"` is used. + + Set to `""` to suppress this key. + """ + relevant_when: "mode = \"tcp\"" + required: false + type: string: default: "port" + } raw_data_only: { description: """ Whether or not to skip parsing or decoding of DNSTAP frames. 
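Taken together, the options documented above suggest a configuration along these lines for the new TCP mode. This is a hedged sketch only; the address, networks, and limits are illustrative values, not defaults:

```toml
[sources.my_dnstap_source]
type = "dnstap"
mode = "tcp"
address = "0.0.0.0:9000"
# Illustrative values; only `mode` and `address` are required in TCP mode.
permit_origin = ["10.0.0.0/8", "192.168.0.0/16"]
connection_limit = 64
shutdown_timeout_secs = 30
```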
@@ -46,6 +115,21 @@ base: components: sources: dnstap: configuration: { required: false type: bool: {} } + receive_buffer_bytes: { + description: "The size of the receive buffer used for each connection." + relevant_when: "mode = \"tcp\"" + required: false + type: uint: unit: "bytes" + } + shutdown_timeout_secs: { + description: "The timeout before a connection is forcefully closed during shutdown." + relevant_when: "mode = \"tcp\"" + required: false + type: uint: { + default: 30 + unit: "seconds" + } + } socket_file_mode: { description: """ Unix file mode bits to be applied to the unix socket file as its designated file permissions. @@ -53,7 +137,8 @@ base: components: sources: dnstap: configuration: { Note: The file mode value can be specified in any numeric format supported by your configuration language, but it is most intuitive to use an octal number. """ - required: false + relevant_when: "mode = \"unix\"" + required: false type: uint: {} } socket_path: { @@ -63,7 +148,8 @@ base: components: sources: dnstap: configuration: { The DNS server must be configured to send its DNSTAP data to this socket file. The socket file is created if it doesn't already exist when the source first starts. """ - required: true + relevant_when: "mode = \"unix\"" + required: true type: string: {} } socket_receive_buffer_size: { @@ -72,7 +158,8 @@ base: components: sources: dnstap: configuration: { This should not typically needed to be changed. """ - required: false + relevant_when: "mode = \"unix\"" + required: false type: uint: unit: "bytes" } socket_send_buffer_size: { @@ -81,7 +168,109 @@ base: components: sources: dnstap: configuration: { This should not typically needed to be changed. """ - required: false + relevant_when: "mode = \"unix\"" + required: false type: uint: unit: "bytes" } + tls: { + description: "TlsEnableableConfig for `sources`, adding metadata from the client certificate." + relevant_when: "mode = \"tcp\"" + required: false + type: object: options: { + alpn_protocols: { + description: """ + Sets the list of supported ALPN protocols. + + Declare the supported ALPN protocols, which are used during negotiation with peer. They are prioritized in the order + that they are defined. + """ + required: false + type: array: items: type: string: examples: ["h2"] + } + ca_file: { + description: """ + Absolute path to an additional CA certificate file. + + The certificate must be in the DER or PEM (X.509) format. Additionally, the certificate can be provided as an inline string in PEM format. + """ + required: false + type: string: examples: ["/path/to/certificate_authority.crt"] + } + client_metadata_key: { + description: "Event field for client certificate metadata." + required: false + type: string: {} + } + crt_file: { + description: """ + Absolute path to a certificate file used to identify this server. + + The certificate must be in DER, PEM (X.509), or PKCS#12 format. Additionally, the certificate can be provided as + an inline string in PEM format. + + If this is set, and is not a PKCS#12 archive, `key_file` must also be set. + """ + required: false + type: string: examples: ["/path/to/host_certificate.crt"] + } + enabled: { + description: """ + Whether or not to require TLS for incoming or outgoing connections. + + When enabled and used for incoming connections, an identity certificate is also required. See `tls.crt_file` for + more information. 
+ """ + required: false + type: bool: {} + } + key_file: { + description: """ + Absolute path to a private key file used to identify this server. + + The key must be in DER or PEM (PKCS#8) format. Additionally, the key can be provided as an inline string in PEM format. + """ + required: false + type: string: examples: ["/path/to/host_certificate.key"] + } + key_pass: { + description: """ + Passphrase used to unlock the encrypted key file. + + This has no effect unless `key_file` is set. + """ + required: false + type: string: examples: ["${KEY_PASS_ENV_VAR}", "PassWord1"] + } + verify_certificate: { + description: """ + Enables certificate verification. + + If enabled, certificates must not be expired and must be issued by a trusted + issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the + certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and + so on until the verification process reaches a root certificate. + + Relevant for both incoming and outgoing connections. + + Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. + """ + required: false + type: bool: {} + } + verify_hostname: { + description: """ + Enables hostname verification. + + If enabled, the hostname used to connect to the remote host must be present in the TLS certificate presented by + the remote host, either as the Common Name or as an entry in the Subject Alternative Name extension. + + Only relevant for outgoing connections. + + Do NOT set this to `false` unless you understand the risks of not verifying the remote hostname. + """ + required: false + type: bool: {} + } + } + } } diff --git a/website/cue/reference/components/sources/base/socket.cue b/website/cue/reference/components/sources/base/socket.cue index d675cd9f77cb3..68994a0e52a62 100644 --- a/website/cue/reference/components/sources/base/socket.cue +++ b/website/cue/reference/components/sources/base/socket.cue @@ -393,6 +393,16 @@ base: components: sources: socket: configuration: { required: true type: string: examples: ["/path/to/socket"] } + permit_origin: { + description: """ + List of allowed origin IP networks + + By default, all origins are allowed + """ + relevant_when: "mode = \"tcp\"" + required: false + type: array: items: type: string: {} + } port_key: { description: """ Overrides the name of the log field used to add the peer host's port to each event. diff --git a/website/cue/reference/components/sources/dnstap.cue b/website/cue/reference/components/sources/dnstap.cue index 0bf149c18df36..19235f95e0678 100644 --- a/website/cue/reference/components/sources/dnstap.cue +++ b/website/cue/reference/components/sources/dnstap.cue @@ -26,39 +26,51 @@ components: sources: dnstap: { } direction: "incoming" port: 0 - protocols: ["unix"] + protocols: ["unix", "tcp"] socket: "/run/bind/dnstap.sock" - ssl: "disabled" + ssl: "optional" } } - tls: enabled: false + receive_buffer_bytes: { + enabled: true + relevant_when: "mode = `tcp`" + } + keepalive: enabled: true + tls: { + enabled: true + can_verify_certificate: true + can_add_client_metadata: true + enabled_default: false + } } } support: { - targets: { - "x86_64-pc-windows-msv": false - } - requirements: [] warnings: [] notices: [] } configuration: base.components.sources.dnstap.configuration & { - socket_receive_buffer_size: warnings: [ - """ - System-wide setting of maximum socket send buffer size (i.e. 
value of '/proc/sys/net/core/wmem_max' on Linux) may need adjustment accordingly. - """, - ] + socket_receive_buffer_size: { + warnings: [ + """ + System-wide setting of maximum socket send buffer size (i.e. value of '/proc/sys/net/core/wmem_max' on Linux) may need adjustment accordingly. + """, + ] + } - socket_send_buffer_size: warnings: [ - """ - System-wide setting of maximum socket send buffer size (i.e. value of '/proc/sys/net/core/wmem_max' on Linux) may need adjustment accordingly. - """, - ] + socket_send_buffer_size: { + warnings: [ + """ + System-wide setting of maximum socket send buffer size (i.e. value of '/proc/sys/net/core/wmem_max' on Linux) may need adjustment accordingly. + """, + ] + } - socket_file_mode: type: uint: examples: [0o777, 0o754, 0o777] + socket_file_mode: { + type: uint: examples: [0o777, 0o754, 0o777] + } } output: logs: event: { @@ -882,6 +894,7 @@ components: sources: dnstap: { { title: "Dnstap events for a pair of regular DNS query and response." configuration: { + mode: "unix" max_frame_length: 102400 socket_file_mode: 508 socket_path: "/run/bind/dnstap.sock" @@ -1024,6 +1037,7 @@ components: sources: dnstap: { { title: "Dnstap events for a pair of DNS update request and response." configuration: { + mode: "unix" socket_file_mode: 508 socket_path: "/run/bind/dnstap.sock" socket_receive_buffer_size: 10485760 @@ -1146,7 +1160,7 @@ components: sources: dnstap: { server_uds: { title: "Server Unix Domain Socket (UDS)" body: """ - The `dnstap` source receives dnstap data through a Unix Domain Socket (aka UDS). The + The `dnstap` source can receive dnstap data through a Unix Domain Socket (aka UDS). The path of the UDS must be explicitly specified in the source's configuration. Upon startup, the `dnstap` source creates a new server UDS at the specified path. 
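The `socket_file_mode` shown in the example in the next hunk boils down to a `chmod` on that socket file once it is created, so the DNS server process can write to it. A rough sketch of the operation, illustrative rather than Vector's exact code:

```rust
use std::{fs, io, os::unix::fs::PermissionsExt, path::Path};

// Illustrative sketch: apply the configured `socket_file_mode` bits to the
// freshly created Unix socket file, e.g. mode = 0o774.
fn apply_socket_file_mode(path: &Path, mode: u32) -> io::Result<()> {
    fs::set_permissions(path, fs::Permissions::from_mode(mode))
}
```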
@@ -1162,6 +1176,7 @@ components: sources: dnstap: { ```toml [sources.my_dnstap_source] type = "dnstap" + mode = "unix" socket_file_mode: 0o774 # Other configs ``` @@ -1196,6 +1211,7 @@ components: sources: dnstap: { ```toml [sources.my_dnstap_source] type = "dnstap" + mode = "unix" socket_receive_buffer_size = 10_485_760 socket_send_buffer_size = 10_485_760 # Other configs From 482ed3cb7a9de9763d7e623c8a691ac4d9911638 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Thu, 7 Mar 2024 16:41:19 +0100 Subject: [PATCH 0104/1491] feat(dnsmsg_parser): add support for more record types (HINFO, CSYNC, OPT, missing DNSSEC types) (#19921) * feat(dnsmsg_parser): add support for parsing HINFO records Fixes: #19847 * Add support for DNSSEC KEY RData * Add support for DNSSEC CDNSKEY RData * Add support for DNSSEC CDS Rdata * Rename `format_dnskey` to `format_dnskey_record` * Add support for CSYNC RData type * Add support for OPT RData * Add changelog entry * Fix typo in changelog * Reuse opt parser from edns for OPT RData * Remove @ from authors Co-authored-by: Jesse Szwedko * Fix warnings in dns_message_parser.rs * Fix OPT RData parser --------- Co-authored-by: Jesse Szwedko --- .github/actions/spelling/allow.txt | 2 + ...21_missing_dns_record_types.enhancement.md | 3 + lib/dnsmsg-parser/src/dns_message.rs | 1 - lib/dnsmsg-parser/src/dns_message_parser.rs | 233 ++++++++++++++---- 4 files changed, 186 insertions(+), 53 deletions(-) create mode 100644 changelog.d/19921_missing_dns_record_types.enhancement.md diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index b258ab9021bb6..d764f60fa9e84 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -254,6 +254,7 @@ buildname buildroot bytestream callsites +cdnskey cncf codepath codepaths @@ -262,6 +263,7 @@ compiletime coredns corejs coreutils +csync curta daemonset dalek diff --git a/changelog.d/19921_missing_dns_record_types.enhancement.md b/changelog.d/19921_missing_dns_record_types.enhancement.md new file mode 100644 index 0000000000000..d8eca30498423 --- /dev/null +++ b/changelog.d/19921_missing_dns_record_types.enhancement.md @@ -0,0 +1,3 @@ +Added support for more DNS record types (HINFO, CSYNC, OPT, DNSSEC CDS, DNSSEC CDNSKEY, DNSSEC KEY) + +authors: esensar diff --git a/lib/dnsmsg-parser/src/dns_message.rs b/lib/dnsmsg-parser/src/dns_message.rs index 0d40e713b517b..d22bff280631d 100644 --- a/lib/dnsmsg-parser/src/dns_message.rs +++ b/lib/dnsmsg-parser/src/dns_message.rs @@ -6,7 +6,6 @@ pub(super) const RTYPE_MB: u16 = 7; pub(super) const RTYPE_MG: u16 = 8; pub(super) const RTYPE_MR: u16 = 9; pub(super) const RTYPE_WKS: u16 = 11; -pub(super) const RTYPE_HINFO: u16 = 13; pub(super) const RTYPE_MINFO: u16 = 14; pub(super) const RTYPE_RP: u16 = 17; pub(super) const RTYPE_AFSDB: u16 = 18; diff --git a/lib/dnsmsg-parser/src/dns_message_parser.rs b/lib/dnsmsg-parser/src/dns_message_parser.rs index 0f83a4b473c58..73d45404ef813 100644 --- a/lib/dnsmsg-parser/src/dns_message_parser.rs +++ b/lib/dnsmsg-parser/src/dns_message_parser.rs @@ -1,16 +1,19 @@ -use std::fmt::Write as _; use std::str::Utf8Error; +use std::{fmt::Write as _, ops::Deref}; use data_encoding::{BASE32HEX_NOPAD, BASE64, HEXUPPER}; use hickory_proto::{ error::ProtoError, - op::{message::Message as TrustDnsMessage, Edns, Query}, + op::{message::Message as TrustDnsMessage, Query}, rr::{ - dnssec::{rdata::DNSSECRData, Algorithm, SupportedAlgorithms}, + dnssec::{ + rdata::{DNSSECRData, DNSKEY, DS}, + 
Algorithm, SupportedAlgorithms, + }, rdata::{ caa::Value, opt::{EdnsCode, EdnsOption}, - A, AAAA, NULL, SVCB, + A, AAAA, NULL, OPT, SVCB, }, record_data::RData, resource::Record, @@ -366,20 +369,6 @@ impl DnsMessageParser { dns_message::RTYPE_WKS => self.parse_wks_rdata(rdata.anything()), - dns_message::RTYPE_HINFO => { - let mut decoder = BinDecoder::new(rdata.anything()); - let cpu = parse_character_string(&mut decoder)?; - let os = parse_character_string(&mut decoder)?; - Ok(( - Some(format!( - "\"{}\" \"{}\"", - escape_string_for_text_representation(cpu), - escape_string_for_text_representation(os) - )), - None, - )) - } - dns_message::RTYPE_MINFO => { let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let rmailbx = parse_domain_name(&mut decoder)?; @@ -532,6 +521,11 @@ fn format_rdata(rdata: &RData) -> DnsParserResult<(Option, Option Ok((Some(ip.to_string()), None)), RData::ANAME(name) => Ok((Some(name.to_string()), None)), RData::CNAME(name) => Ok((Some(name.to_string()), None)), + RData::CSYNC(csync) => { + // Using CSYNC's formatter since not all data is exposed otherwise + let csync_rdata = format!("{}", csync); + Ok((Some(csync_rdata), None)) + } RData::MX(mx) => { let srv_rdata = format!("{} {}", mx.preference(), mx.exchange(),); Ok((Some(srv_rdata), None)) @@ -660,6 +654,16 @@ fn format_rdata(rdata: &RData) -> DnsParserResult<(Option, Option { + let hinfo_data = format!( + r#""{}" "{}""#, + std::str::from_utf8(hinfo.cpu()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })?, + std::str::from_utf8(hinfo.os()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })?, + ); + Ok((Some(hinfo_data), None)) + } RData::HTTPS(https) => { let https_data = format_svcb_record(&https.0); Ok((Some(https_data), None)) @@ -668,36 +672,36 @@ fn format_rdata(rdata: &RData) -> DnsParserResult<(Option, Option { + let parsed = parse_edns_options(opt)?; + let ede_data = parsed.0.iter().map(|entry| EdnsOptionEntry { + opt_code: 15u16, + opt_name: "EDE".to_string(), + opt_data: format!( + "EDE={}({}){}", + entry.info_code(), + entry.purpose().unwrap_or(""), + entry.extra_text().unwrap_or("".to_string()) + ), + }); + let opt_data = parsed + .1 + .into_iter() + .chain(ede_data) + .map(|entry| format!("{}={}", entry.opt_name, entry.opt_data)) + .collect::>() + .join(","); + Ok((Some(opt_data), None)) + } RData::DNSSEC(dnssec) => match dnssec { // See https://tools.ietf.org/html/rfc4034 for details // on dnssec related rdata formats - DNSSECRData::DS(ds) => { - let ds_rdata = format!( - "{} {} {} {}", - ds.key_tag(), - u8::from(ds.algorithm()), - u8::from(ds.digest_type()), - HEXUPPER.encode(ds.digest()) - ); - Ok((Some(ds_rdata), None)) - } - DNSSECRData::DNSKEY(dnskey) => { - let dnskey_rdata = format!( - "{} 3 {} {}", - { - if dnskey.revoke() { - 0b0000_0000_0000_0000 - } else if dnskey.zone_key() && dnskey.secure_entry_point() { - 0b0000_0001_0000_0001 - } else { - 0b0000_0001_0000_0000 - } - }, - u8::from(dnskey.algorithm()), - BASE64.encode(dnskey.public_key()) - ); - Ok((Some(dnskey_rdata), None)) + DNSSECRData::CDS(cds) => Ok((Some(format_ds_record(cds.deref())), None)), + DNSSECRData::DS(ds) => Ok((Some(format_ds_record(ds)), None)), + DNSSECRData::CDNSKEY(cdnskey) => { + Ok((Some(format_dnskey_record(cdnskey.deref())), None)) } + DNSSECRData::DNSKEY(dnskey) => Ok((Some(format_dnskey_record(dnskey)), None)), DNSSECRData::NSEC(nsec) => { let nsec_rdata = format!( "{} {}", @@ -776,6 +780,16 @@ fn format_rdata(rdata: &RData) -> 
DnsParserResult<(Option, Option { + let key_rdata = format!( + "{} {} {} {}", + key.flags(), + u8::from(key.protocol()), + u8::from(key.algorithm()), + BASE64.encode(key.public_key()) + ); + Ok((Some(key_rdata), None)) + } DNSSECRData::Unknown { code: _, rdata } => Ok((None, Some(rdata.anything().to_vec()))), _ => Err(DnsMessageParserError::SimpleError { cause: format!("Unsupported rdata {:?}", rdata), @@ -807,6 +821,33 @@ fn format_svcb_record(svcb: &SVCB) -> String { ) } +fn format_dnskey_record(dnskey: &DNSKEY) -> String { + format!( + "{} 3 {} {}", + { + if dnskey.revoke() { + 0b0000_0000_0000_0000 + } else if dnskey.zone_key() && dnskey.secure_entry_point() { + 0b0000_0001_0000_0001 + } else { + 0b0000_0001_0000_0000 + } + }, + u8::from(dnskey.algorithm()), + BASE64.encode(dnskey.public_key()) + ) +} + +fn format_ds_record(ds: &DS) -> String { + format!( + "{} {} {} {}", + ds.key_tag(), + u8::from(ds.algorithm()), + u8::from(ds.digest_type()), + HEXUPPER.encode(ds.digest()) + ) +} + fn parse_response_code(rcode: u16) -> Option<&'static str> { match rcode { 0 => Some("NoError"), // 0 NoError No Error [RFC1035] @@ -868,7 +909,7 @@ fn parse_dns_update_message_header(dns_message: &TrustDnsMessage) -> UpdateHeade fn parse_edns(dns_message: &TrustDnsMessage) -> Option> { dns_message.extensions().as_ref().map(|edns| { - parse_edns_options(edns).map(|(ede, rest)| OptPseudoSection { + parse_edns_options(edns.options()).map(|(ede, rest)| OptPseudoSection { extended_rcode: edns.rcode_high(), version: edns.version(), dnssec_ok: edns.dnssec_ok(), @@ -879,9 +920,8 @@ fn parse_edns(dns_message: &TrustDnsMessage) -> Option DnsParserResult<(Vec, Vec)> { +fn parse_edns_options(edns: &OPT) -> DnsParserResult<(Vec, Vec)> { let ede_opts: Vec = edns - .options() .as_ref() .iter() .filter_map(|(_, option)| { @@ -897,7 +937,6 @@ fn parse_edns_options(edns: &Edns) -> DnsParserResult<(Vec, Vec, DnsMessageParserError>>()?; let rest: Vec = edns - .options() .as_ref() .iter() .filter(|(&code, _)| u16::from(code) != EDE_OPTION_CODE) @@ -1175,15 +1214,23 @@ fn format_bytes_as_hex_string(bytes: &[u8]) -> String { #[cfg(test)] mod tests { use std::{ + collections::HashMap, net::{Ipv4Addr, Ipv6Addr}, str::FromStr, }; + #[allow(deprecated)] use hickory_proto::rr::{ dnssec::{ rdata::{ - dnskey::DNSKEY, ds::DS, nsec::NSEC, nsec3::NSEC3, nsec3param::NSEC3PARAM, sig::SIG, - DNSSECRData, RRSIG, + dnskey::DNSKEY, + ds::DS, + key::{KeyTrust, KeyUsage, Protocol, UpdateScope}, + nsec::NSEC, + nsec3::NSEC3, + nsec3param::NSEC3PARAM, + sig::SIG, + DNSSECRData, KEY, RRSIG, }, Algorithm as DNSSEC_Algorithm, DigestType, Nsec3HashAlgorithm, }, @@ -1193,7 +1240,7 @@ mod tests { sshfp::{Algorithm, FingerprintType}, svcb, tlsa::{CertUsage, Matching, Selector}, - CAA, HTTPS, NAPTR, SSHFP, TLSA, TXT, + CAA, CSYNC, HINFO, HTTPS, NAPTR, OPT, SSHFP, TLSA, TXT, }, }; @@ -1315,6 +1362,24 @@ mod tests { assert_eq!(dns_response_message.answer_section[0].rdata_bytes, None); } + #[test] + fn test_parse_response_with_hinfo_rdata() { + let raw_response_message_base64 = + "wS2BgAABAAEAAAAAB3RyYWNrZXIEZGxlcgNvcmcAAP8AAcAMAA0AAQAAC64ACQdSRkM4NDgyAA=="; + let raw_response_message = BASE64 + .decode(raw_response_message_base64.as_bytes()) + .expect("Invalid base64 encoded data."); + let dns_response_message = DnsMessageParser::new(raw_response_message) + .parse_as_query_message() + .expect("Invalid DNS query message."); + assert_eq!( + dns_response_message.answer_section[0].rdata, + Some(r#""RFC8482" """#.to_string()) + ); + 
assert_eq!(dns_response_message.answer_section[0].record_type_id, 13u16); + assert_eq!(dns_response_message.answer_section[0].rdata_bytes, None); + } + #[test] fn test_format_bytes_as_hex_string() { assert_eq!( @@ -1676,6 +1741,36 @@ mod tests { } } + #[test] + fn test_format_rdata_for_key_type() { + let rdata = RData::DNSSEC(DNSSECRData::KEY(KEY::new( + KeyTrust::NotPrivate, + KeyUsage::Host, + #[allow(deprecated)] + UpdateScope { + zone: false, + strong: false, + unique: true, + general: true, + }, + Protocol::DNSSEC, + DNSSEC_Algorithm::RSASHA256, + vec![ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 29, 31, + ], + ))); + let rdata_text = format_rdata(&rdata); + assert!(rdata_text.is_ok()); + if let Ok((parsed, raw_rdata)) = rdata_text { + assert!(raw_rdata.is_none()); + assert_eq!( + "16387 3 8 AAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxwdHR8=", + parsed.unwrap() + ); + } + } + // rsig is a derivation of the SIG record data, but the upstream crate does not handle that with an trait // so there isn't really a great way to reduce code duplication here. #[test] @@ -1783,7 +1878,41 @@ mod tests { #[test] fn test_format_rdata_for_hinfo_type() { - test_format_rdata("BWludGVsBWxpbnV4", 13, "\"intel\" \"linux\""); + let rdata = RData::HINFO(HINFO::new("intel".to_string(), "linux".to_string())); + let rdata_text = format_rdata(&rdata); + assert!(rdata_text.is_ok()); + if let Ok((parsed, raw_rdata)) = rdata_text { + assert!(raw_rdata.is_none()); + assert_eq!(r#""intel" "linux""#, parsed.unwrap()); + } + } + + #[test] + fn test_format_rdata_for_csync_type() { + let types = vec![RecordType::A, RecordType::NS, RecordType::AAAA]; + let rdata = RData::CSYNC(CSYNC::new(123, true, true, types)); + let rdata_text = format_rdata(&rdata); + assert!(rdata_text.is_ok()); + if let Ok((parsed, raw_rdata)) = rdata_text { + assert!(raw_rdata.is_none()); + assert_eq!("123 3 A NS AAAA", parsed.unwrap()); + } + } + + #[test] + fn test_format_rdata_for_opt_type() { + let mut options = HashMap::new(); + options.insert( + EdnsCode::LLQ, + EdnsOption::Unknown(u16::from(EdnsCode::LLQ), vec![0x01; 18]), + ); + let rdata = RData::OPT(OPT::new(options)); + let rdata_text = format_rdata(&rdata); + assert!(rdata_text.is_ok()); + if let Ok((parsed, raw_rdata)) = rdata_text { + assert!(raw_rdata.is_none()); + assert_eq!("LLQ=AQEBAQEBAQEBAQEBAQEBAQEB", parsed.unwrap()); + } } #[test] From d505045620cc5272be54b42fdd01abb8c0486d50 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Thu, 7 Mar 2024 13:56:54 -0800 Subject: [PATCH 0105/1491] docs(statsd source): Update statsd doc to mention timing conversion (#20033) * docs(statsd source): Update statsd doc to mentiont timing conversion Ref: https://github.com/vectordotdev/vector/issues/20019 Signed-off-by: Jesse Szwedko * Update website/cue/reference/components/sources/statsd.cue Co-authored-by: May Lee --------- Signed-off-by: Jesse Szwedko Co-authored-by: May Lee --- website/cue/reference/components/sources/statsd.cue | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/website/cue/reference/components/sources/statsd.cue b/website/cue/reference/components/sources/statsd.cue index 61d289c8ead17..4f378e889d97c 100644 --- a/website/cue/reference/components/sources/statsd.cue +++ b/website/cue/reference/components/sources/statsd.cue @@ -65,6 +65,13 @@ components: sources: statsd: { } how_it_works: { + timings: { + title: "StatsD timings" + body: """ + Incoming timings are emitted as distributions. 
Timings in milliseconds (`ms`) are + converted to seconds (`s`). + """ + } timestamps: { title: "Timestamps" body: """ From d5c8a77b5751c4d2277cee6ee76a1903873c5873 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 7 Mar 2024 16:32:11 -0700 Subject: [PATCH 0106/1491] enhancement(datadog_agent source): add `parse_ddtags` config setting to parse the `ddtags` log event field into an object (#20003) * implementation * render cue * changelog * spell checker * spell check ruined my fun * cue * feedback * feedback bruce- consistent parsing behavior and cleaner unit tests --- ...atadog_agent_ddtags_parsing.enhancement.md | 4 + src/sources/datadog_agent/logs.rs | 100 +++++++++++++++++- src/sources/datadog_agent/mod.rs | 18 +++- src/sources/datadog_agent/tests.rs | 57 ++++++++++ .../components/sources/base/datadog_agent.cue | 8 ++ 5 files changed, 185 insertions(+), 2 deletions(-) create mode 100644 changelog.d/datadog_agent_ddtags_parsing.enhancement.md diff --git a/changelog.d/datadog_agent_ddtags_parsing.enhancement.md b/changelog.d/datadog_agent_ddtags_parsing.enhancement.md new file mode 100644 index 0000000000000..e68bc346d45cc --- /dev/null +++ b/changelog.d/datadog_agent_ddtags_parsing.enhancement.md @@ -0,0 +1,4 @@ +The `datadog_agent` source now contains a configuration setting `parse_ddtags`, which is disabled by default. + +When enabled, the `ddtags` field (a comma separated list of key-value strings) is parsed and expanded into an +object in the event. diff --git a/src/sources/datadog_agent/logs.rs b/src/sources/datadog_agent/logs.rs index 3492f3f0c4813..7202376d2eac2 100644 --- a/src/sources/datadog_agent/logs.rs +++ b/src/sources/datadog_agent/logs.rs @@ -8,6 +8,8 @@ use vector_lib::codecs::StreamDecodingError; use vector_lib::internal_event::{CountByteSize, InternalEventHandle as _}; use vector_lib::lookup::path; use vector_lib::{config::LegacyKey, EstimatedJsonEncodedSizeOf}; +use vrl::core::Value; +use vrl::value::{KeyString, ObjectMap}; use warp::{filters::BoxedFilter, path as warp_path, path::FullPath, reply::Response, Filter}; use crate::{ @@ -143,12 +145,19 @@ pub(crate) fn decode_log_body( path!("ddsource"), ddsource.clone(), ); + + let ddtags: Value = if source.parse_ddtags { + parse_ddtags(&ddtags) + } else { + ddtags.clone().into() + }; + namespace.insert_source_metadata( source_name, log, Some(LegacyKey::InsertIfEmpty(path!("ddtags"))), path!("ddtags"), - ddtags.clone(), + ddtags, ); namespace.insert_standard_vector_source_metadata( @@ -192,3 +201,92 @@ pub(crate) fn decode_log_body( Ok(decoded) } + +// ddtags input is a string containing a list of tags which +// can include both bare tags and key-value pairs. +// the tag list members are separated by `,` and the +// tag-value pairs are separated by `:`. +// +// The output is an Object regardless of the input string. +// Bare tags are constructed as a k-v pair with a null value. 
+fn parse_ddtags(ddtags_raw: &Bytes) -> Value { + if ddtags_raw.is_empty() { + return ObjectMap::new().into(); + } + + let ddtags_str = String::from_utf8_lossy(ddtags_raw); + + // The value is a single bare tag + if !ddtags_str.contains(',') && !ddtags_str.contains(':') { + return ObjectMap::from([(KeyString::from(ddtags_str), Value::Null)]).into(); + } + + // There are multiple tags, which could be either bare or pairs + let ddtags_object: ObjectMap = ddtags_str + .split(',') + .filter(|kv| !kv.is_empty()) + .map(|kv| match kv.split_once(':') { + Some((k, v)) => (KeyString::from(k), Value::Bytes(Bytes::from(v.to_string()))), + None => (KeyString::from(kv), Value::Null), + }) + .collect(); + + if ddtags_object.is_empty() && !ddtags_str.is_empty() { + warn!(message = "`parse_ddtags` set to true and Agent log contains non-empty ddtags string, but no tag-value pairs were parsed.") + } + + ddtags_object.into() +} + +#[cfg(test)] +mod tests { + use super::*; + use similar_asserts::assert_eq; + use vrl::value; + + #[test] + fn ddtags_parse_empty() { + let raw = Bytes::from(String::from("")); + let val = parse_ddtags(&raw); + + assert_eq!(val, value!({})); + } + + #[test] + fn ddtags_parse_bare() { + let raw = Bytes::from(String::from("bare")); + let val = parse_ddtags(&raw); + + assert_eq!(val, value!({"bare": null})); + } + + #[test] + fn ddtags_parse_kv_one() { + let raw = Bytes::from(String::from("filename:driver.log")); + let val = parse_ddtags(&raw); + + assert_eq!(val, value!({"filename": "driver.log"})); + } + + #[test] + fn ddtags_parse_kv_multi() { + let raw = Bytes::from(String::from("filename:driver.log,wizard:the_grey")); + let val = parse_ddtags(&raw); + + assert_eq!( + val, + value!({"filename": "driver.log", "wizard": "the_grey"}) + ); + } + + #[test] + fn ddtags_parse_kv_bare_combo() { + let raw = Bytes::from(String::from("filename:driver.log,debug,wizard:the_grey")); + let val = parse_ddtags(&raw); + + assert_eq!( + val, + value!({"filename": "driver.log", "wizard": "the_grey", "debug": null}) + ); + } +} diff --git a/src/sources/datadog_agent/mod.rs b/src/sources/datadog_agent/mod.rs index 255aa22499368..f2097b813e8d2 100644 --- a/src/sources/datadog_agent/mod.rs +++ b/src/sources/datadog_agent/mod.rs @@ -42,6 +42,7 @@ use vector_lib::internal_event::{EventsReceived, Registered}; use vector_lib::lookup::owned_value_path; use vector_lib::tls::MaybeTlsIncomingStream; use vrl::path::OwnedTargetPath; +use vrl::value::kind::Collection; use vrl::value::Kind; use warp::{filters::BoxedFilter, reject::Rejection, reply::Response, Filter, Reply}; @@ -110,6 +111,12 @@ pub struct DatadogAgentConfig { #[serde(default = "crate::serde::default_false")] multiple_outputs: bool, + /// If this is set to `true`, when log events contain the field `ddtags`, the string value that + /// contains a list of key:value pairs set by the Agent is parsed and expanded into an object. + #[configurable(metadata(docs::advanced))] + #[serde(default = "crate::serde::default_false")] + parse_ddtags: bool, + /// The namespace to use for logs. This overrides the global setting. 
#[serde(default)] #[configurable(metadata(docs::hidden))] @@ -148,6 +155,7 @@ impl GenerateConfig for DatadogAgentConfig { disable_metrics: false, disable_traces: false, multiple_outputs: false, + parse_ddtags: false, log_namespace: Some(false), keepalive: KeepaliveConfig::default(), }) @@ -178,6 +186,7 @@ impl SourceConfig for DatadogAgentConfig { tls.http_protocol_name(), logs_schema_definition, log_namespace, + self.parse_ddtags, ); let listener = tls.bind(&self.address).await?; let acknowledgements = cx.do_acknowledgements(self.acknowledgements); @@ -268,7 +277,11 @@ impl SourceConfig for DatadogAgentConfig { Self::NAME, Some(LegacyKey::InsertIfEmpty(owned_value_path!("ddtags"))), &owned_value_path!("ddtags"), - Kind::bytes(), + if self.parse_ddtags { + Kind::object(Collection::empty().with_unknown(Kind::bytes())).or_undefined() + } else { + Kind::bytes() + }, Some("tags"), ) .with_standard_vector_source_metadata(); @@ -325,6 +338,7 @@ pub(crate) struct DatadogAgentSource { protocol: &'static str, logs_schema_definition: Option>, events_received: Registered, + parse_ddtags: bool, } #[derive(Clone)] @@ -361,6 +375,7 @@ impl DatadogAgentSource { protocol: &'static str, logs_schema_definition: Option, log_namespace: LogNamespace, + parse_ddtags: bool, ) -> Self { Self { api_key_extractor: ApiKeyExtractor { @@ -381,6 +396,7 @@ impl DatadogAgentSource { logs_schema_definition: logs_schema_definition.map(Arc::new), log_namespace, events_received: register!(EventsReceived), + parse_ddtags, } } diff --git a/src/sources/datadog_agent/tests.rs b/src/sources/datadog_agent/tests.rs index bccd5cbf50727..fabf11947d2d9 100644 --- a/src/sources/datadog_agent/tests.rs +++ b/src/sources/datadog_agent/tests.rs @@ -28,6 +28,7 @@ use vector_lib::{ metric_tags, }; use vrl::compiler::value::Collection; +use vrl::value; use vrl::value::{Kind, ObjectMap}; use crate::schema::Definition; @@ -97,6 +98,7 @@ fn test_decode_log_body() { "http", Some(test_logs_schema_definition()), LogNamespace::Legacy, + false, ); let events = decode_log_body(body, api_key, &source).unwrap(); @@ -123,6 +125,58 @@ fn test_decode_log_body() { QuickCheck::new().quickcheck(inner as fn(Vec) -> TestResult); } +#[test] +fn test_decode_log_body_parse_ddtags() { + let log_msgs = [LogMsg { + message: Bytes::from(String::from("message")), + status: Bytes::from(String::from("status")), + timestamp: Utc + .timestamp_millis_opt(1234) + .single() + .expect("invalid timestamp"), + hostname: Bytes::from(String::from("host")), + service: Bytes::from(String::from("service")), + ddsource: Bytes::from(String::from("ddsource")), + ddtags: Bytes::from(String::from("wizard:the_grey,env:staging")), + }]; + + let body = Bytes::from(serde_json::to_string(&log_msgs).unwrap()); + let api_key = None; + let decoder = crate::codecs::Decoder::new( + Framer::Bytes(BytesDecoder::new()), + Deserializer::Bytes(BytesDeserializer), + ); + + let source = DatadogAgentSource::new( + true, + decoder, + "http", + Some(test_logs_schema_definition()), + LogNamespace::Legacy, + true, + ); + + let events = decode_log_body(body, api_key, &source).unwrap(); + + assert_eq!(events.len(), 1); + + let event = events.first().unwrap(); + let log = event.as_log(); + let log_msg = log_msgs[0].clone(); + + assert_eq!(log["message"], log_msg.message.into()); + assert_eq!(log["status"], log_msg.status.into()); + assert_eq!(log["timestamp"], log_msg.timestamp.into()); + assert_eq!(log["hostname"], log_msg.hostname.into()); + assert_eq!(log["service"], log_msg.service.into()); + 
assert_eq!(log["ddsource"], log_msg.ddsource.into()); + + assert_eq!( + log["ddtags"], + value!({"env": "staging", "wizard": "the_grey"}) + ); +} + #[test] fn test_decode_log_body_empty_object() { let body = Bytes::from("{}"); @@ -138,6 +192,7 @@ fn test_decode_log_body_empty_object() { "http", Some(test_logs_schema_definition()), LogNamespace::Legacy, + false, ); let events = decode_log_body(body, api_key, &source).unwrap(); @@ -1519,6 +1574,7 @@ fn test_config_outputs_with_disabled_data_types() { disable_logs, disable_metrics, disable_traces, + parse_ddtags: false, log_namespace: Some(false), keepalive: Default::default(), }; @@ -1960,6 +2016,7 @@ fn test_config_outputs() { disable_logs: false, disable_metrics: false, disable_traces: false, + parse_ddtags: false, log_namespace: Some(false), keepalive: Default::default(), }; diff --git a/website/cue/reference/components/sources/base/datadog_agent.cue b/website/cue/reference/components/sources/base/datadog_agent.cue index 71942640e701a..48bdbd6d61494 100644 --- a/website/cue/reference/components/sources/base/datadog_agent.cue +++ b/website/cue/reference/components/sources/base/datadog_agent.cue @@ -401,6 +401,14 @@ base: components: sources: datadog_agent: configuration: { required: false type: bool: default: false } + parse_ddtags: { + description: """ + If this is set to `true`, when log events contain the field `ddtags`, the string value that + contains a list of key:value pairs set by the Agent is parsed and expanded into an object. + """ + required: false + type: bool: default: false + } store_api_key: { description: """ If this is set to `true`, when incoming events contain a Datadog API key, it is From 3a495e35d95c040ccc629f3ac1c2f8d696f1404a Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Thu, 7 Mar 2024 17:10:35 -0800 Subject: [PATCH 0107/1491] chore(deps): Bump whoami to 1.5.0 (#20018) * chore(deps): Bump whoami to 1.5.0 Resolves RUSTSEC-2024-0020 Signed-off-by: Jesse Szwedko * regenerate licenses Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- Cargo.lock | 13 ++++++++++--- LICENSE-3rdparty.csv | 1 + 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b95fe9655c6b4..e5d5f5a352b06 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10726,6 +10726,12 @@ version = "0.11.0+wasi-snapshot-preview1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" +[[package]] +name = "wasite" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8dad83b4f25e74f184f64c43b150b91efe7647395b42289f38e50566d82855b" + [[package]] name = "wasm-bindgen" version = "0.2.92" @@ -10865,11 +10871,12 @@ dependencies = [ [[package]] name = "whoami" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22fc3756b8a9133049b26c7f61ab35416c130e8c09b660f5b3958b446f52cc50" +checksum = "0fec781d48b41f8163426ed18e8fc2864c12937df9ce54c88ede7bd47270893e" dependencies = [ - "wasm-bindgen", + "redox_syscall 0.4.1", + "wasite", "web-sys", ] diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 001e98fb9f7ec..52eaaa6b918b2 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -632,6 +632,7 @@ walkdir,https://github.com/BurntSushi/walkdir,Unlicense OR MIT,Andrew Gallant warp,https://github.com/seanmonstar/warp,MIT,Sean McArthur wasi,https://github.com/bytecodealliance/wasi,Apache-2.0 WITH LLVM-exception OR 
Apache-2.0 OR MIT,The Cranelift Project Developers +wasite,https://github.com/ardaku/wasite,Apache-2.0 OR BSL-1.0 OR MIT,The wasite Authors wasm-bindgen,https://github.com/rustwasm/wasm-bindgen,MIT OR Apache-2.0,The wasm-bindgen Developers wasm-bindgen-backend,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/backend,MIT OR Apache-2.0,The wasm-bindgen Developers wasm-bindgen-futures,https://github.com/rustwasm/wasm-bindgen/tree/master/crates/futures,MIT OR Apache-2.0,The wasm-bindgen Developers From 1b87cce1f96ddb89163bc0d6f41d78b403ea47c6 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 8 Mar 2024 01:46:25 -0800 Subject: [PATCH 0108/1491] fix(aws provider): Enable `credentials-process` for `aws-config` (#20030) Otherwise it outputs this warning: ``` 2024-03-07T11:59:25.573521Z WARN provide_credentials{provider=default_chain}: aws_config::meta::credentials::chain: provider failed to provide credentials provider=Profile error=the credentials provider was not properly configured: ProfileFile provider could not be built: This behavior requires following cargo feature(s) enabled: credentials-process. In order to spawn a subprocess, the credentials-process feature must be enabled. (InvalidConfiguration(InvalidConfiguration { source: "ProfileFile provider could not be built: This behavior requires following cargo feature(s) enabled: credentials-process. In order to spawn a subprocess, the credentials-process feature must be enabled." ``` Signed-off-by: Jesse Szwedko --- Cargo.toml | 2 +- changelog.d/aws_credentials_process.fix.md | 2 ++ 2 files changed, 3 insertions(+), 1 deletion(-) create mode 100644 changelog.d/aws_credentials_process.fix.md diff --git a/Cargo.toml b/Cargo.toml index 758c4979fe84a..500b6c3c093b6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -187,7 +187,7 @@ aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["be aws-sdk-sts = { version = "1.3.1", default-features = false, features = ["behavior-version-latest"], optional = true } aws-types = { version = "1.1.7", default-features = false, optional = true } aws-sigv4 = { version = "1.1.7", default-features = false, features = ["sign-http"], optional = true } -aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest"], optional = true } +aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest", "credentials-process"], optional = true } aws-credential-types = { version = "1.1.7", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } aws-smithy-types = { version = "1.1.7", default-features = false, optional = true } diff --git a/changelog.d/aws_credentials_process.fix.md b/changelog.d/aws_credentials_process.fix.md new file mode 100644 index 0000000000000..11fa14dbbed4f --- /dev/null +++ b/changelog.d/aws_credentials_process.fix.md @@ -0,0 +1,2 @@ +AWS components again support the use of `credential_process` in AWS config files to load AWS +credentials from an external process. This was a regression in v0.36.0. 
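For context on the fix above: `credential_process` profiles live in the AWS
shared config file (`~/.aws/config`). A minimal sketch of such a profile, with
an illustrative helper path that is not taken from this patch:

```
[profile external-creds]
credential_process = /usr/local/bin/fetch-aws-creds
```

With the `credentials-process` cargo feature enabled, `aws-config` can spawn
the helper and read temporary credentials from its standard output; without
the feature, the provider chain fails with the warning quoted in the commit
message.
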
From 485dea71725511b997586698650e202add499183 Mon Sep 17 00:00:00 2001
From: Ensar Sarajčić
Date: Fri, 8 Mar 2024 17:17:23 +0100
Subject: [PATCH 0109/1491] feat(sources): add `lowercase_hostnames` option to
 `dnstap` source (#20035)

* feat(sources): add `lowercase_hostnames` option to `dnstap` source

This adds a `lowercase_hostnames` option to the `dnstap` source, which
lowercases all hostnames found in DNS records, for consistency.

Fixes: #19901

* Add changelog entry

* Fix clippy warnings

* Default to false for `lowercase_hostnames` without `Option`

* Fix `lowercase_hostnames` in tests
---
 ...0035_dnstap_lowercase_hostnames.feature.md |   4 +
 lib/dnsmsg-parser/src/dns_message_parser.rs   | 653 ++++++++++--------
 src/sources/dnstap/mod.rs                     |  17 +-
 src/sources/dnstap/parser.rs                  | 115 ++-
 .../components/sources/base/dnstap.cue        |   5 +
 5 files changed, 495 insertions(+), 299 deletions(-)
 create mode 100644 changelog.d/20035_dnstap_lowercase_hostnames.feature.md

diff --git a/changelog.d/20035_dnstap_lowercase_hostnames.feature.md b/changelog.d/20035_dnstap_lowercase_hostnames.feature.md
new file mode 100644
index 0000000000000..197a189cf2231
--- /dev/null
+++ b/changelog.d/20035_dnstap_lowercase_hostnames.feature.md
@@ -0,0 +1,4 @@
+Added a `lowercase_hostnames` option to the `dnstap` source, which lowercases
+hostnames in DNS records for consistency.
+
+authors: esensar
diff --git a/lib/dnsmsg-parser/src/dns_message_parser.rs b/lib/dnsmsg-parser/src/dns_message_parser.rs
index 73d45404ef813..2f3524419c0ae 100644
--- a/lib/dnsmsg-parser/src/dns_message_parser.rs
+++ b/lib/dnsmsg-parser/src/dns_message_parser.rs
@@ -44,6 +44,27 @@ pub enum DnsMessageParserError {
 /// Result alias for parsing
 pub type DnsParserResult<T> = Result<T, DnsMessageParserError>;
 
+/// Options for the DNS message parser
+#[derive(Debug, Default, Clone)]
+pub struct DnsParserOptions {
+    /// Whether hostnames in RData should be lowercased, for consistency
+    pub lowercase_hostnames: bool,
+}
+
+trait DnsParserOptionsTarget {
+    fn to_string_with_options(&self, options: &DnsParserOptions) -> String;
+}
+
+impl DnsParserOptionsTarget for Name {
+    fn to_string_with_options(&self, options: &DnsParserOptions) -> String {
+        if options.lowercase_hostnames {
+            self.to_lowercase().to_string()
+        } else {
+            self.to_string()
+        }
+    }
+}
+
 /// A DNS message parser
 #[derive(Debug)]
 pub struct DnsMessageParser {
@@ -55,6 +76,7 @@ pub struct DnsMessageParser {
     // contain compressed domain name, and store it here as a member field; for
     // subsequent invocations of the same call, we simply reuse this copy.
     raw_message_for_rdata_parsing: Option<Vec<u8>>,
+    options: DnsParserOptions,
 }
 
 impl DnsMessageParser {
@@ -62,6 +84,15 @@ impl DnsMessageParser {
         DnsMessageParser {
             raw_message,
             raw_message_for_rdata_parsing: None,
+            options: DnsParserOptions::default(),
         }
     }
+
+    pub fn with_options(raw_message: Vec<u8>, options: DnsParserOptions) -> Self {
+        DnsMessageParser {
+            raw_message,
+            raw_message_for_rdata_parsing: None,
+            options,
+        }
+    }
 
@@ -118,7 +149,7 @@ impl DnsMessageParser {
     fn parse_dns_query_question(&self, question: &Query) -> QueryQuestion {
         QueryQuestion {
-            name: question.name().to_string(),
+            name: question.name().to_string_with_options(&self.options),
             class: question.query_class().to_string(),
             record_type: format_record_type(question.query_type()),
             record_type_id: u16::from(question.query_type()),
@@ -158,12 +189,12 @@ impl DnsMessageParser {
             Some(RData::Unknown { code, rdata }) => {
                 self.format_unknown_rdata((*code).into(), rdata)
             }
-            Some(rdata) => format_rdata(rdata),
+            Some(rdata) => self.format_rdata(rdata),
             None => Ok((Some(String::from("")), None)), // NULL record
         }?;
 
         Ok(DnsRecord {
-            name: record.name().to_string(),
+            name: record.name().to_string_with_options(&self.options),
             class: record.dns_class().to_string(),
             record_type: format_record_type(record.record_type()),
             record_type_id: u16::from(record.record_type()),
@@ -244,7 +275,7 @@ impl DnsMessageParser {
             let mut dec = BinDecoder::new(&address_vec);
             parse_ipv6_address(&mut dec)?
         };
-        let domain_name = parse_domain_name(&mut decoder)?;
+        let domain_name = Self::parse_domain_name(&mut decoder, &self.options)?;
         Ok((
             Some(format!("{} {} {}", prefix, ipv6_address, domain_name)),
             None,
@@ -350,43 +381,49 @@ impl DnsMessageParser {
     ) -> DnsParserResult<(Option<String>, Option<Vec<u8>>)> {
         match code {
             dns_message::RTYPE_MB => {
+                let options = self.options.clone();
                 let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything());
-                let madname = parse_domain_name(&mut decoder)?;
-                Ok((Some(madname.to_string()), None))
+                let madname = Self::parse_domain_name(&mut decoder, &options)?;
+                Ok((Some(madname), None))
             }
 
             dns_message::RTYPE_MG => {
+                let options = self.options.clone();
                 let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything());
-                let mgname = parse_domain_name(&mut decoder)?;
-                Ok((Some(mgname.to_string()), None))
+                let mgname = Self::parse_domain_name(&mut decoder, &options)?;
+                Ok((Some(mgname), None))
             }
 
             dns_message::RTYPE_MR => {
+                let options = self.options.clone();
                 let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything());
-                let newname = parse_domain_name(&mut decoder)?;
-                Ok((Some(newname.to_string()), None))
+                let newname = Self::parse_domain_name(&mut decoder, &options)?;
+                Ok((Some(newname), None))
             }
 
             dns_message::RTYPE_WKS => self.parse_wks_rdata(rdata.anything()),
 
             dns_message::RTYPE_MINFO => {
+                let options = self.options.clone();
                 let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything());
-                let rmailbx = parse_domain_name(&mut decoder)?;
-                let emailbx = parse_domain_name(&mut decoder)?;
+                let rmailbx = Self::parse_domain_name(&mut decoder, &options)?;
+                let emailbx = Self::parse_domain_name(&mut decoder, &options)?;
                 Ok((Some(format!("{} {}", rmailbx, emailbx)), None))
             }
 
             dns_message::RTYPE_RP => {
+                let options = self.options.clone();
                 let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything());
-                let mbox = parse_domain_name(&mut decoder)?;
-                let txt = parse_domain_name(&mut decoder)?;
+                let mbox = Self::parse_domain_name(&mut decoder, &options)?;
+                let txt =
Self::parse_domain_name(&mut decoder, &options)?; Ok((Some(format!("{} {}", mbox, txt)), None)) } dns_message::RTYPE_AFSDB => { + let options = self.options.clone(); let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let subtype = parse_u16(&mut decoder)?; - let hostname = parse_domain_name(&mut decoder)?; + let hostname = Self::parse_domain_name(&mut decoder, &options)?; Ok((Some(format!("{} {}", subtype, hostname)), None)) } @@ -427,9 +464,10 @@ impl DnsMessageParser { } dns_message::RTYPE_RT => { + let options = self.options.clone(); let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let preference = parse_u16(&mut decoder)?; - let intermediate_host = parse_domain_name(&mut decoder)?; + let intermediate_host = Self::parse_domain_name(&mut decoder, &options)?; Ok((Some(format!("{} {}", preference, intermediate_host)), None)) } @@ -442,19 +480,21 @@ impl DnsMessageParser { } dns_message::RTYPE_PX => { + let options = self.options.clone(); let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let preference = parse_u16(&mut decoder)?; - let map822 = parse_domain_name(&mut decoder)?; - let mapx400 = parse_domain_name(&mut decoder)?; + let map822 = Self::parse_domain_name(&mut decoder, &options)?; + let mapx400 = Self::parse_domain_name(&mut decoder, &options)?; Ok((Some(format!("{} {} {}", preference, map822, mapx400)), None)) } dns_message::RTYPE_LOC => self.parse_loc_rdata(rdata.anything()), dns_message::RTYPE_KX => { + let options = self.options.clone(); let mut decoder = self.get_rdata_decoder_with_raw_message(rdata.anything()); let preference = parse_u16(&mut decoder)?; - let exchanger = parse_domain_name(&mut decoder)?; + let exchanger = Self::parse_domain_name(&mut decoder, &options)?; Ok((Some(format!("{} {}", preference, exchanger)), None)) } @@ -513,291 +553,305 @@ impl DnsMessageParser { _ => Ok((None, Some(rdata.anything().to_vec()))), } } -} -fn format_rdata(rdata: &RData) -> DnsParserResult<(Option, Option>)> { - match rdata { - RData::A(ip) => Ok((Some(ip.to_string()), None)), - RData::AAAA(ip) => Ok((Some(ip.to_string()), None)), - RData::ANAME(name) => Ok((Some(name.to_string()), None)), - RData::CNAME(name) => Ok((Some(name.to_string()), None)), - RData::CSYNC(csync) => { - // Using CSYNC's formatter since not all data is exposed otherwise - let csync_rdata = format!("{}", csync); - Ok((Some(csync_rdata), None)) - } - RData::MX(mx) => { - let srv_rdata = format!("{} {}", mx.preference(), mx.exchange(),); - Ok((Some(srv_rdata), None)) - } - RData::NULL(null) => Ok((Some(BASE64.encode(null.anything())), None)), - RData::NS(ns) => Ok((Some(ns.to_string()), None)), - RData::OPENPGPKEY(key) => { - if let Ok(key_string) = String::from_utf8(Vec::from(key.public_key())) { - Ok((Some(format!("({})", &key_string)), None)) - } else { - Err(DnsMessageParserError::SimpleError { - cause: String::from("Invalid OPENPGPKEY rdata"), - }) + fn format_rdata(&self, rdata: &RData) -> DnsParserResult<(Option, Option>)> { + match rdata { + RData::A(ip) => Ok((Some(ip.to_string()), None)), + RData::AAAA(ip) => Ok((Some(ip.to_string()), None)), + RData::ANAME(name) => Ok((Some(name.to_string_with_options(&self.options)), None)), + RData::CNAME(name) => Ok((Some(name.to_string_with_options(&self.options)), None)), + RData::CSYNC(csync) => { + // Using CSYNC's formatter since not all data is exposed otherwise + let csync_rdata = format!("{}", csync); + Ok((Some(csync_rdata), None)) } - } - RData::PTR(ptr) => 
Ok((Some(ptr.to_string()), None)), - RData::SOA(soa) => Ok(( - Some(format!( - "{} {} {} {} {} {} {}", - soa.mname(), - soa.rname(), - soa.serial(), - soa.refresh(), - soa.retry(), - soa.expire(), - soa.minimum() + RData::MX(mx) => { + let srv_rdata = format!( + "{} {}", + mx.preference(), + mx.exchange().to_string_with_options(&self.options), + ); + Ok((Some(srv_rdata), None)) + } + RData::NULL(null) => Ok((Some(BASE64.encode(null.anything())), None)), + RData::NS(ns) => Ok((Some(ns.to_string_with_options(&self.options)), None)), + RData::OPENPGPKEY(key) => { + if let Ok(key_string) = String::from_utf8(Vec::from(key.public_key())) { + Ok((Some(format!("({})", &key_string)), None)) + } else { + Err(DnsMessageParserError::SimpleError { + cause: String::from("Invalid OPENPGPKEY rdata"), + }) + } + } + RData::PTR(ptr) => Ok((Some(ptr.to_string_with_options(&self.options)), None)), + RData::SOA(soa) => Ok(( + Some(format!( + "{} {} {} {} {} {} {}", + soa.mname().to_string_with_options(&self.options), + soa.rname().to_string_with_options(&self.options), + soa.serial(), + soa.refresh(), + soa.retry(), + soa.expire(), + soa.minimum() + )), + None, )), - None, - )), - RData::SRV(srv) => { - let srv_rdata = format!( - "{} {} {} {}", - srv.priority(), - srv.weight(), - srv.port(), - srv.target() - ); - Ok((Some(srv_rdata), None)) - } - RData::TXT(txt) => { - let txt_rdata = txt - .txt_data() - .iter() - .map(|value| { - format!( - "\"{}\"", - escape_string_for_text_representation( - String::from_utf8_lossy(value).to_string() + RData::SRV(srv) => { + let srv_rdata = format!( + "{} {} {} {}", + srv.priority(), + srv.weight(), + srv.port(), + srv.target().to_string_with_options(&self.options) + ); + Ok((Some(srv_rdata), None)) + } + RData::TXT(txt) => { + let txt_rdata = txt + .txt_data() + .iter() + .map(|value| { + format!( + "\"{}\"", + escape_string_for_text_representation( + String::from_utf8_lossy(value).to_string() + ) ) - ) - }) - .collect::>() - .join(" "); - Ok((Some(txt_rdata), None)) - } - RData::CAA(caa) => { - let caa_rdata = format!( - "{} {} \"{}\"", - caa.issuer_critical() as u8, - caa.tag().as_str(), - match caa.value() { - Value::Url(url) => { - url.as_str().to_string() - } - Value::Issuer(option_name, vec_keyvalue) => { - let mut final_issuer = String::new(); - if let Some(name) = option_name { - final_issuer.push_str(&name.to_utf8()); - for keyvalue in vec_keyvalue.iter() { - final_issuer.push_str("; "); - final_issuer.push_str(keyvalue.key()); - final_issuer.push('='); - final_issuer.push_str(keyvalue.value()); + }) + .collect::>() + .join(" "); + Ok((Some(txt_rdata), None)) + } + RData::CAA(caa) => { + let caa_rdata = format!( + "{} {} \"{}\"", + caa.issuer_critical() as u8, + caa.tag().as_str(), + match caa.value() { + Value::Url(url) => { + url.as_str().to_string() + } + Value::Issuer(option_name, vec_keyvalue) => { + let mut final_issuer = String::new(); + if let Some(name) = option_name { + final_issuer.push_str(&name.to_string_with_options(&self.options)); + for keyvalue in vec_keyvalue.iter() { + final_issuer.push_str("; "); + final_issuer.push_str(keyvalue.key()); + final_issuer.push('='); + final_issuer.push_str(keyvalue.value()); + } } + final_issuer.trim_end().to_string() } - final_issuer.trim_end().to_string() + Value::Unknown(unknown) => std::str::from_utf8(unknown) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? 
+ .to_string(), } - Value::Unknown(unknown) => std::str::from_utf8(unknown) - .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? - .to_string(), - } - ); - Ok((Some(caa_rdata), None)) - } - - RData::TLSA(tlsa) => { - let tlsa_rdata = format!( - "{} {} {} {}", - u8::from(tlsa.cert_usage()), - u8::from(tlsa.selector()), - u8::from(tlsa.matching()), - HEXUPPER.encode(tlsa.cert_data()) - ); - Ok((Some(tlsa_rdata), None)) - } - RData::SSHFP(sshfp) => { - let sshfp_rdata = format!( - "{} {} {}", - Into::::into(sshfp.algorithm()), - Into::::into(sshfp.fingerprint_type()), - HEXUPPER.encode(sshfp.fingerprint()) - ); - Ok((Some(sshfp_rdata), None)) - } - RData::NAPTR(naptr) => { - let naptr_rdata = format!( - r#"{} {} "{}" "{}" "{}" {}"#, - naptr.order(), - naptr.preference(), - escape_string_for_text_representation( - std::str::from_utf8(naptr.flags()) - .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? - .to_string() - ), - escape_string_for_text_representation( - std::str::from_utf8(naptr.services()) - .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? - .to_string() - ), - escape_string_for_text_representation( - std::str::from_utf8(naptr.regexp()) - .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? - .to_string() - ), - naptr.replacement().to_utf8() - ); - Ok((Some(naptr_rdata), None)) - } - RData::HINFO(hinfo) => { - let hinfo_data = format!( - r#""{}" "{}""#, - std::str::from_utf8(hinfo.cpu()) - .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })?, - std::str::from_utf8(hinfo.os()) - .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })?, - ); - Ok((Some(hinfo_data), None)) - } - RData::HTTPS(https) => { - let https_data = format_svcb_record(&https.0); - Ok((Some(https_data), None)) - } - RData::SVCB(svcb) => { - let svcb_data = format_svcb_record(svcb); - Ok((Some(svcb_data), None)) - } - RData::OPT(opt) => { - let parsed = parse_edns_options(opt)?; - let ede_data = parsed.0.iter().map(|entry| EdnsOptionEntry { - opt_code: 15u16, - opt_name: "EDE".to_string(), - opt_data: format!( - "EDE={}({}){}", - entry.info_code(), - entry.purpose().unwrap_or(""), - entry.extra_text().unwrap_or("".to_string()) - ), - }); - let opt_data = parsed - .1 - .into_iter() - .chain(ede_data) - .map(|entry| format!("{}={}", entry.opt_name, entry.opt_data)) - .collect::>() - .join(","); - Ok((Some(opt_data), None)) - } - RData::DNSSEC(dnssec) => match dnssec { - // See https://tools.ietf.org/html/rfc4034 for details - // on dnssec related rdata formats - DNSSECRData::CDS(cds) => Ok((Some(format_ds_record(cds.deref())), None)), - DNSSECRData::DS(ds) => Ok((Some(format_ds_record(ds)), None)), - DNSSECRData::CDNSKEY(cdnskey) => { - Ok((Some(format_dnskey_record(cdnskey.deref())), None)) - } - DNSSECRData::DNSKEY(dnskey) => Ok((Some(format_dnskey_record(dnskey)), None)), - DNSSECRData::NSEC(nsec) => { - let nsec_rdata = format!( - "{} {}", - nsec.next_domain_name(), - nsec.type_bit_maps() - .iter() - .flat_map(|e| format_record_type(*e)) - .collect::>() - .join(" ") - ); - Ok((Some(nsec_rdata), None)) - } - DNSSECRData::NSEC3(nsec3) => { - let nsec3_rdata = format!( - "{} {} {} {} {} {}", - u8::from(nsec3.hash_algorithm()), - nsec3.opt_out() as u8, - nsec3.iterations(), - HEXUPPER.encode(nsec3.salt()), - BASE32HEX_NOPAD.encode(nsec3.next_hashed_owner_name()), - nsec3 - .type_bit_maps() - .iter() - .flat_map(|e| format_record_type(*e)) - .collect::>() - .join(" ") ); - Ok((Some(nsec3_rdata), None)) + 
Ok((Some(caa_rdata), None)) } - DNSSECRData::NSEC3PARAM(nsec3param) => { - let nsec3param_rdata = format!( + + RData::TLSA(tlsa) => { + let tlsa_rdata = format!( "{} {} {} {}", - u8::from(nsec3param.hash_algorithm()), - nsec3param.opt_out() as u8, - nsec3param.iterations(), - HEXUPPER.encode(nsec3param.salt()), + u8::from(tlsa.cert_usage()), + u8::from(tlsa.selector()), + u8::from(tlsa.matching()), + HEXUPPER.encode(tlsa.cert_data()) ); - Ok((Some(nsec3param_rdata), None)) + Ok((Some(tlsa_rdata), None)) } - - DNSSECRData::SIG(sig) => { - let sig_rdata = format!( - "{} {} {} {} {} {} {} {} {}", - match format_record_type(sig.type_covered()) { - Some(record_type) => record_type, - None => String::from("Unknown record type"), - }, - u8::from(sig.algorithm()), - sig.num_labels(), - sig.original_ttl(), - sig.sig_expiration(), // currently in epoch convert to human readable ? - sig.sig_inception(), // currently in epoch convert to human readable ? - sig.key_tag(), - sig.signer_name(), - BASE64.encode(sig.sig()) + RData::SSHFP(sshfp) => { + let sshfp_rdata = format!( + "{} {} {}", + Into::::into(sshfp.algorithm()), + Into::::into(sshfp.fingerprint_type()), + HEXUPPER.encode(sshfp.fingerprint()) ); - Ok((Some(sig_rdata), None)) + Ok((Some(sshfp_rdata), None)) } - // RSIG is a derivation of SIG but choosing to keep this duplicate code in lieu of the alternative - // which is to allocate to the heap with Box in order to deref. - DNSSECRData::RRSIG(sig) => { - let sig_rdata = format!( - "{} {} {} {} {} {} {} {} {}", - match format_record_type(sig.type_covered()) { - Some(record_type) => record_type, - None => String::from("Unknown record type"), - }, - u8::from(sig.algorithm()), - sig.num_labels(), - sig.original_ttl(), - sig.sig_expiration(), // currently in epoch convert to human readable ? - sig.sig_inception(), // currently in epoch convert to human readable ? - sig.key_tag(), - sig.signer_name(), - BASE64.encode(sig.sig()) + RData::NAPTR(naptr) => { + let naptr_rdata = format!( + r#"{} {} "{}" "{}" "{}" {}"#, + naptr.order(), + naptr.preference(), + escape_string_for_text_representation( + std::str::from_utf8(naptr.flags()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? + .to_string() + ), + escape_string_for_text_representation( + std::str::from_utf8(naptr.services()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? + .to_string() + ), + escape_string_for_text_representation( + std::str::from_utf8(naptr.regexp()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })? 
+ .to_string() + ), + naptr.replacement().to_string_with_options(&self.options) ); - Ok((Some(sig_rdata), None)) + Ok((Some(naptr_rdata), None)) } - DNSSECRData::KEY(key) => { - let key_rdata = format!( - "{} {} {} {}", - key.flags(), - u8::from(key.protocol()), - u8::from(key.algorithm()), - BASE64.encode(key.public_key()) + RData::HINFO(hinfo) => { + let hinfo_data = format!( + r#""{}" "{}""#, + std::str::from_utf8(hinfo.cpu()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })?, + std::str::from_utf8(hinfo.os()) + .map_err(|source| DnsMessageParserError::Utf8ParsingError { source })?, ); - Ok((Some(key_rdata), None)) + Ok((Some(hinfo_data), None)) + } + RData::HTTPS(https) => { + let https_data = format_svcb_record(&https.0, &self.options); + Ok((Some(https_data), None)) + } + RData::SVCB(svcb) => { + let svcb_data = format_svcb_record(svcb, &self.options); + Ok((Some(svcb_data), None)) } - DNSSECRData::Unknown { code: _, rdata } => Ok((None, Some(rdata.anything().to_vec()))), + RData::OPT(opt) => { + let parsed = parse_edns_options(opt)?; + let ede_data = parsed.0.iter().map(|entry| EdnsOptionEntry { + opt_code: 15u16, + opt_name: "EDE".to_string(), + opt_data: format!( + "EDE={}({}){}", + entry.info_code(), + entry.purpose().unwrap_or(""), + entry.extra_text().unwrap_or("".to_string()) + ), + }); + let opt_data = parsed + .1 + .into_iter() + .chain(ede_data) + .map(|entry| format!("{}={}", entry.opt_name, entry.opt_data)) + .collect::>() + .join(","); + Ok((Some(opt_data), None)) + } + RData::DNSSEC(dnssec) => match dnssec { + // See https://tools.ietf.org/html/rfc4034 for details + // on dnssec related rdata formats + DNSSECRData::CDS(cds) => Ok((Some(format_ds_record(cds.deref())), None)), + DNSSECRData::DS(ds) => Ok((Some(format_ds_record(ds)), None)), + DNSSECRData::CDNSKEY(cdnskey) => { + Ok((Some(format_dnskey_record(cdnskey.deref())), None)) + } + DNSSECRData::DNSKEY(dnskey) => Ok((Some(format_dnskey_record(dnskey)), None)), + DNSSECRData::NSEC(nsec) => { + let nsec_rdata = format!( + "{} {}", + nsec.next_domain_name() + .to_string_with_options(&self.options), + nsec.type_bit_maps() + .iter() + .flat_map(|e| format_record_type(*e)) + .collect::>() + .join(" ") + ); + Ok((Some(nsec_rdata), None)) + } + DNSSECRData::NSEC3(nsec3) => { + let nsec3_rdata = format!( + "{} {} {} {} {} {}", + u8::from(nsec3.hash_algorithm()), + nsec3.opt_out() as u8, + nsec3.iterations(), + HEXUPPER.encode(nsec3.salt()), + BASE32HEX_NOPAD.encode(nsec3.next_hashed_owner_name()), + nsec3 + .type_bit_maps() + .iter() + .flat_map(|e| format_record_type(*e)) + .collect::>() + .join(" ") + ); + Ok((Some(nsec3_rdata), None)) + } + DNSSECRData::NSEC3PARAM(nsec3param) => { + let nsec3param_rdata = format!( + "{} {} {} {}", + u8::from(nsec3param.hash_algorithm()), + nsec3param.opt_out() as u8, + nsec3param.iterations(), + HEXUPPER.encode(nsec3param.salt()), + ); + Ok((Some(nsec3param_rdata), None)) + } + + DNSSECRData::SIG(sig) => { + let sig_rdata = format!( + "{} {} {} {} {} {} {} {} {}", + match format_record_type(sig.type_covered()) { + Some(record_type) => record_type, + None => String::from("Unknown record type"), + }, + u8::from(sig.algorithm()), + sig.num_labels(), + sig.original_ttl(), + sig.sig_expiration(), // currently in epoch convert to human readable ? + sig.sig_inception(), // currently in epoch convert to human readable ? 
+ sig.key_tag(), + sig.signer_name().to_string_with_options(&self.options), + BASE64.encode(sig.sig()) + ); + Ok((Some(sig_rdata), None)) + } + // RSIG is a derivation of SIG but choosing to keep this duplicate code in lieu of the alternative + // which is to allocate to the heap with Box in order to deref. + DNSSECRData::RRSIG(sig) => { + let sig_rdata = format!( + "{} {} {} {} {} {} {} {} {}", + match format_record_type(sig.type_covered()) { + Some(record_type) => record_type, + None => String::from("Unknown record type"), + }, + u8::from(sig.algorithm()), + sig.num_labels(), + sig.original_ttl(), + sig.sig_expiration(), // currently in epoch convert to human readable ? + sig.sig_inception(), // currently in epoch convert to human readable ? + sig.key_tag(), + sig.signer_name().to_string_with_options(&self.options), + BASE64.encode(sig.sig()) + ); + Ok((Some(sig_rdata), None)) + } + DNSSECRData::KEY(key) => { + let key_rdata = format!( + "{} {} {} {}", + key.flags(), + u8::from(key.protocol()), + u8::from(key.algorithm()), + BASE64.encode(key.public_key()) + ); + Ok((Some(key_rdata), None)) + } + DNSSECRData::Unknown { code: _, rdata } => { + Ok((None, Some(rdata.anything().to_vec()))) + } + _ => Err(DnsMessageParserError::SimpleError { + cause: format!("Unsupported rdata {:?}", rdata), + }), + }, _ => Err(DnsMessageParserError::SimpleError { cause: format!("Unsupported rdata {:?}", rdata), }), - }, - _ => Err(DnsMessageParserError::SimpleError { - cause: format!("Unsupported rdata {:?}", rdata), - }), + } + } + + fn parse_domain_name( + decoder: &mut BinDecoder<'_>, + options: &DnsParserOptions, + ) -> DnsParserResult { + parse_domain_name(decoder).map(|n| n.to_string_with_options(options)) } } @@ -808,11 +862,11 @@ fn format_record_type(record_type: RecordType) -> Option { } } -fn format_svcb_record(svcb: &SVCB) -> String { +fn format_svcb_record(svcb: &SVCB, options: &DnsParserOptions) -> String { format!( "{} {} {}", svcb.svc_priority(), - svcb.target_name(), + svcb.target_name().to_string_with_options(options), svcb.svc_params() .iter() .map(|(key, value)| format!(r#"{}="{}""#, key, value.to_string().trim_end_matches(','))) @@ -1252,6 +1306,17 @@ mod tests { } } + fn format_rdata(rdata: &RData) -> DnsParserResult<(Option, Option>)> { + DnsMessageParser::new(Vec::new()).format_rdata(rdata) + } + + fn format_rdata_with_options( + rdata: &RData, + options: DnsParserOptions, + ) -> DnsParserResult<(Option, Option>)> { + DnsMessageParser::with_options(Vec::new(), options).format_rdata(rdata) + } + #[test] fn test_parse_as_query_message() { let raw_dns_message = "szgAAAABAAAAAAAAAmg1B2V4YW1wbGUDY29tAAAGAAE="; @@ -1501,6 +1566,24 @@ mod tests { } } + #[test] + fn test_format_rdata_for_cname_type_downcase() { + let rdata = RData::CNAME(hickory_proto::rr::rdata::CNAME( + Name::from_str("WWW.Example.Com.").unwrap(), + )); + let rdata_text = format_rdata_with_options( + &rdata, + DnsParserOptions { + lowercase_hostnames: true, + }, + ); + assert!(rdata_text.is_ok()); + if let Ok((parsed, raw_rdata)) = rdata_text { + assert!(raw_rdata.is_none()); + assert_eq!("www.example.com.", parsed.unwrap()); + } + } + #[test] fn test_format_rdata_for_txt_type() { let rdata = RData::TXT(TXT::new(vec![ diff --git a/src/sources/dnstap/mod.rs b/src/sources/dnstap/mod.rs index 60d109cd11290..6f5b85c61becc 100644 --- a/src/sources/dnstap/mod.rs +++ b/src/sources/dnstap/mod.rs @@ -1,6 +1,7 @@ use std::path::PathBuf; use base64::prelude::{Engine as _, BASE64_STANDARD}; +use 
dnsmsg_parser::dns_message_parser::DnsParserOptions; use vector_lib::event::{Event, LogEvent}; use vector_lib::internal_event::{ ByteSize, BytesReceived, InternalEventHandle, Protocol, Registered, @@ -67,6 +68,10 @@ pub struct DnstapConfig { /// Maximum number of frames that can be processed concurrently. pub max_frame_handling_tasks: Option, + /// Whether to downcase all DNSTAP hostnames received for consistency + #[serde(default = "crate::serde::default_false")] + pub lowercase_hostnames: bool, + /// The namespace to use for logs. This overrides the global settings. #[configurable(metadata(docs::hidden))] #[serde(default)] @@ -162,6 +167,7 @@ impl Default for DnstapConfig { raw_data_only: None, multithreaded: None, max_frame_handling_tasks: None, + lowercase_hostnames: false, log_namespace: None, } } @@ -222,6 +228,7 @@ struct CommonFrameHandler { timestamp_key: Option, source_type_key: Option, bytes_received: Registered, + lowercase_hostnames: bool, log_namespace: LogNamespace, } @@ -245,6 +252,7 @@ impl CommonFrameHandler { timestamp_key: timestamp_key.cloned(), source_type_key: source_type_key.cloned(), bytes_received: register!(BytesReceived::from(Protocol::from("protobuf"))), + lowercase_hostnames: config.lowercase_hostnames, log_namespace, } } @@ -283,7 +291,13 @@ impl FrameHandler for CommonFrameHandler { (PathPrefix::Event, &DNSTAP_VALUE_PATHS.raw_data), BASE64_STANDARD.encode(&frame), ); - } else if let Err(err) = DnstapParser::parse(&mut log_event, frame) { + } else if let Err(err) = DnstapParser::parse( + &mut log_event, + frame, + DnsParserOptions { + lowercase_hostnames: self.lowercase_hostnames, + }, + ) { emit!(DnstapParseError { error: format!("Dnstap protobuf decode error {:?}.", err) }); @@ -440,6 +454,7 @@ mod integration_tests { raw_data_only: Some(raw_data), multithreaded: Some(false), max_frame_handling_tasks: Some(100000), + lowercase_hostnames: false, log_namespace: None, } .build(SourceContext::new_test(sender, None)) diff --git a/src/sources/dnstap/parser.rs b/src/sources/dnstap/parser.rs index 6067b1a96c7f5..f54853ca327cc 100644 --- a/src/sources/dnstap/parser.rs +++ b/src/sources/dnstap/parser.rs @@ -8,7 +8,7 @@ use std::{ use base64::prelude::{Engine as _, BASE64_STANDARD}; use bytes::Bytes; use chrono::{TimeZone, Utc}; -use dnsmsg_parser::ede::EDE; +use dnsmsg_parser::{dns_message_parser::DnsParserOptions, ede::EDE}; use hickory_proto::{ rr::domain::Name, serialize::binary::{BinDecodable, BinDecoder}, @@ -95,7 +95,11 @@ impl DnstapParser { event.insert((PathPrefix::Event, prefix.concat(path)), value) } - pub fn parse(event: &mut LogEvent, frame: Bytes) -> Result<()> { + pub fn parse( + event: &mut LogEvent, + frame: Bytes, + parsing_options: DnsParserOptions, + ) -> Result<()> { //parse frame with dnstap protobuf let proto_msg = Dnstap::decode(frame.clone())?; let root = owned_value_path!(); @@ -145,7 +149,9 @@ impl DnstapParser { if dnstap_data_type == "Message" { if let Some(message) = proto_msg.message { - if let Err(err) = DnstapParser::parse_dnstap_message(event, &root, message) { + if let Err(err) = + DnstapParser::parse_dnstap_message(event, &root, message, parsing_options) + { emit!(DnstapParseWarning { error: &err }); need_raw_data = true; DnstapParser::insert( @@ -180,6 +186,7 @@ impl DnstapParser { event: &mut LogEvent, prefix: impl ValuePath<'a>, dnstap_message: DnstapMessage, + parsing_options: DnsParserOptions, ) -> Result<()> { if let Some(socket_family) = dnstap_message.socket_family { DnstapParser::parse_dnstap_message_socket_family( @@ 
-249,6 +256,7 @@ impl DnstapParser { prefix.clone(), dnstap_message_type_id, dnstap_message, + parsing_options, )?; Ok(()) @@ -259,11 +267,13 @@ impl DnstapParser { prefix: impl ValuePath<'a>, dnstap_message_type_id: i32, dnstap_message: DnstapMessage, + parsing_options: DnsParserOptions, ) -> Result<()> { match dnstap_message_type_id { 1..=12 => { if let Some(query_message) = dnstap_message.query_message { - let mut query_message_parser = DnsMessageParser::new(query_message); + let mut query_message_parser = + DnsMessageParser::with_options(query_message, parsing_options.clone()); if let Err(error) = DnstapParser::parse_dns_query_message( event, prefix.concat(&DNSTAP_VALUE_PATHS.request_message), @@ -280,7 +290,8 @@ impl DnstapParser { } if let Some(response_message) = dnstap_message.response_message { - let mut response_message_parser = DnsMessageParser::new(response_message); + let mut response_message_parser = + DnsMessageParser::with_options(response_message, parsing_options); if let Err(error) = DnstapParser::parse_dns_query_message( event, prefix.concat(&DNSTAP_VALUE_PATHS.response_message), @@ -298,8 +309,10 @@ impl DnstapParser { } 13 | 14 => { if let Some(update_request_message) = dnstap_message.query_message { - let mut update_request_message_parser = - DnsMessageParser::new(update_request_message); + let mut update_request_message_parser = DnsMessageParser::with_options( + update_request_message, + parsing_options.clone(), + ); if let Err(error) = DnstapParser::parse_dns_update_message( event, &DNSTAP_VALUE_PATHS.request_message, @@ -317,7 +330,7 @@ impl DnstapParser { if let Some(update_response_message) = dnstap_message.response_message { let mut update_response_message_parser = - DnsMessageParser::new(update_response_message); + DnsMessageParser::with_options(update_response_message, parsing_options); if let Err(error) = DnstapParser::parse_dns_update_message( event, &DNSTAP_VALUE_PATHS.response_message, @@ -1004,6 +1017,7 @@ mod tests { use super::*; use crate::event::Value; use chrono::DateTime; + use dnsmsg_parser::dns_message_parser::DnsParserOptions; use std::collections::BTreeMap; #[test] @@ -1015,7 +1029,11 @@ mod tests { let dnstap_data = BASE64_STANDARD .decode(raw_dnstap_data) .expect("Invalid base64 encoded data."); - let parse_result = DnstapParser::parse(&mut log_event, Bytes::from(dnstap_data)); + let parse_result = DnstapParser::parse( + &mut log_event, + Bytes::from(dnstap_data), + DnsParserOptions::default(), + ); assert!(parse_result.is_ok()); let expected_map: BTreeMap<&str, Value> = BTreeMap::from([ @@ -1104,6 +1122,65 @@ mod tests { } } + #[test] + fn test_parse_dnstap_data_lowercase_hostnames() { + let mut log_event = LogEvent::default(); + let mut lowercase_log_event = LogEvent::default(); + let raw_dnstap_data = "Cgw2NzNiNWZiZWI5MmESMkJJTkQgOS4xOC4yMS0xK3VidW50dTIyLjA0LjErZGViLnN1cnkub3JnKzEtVWJ1bnR1cqkBCAYQARgBIgQKWQUeKgQKWQUqMMitAjg1YLXQp68GbZ9tBw9ygwGInoGAAAEABAAAAAEGVmVjdG9yA0RldgAAAQABwAwAAQABAAAAPAAEEvVWOMAMAAEAAQAAADwABBL1VnnADAABAAEAAAA8AAQS9VYSwAwAAQABAAAAPAAEEvVWWQAAKQTQAAAAAAAcAAoAGERDbSN8uKngAQAAAGXp6DXs0fbpv0n9F3gB"; + let dnstap_data = BASE64_STANDARD + .decode(raw_dnstap_data) + .expect("Invalid base64 encoded data."); + let parse_result = DnstapParser::parse( + &mut lowercase_log_event, + Bytes::from(dnstap_data.clone()), + DnsParserOptions { + lowercase_hostnames: true, + }, + ); + let no_lowercase_result = DnstapParser::parse( + &mut log_event, + Bytes::from(dnstap_data), + DnsParserOptions::default(), + ); + 
assert!(parse_result.is_ok()); + assert!(no_lowercase_result.is_ok()); + + let no_lowercase_expected: BTreeMap<&str, Value> = BTreeMap::from([ + ("dataType", Value::Bytes(Bytes::from("Message"))), + ("dataTypeId", Value::Integer(1)), + ( + "responseData.answers[0].domainName", + Value::Bytes(Bytes::from("Vector.Dev.")), + ), + ( + "responseData.question[0].domainName", + Value::Bytes(Bytes::from("Vector.Dev.")), + ), + ]); + let expected_map: BTreeMap<&str, Value> = BTreeMap::from([ + ("dataType", Value::Bytes(Bytes::from("Message"))), + ("dataTypeId", Value::Integer(1)), + ( + "responseData.answers[0].domainName", + Value::Bytes(Bytes::from("vector.dev.")), + ), + ( + "responseData.question[0].domainName", + Value::Bytes(Bytes::from("vector.dev.")), + ), + ]); + + // The maps need to contain identical keys and values. + for (exp_key, exp_value) in no_lowercase_expected { + let value = log_event.get(exp_key).unwrap(); + assert_eq!(*value, exp_value); + } + for (exp_key, exp_value) in expected_map { + let value = lowercase_log_event.get(exp_key).unwrap(); + assert_eq!(*value, exp_value); + } + } + #[test] fn test_parse_dnstap_data_with_ede_options() { let mut log_event = LogEvent::default(); @@ -1111,7 +1188,11 @@ mod tests { let dnstap_data = BASE64_STANDARD .decode(raw_dnstap_data) .expect("Invalid base64 encoded data."); - let parse_result = DnstapParser::parse(&mut log_event, Bytes::from(dnstap_data)); + let parse_result = DnstapParser::parse( + &mut log_event, + Bytes::from(dnstap_data), + DnsParserOptions::default(), + ); assert!(parse_result.is_ok()); let expected_map: BTreeMap<&str, Value> = BTreeMap::from([ @@ -1144,7 +1225,11 @@ mod tests { let dnstap_data = BASE64_STANDARD .decode(raw_dnstap_data) .expect("Invalid base64 encoded data."); - let parse_result = DnstapParser::parse(&mut log_event, Bytes::from(dnstap_data)); + let parse_result = DnstapParser::parse( + &mut log_event, + Bytes::from(dnstap_data), + DnsParserOptions::default(), + ); assert!(parse_result.is_ok()); let expected_map: BTreeMap<&str, Value> = BTreeMap::from([ @@ -1227,8 +1312,12 @@ mod tests { #[test] fn test_parse_dnstap_data_with_invalid_data() { let mut log_event = LogEvent::default(); - let e = DnstapParser::parse(&mut log_event, Bytes::from(vec![1, 2, 3])) - .expect_err("Expected TrustDnsError."); + let e = DnstapParser::parse( + &mut log_event, + Bytes::from(vec![1, 2, 3]), + DnsParserOptions::default(), + ) + .expect_err("Expected TrustDnsError."); assert!(e.to_string().contains("Protobuf message")); } diff --git a/website/cue/reference/components/sources/base/dnstap.cue b/website/cue/reference/components/sources/base/dnstap.cue index 83b40963f4379..3b4fca30d9b16 100644 --- a/website/cue/reference/components/sources/base/dnstap.cue +++ b/website/cue/reference/components/sources/base/dnstap.cue @@ -41,6 +41,11 @@ base: components: sources: dnstap: configuration: { type: uint: unit: "seconds" } } + lowercase_hostnames: { + description: "Whether to downcase all DNSTAP hostnames received for consistency" + required: false + type: bool: default: false + } max_connection_duration_secs: { description: """ Maximum duration to keep each connection open. Connections open for longer than this duration are closed. 
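To illustrate the new parser API end to end, here is a minimal sketch of a
standalone `dnsmsg-parser` caller that opts into lowercasing (the function
name and the boolean return are illustrative, not part of the patch):

```
use dnsmsg_parser::dns_message_parser::{DnsMessageParser, DnsParserOptions};

// Sketch: parse a raw DNS message with hostname lowercasing enabled.
fn parse_with_lowercased_hostnames(raw_message: Vec<u8>) -> bool {
    let mut parser = DnsMessageParser::with_options(
        raw_message,
        DnsParserOptions {
            lowercase_hostnames: true,
        },
    );
    // Hostnames in the parsed sections come back lowercased, e.g.
    // "Vector.Dev." is rendered as "vector.dev.".
    parser.parse_as_query_message().is_ok()
}
```

The `dnstap` source wires its `lowercase_hostnames` setting through to exactly
this option, so existing configurations keep the original casing unless they
opt in.
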
From d07a435a3ef0002919a3d2d140843b998f9689d3 Mon Sep 17 00:00:00 2001
From: Artur Malchanau
Date: Fri, 8 Mar 2024 17:25:33 +0100
Subject: [PATCH 0110/1491] fix(compression): Fix gzip and zlib performance
 degradation (#20032)

* fix(compression): Fix gzip and zlib performance degradation

Fix gzip and zlib performance degradation caused by:

* flate2 v1.0.28 started to resize its input buffer up to its capacity and
  back to the actual number of bytes written.
* Some sinks write to Compressor without buffering, resulting in frequent
  small writes to the flate2 writer. With flate2's 32 KB input buffer, this
  causes an excessive number of memset operations and degrades sink
  throughput.

This fix introduces a wrapper buffer in front of the gzip and zlib writers
to accumulate data before calling the write function of the underlying
writer.

Signed-off-by: Artur Malchanau

* Add a link to the comment with more context.

---------

Signed-off-by: Artur Malchanau
---
 .../20032_gzip_zlib_performance.fix.md        |  3 +
 src/sinks/util/compressor.rs                  | 97 ++++++++++++-------
 2 files changed, 66 insertions(+), 34 deletions(-)
 create mode 100644 changelog.d/20032_gzip_zlib_performance.fix.md

diff --git a/changelog.d/20032_gzip_zlib_performance.fix.md b/changelog.d/20032_gzip_zlib_performance.fix.md
new file mode 100644
index 0000000000000..ef58f4384c7d7
--- /dev/null
+++ b/changelog.d/20032_gzip_zlib_performance.fix.md
@@ -0,0 +1,3 @@
+Fixed gzip and zlib compression performance degradation introduced in v0.34.0.
+
+authors: Hexta
diff --git a/src/sinks/util/compressor.rs b/src/sinks/util/compressor.rs
index f6d3c01ccca87..8eeed45c9a1d8 100644
--- a/src/sinks/util/compressor.rs
+++ b/src/sinks/util/compressor.rs
@@ -1,14 +1,19 @@
-use std::io;
+use std::{io, io::BufWriter};
 
 use bytes::{BufMut, BytesMut};
 use flate2::write::{GzEncoder, ZlibEncoder};
 
 use super::{snappy::SnappyEncoder, zstd::ZstdEncoder, Compression};
 
+const GZIP_INPUT_BUFFER_CAPACITY: usize = 4_096;
+const ZLIB_INPUT_BUFFER_CAPACITY: usize = 4_096;
+
+const OUTPUT_BUFFER_CAPACITY: usize = 1_024;
+
 enum Writer {
     Plain(bytes::buf::Writer<BytesMut>),
-    Gzip(GzEncoder<bytes::buf::Writer<BytesMut>>),
-    Zlib(ZlibEncoder<bytes::buf::Writer<BytesMut>>),
+    Gzip(BufWriter<GzEncoder<bytes::buf::Writer<BytesMut>>>),
+    Zlib(BufWriter<ZlibEncoder<bytes::buf::Writer<BytesMut>>>),
     Zstd(ZstdEncoder<bytes::buf::Writer<BytesMut>>),
     Snappy(SnappyEncoder<bytes::buf::Writer<BytesMut>>),
 }
@@ -17,21 +22,69 @@ impl Writer {
     pub fn get_ref(&self) -> &BytesMut {
         match self {
             Writer::Plain(inner) => inner.get_ref(),
-            Writer::Gzip(inner) => inner.get_ref().get_ref(),
-            Writer::Zlib(inner) => inner.get_ref().get_ref(),
+            Writer::Gzip(inner) => inner.get_ref().get_ref().get_ref(),
+            Writer::Zlib(inner) => inner.get_ref().get_ref().get_ref(),
             Writer::Zstd(inner) => inner.get_ref().get_ref(),
             Writer::Snappy(inner) => inner.get_ref().get_ref(),
         }
     }
+
+    pub fn into_inner(self) -> BytesMut {
+        match self {
+            Writer::Plain(writer) => writer,
+            Writer::Gzip(writer) => writer
+                .into_inner()
+                .expect("BufWriter writer should not fail to finish")
+                .finish()
+                .expect("gzip writer should not fail to finish"),
+            Writer::Zlib(writer) => writer
+                .into_inner()
+                .expect("BufWriter writer should not fail to finish")
+                .finish()
+                .expect("zlib writer should not fail to finish"),
+            Writer::Zstd(writer) => writer
+                .finish()
+                .expect("zstd writer should not fail to finish"),
+            Writer::Snappy(writer) => writer
+                .finish()
+                .expect("snappy writer should not fail to finish"),
+        }
+        .into_inner()
+    }
+
+    pub fn finish(self) -> io::Result<BytesMut> {
+        let buf = match self {
+            Writer::Plain(writer) => writer,
+            Writer::Gzip(writer) => writer.into_inner()?.finish()?,
+            Writer::Zlib(writer) => writer.into_inner()?.finish()?,
+            Writer::Zstd(writer) => writer.finish()?,
+            Writer::Snappy(writer) => writer.finish()?,
+        }
+        .into_inner();
+
+        Ok(buf)
+    }
 }
 
 impl From<Compression> for Writer {
     fn from(compression: Compression) -> Self {
-        let writer = BytesMut::with_capacity(1_024).writer();
+        let writer = BytesMut::with_capacity(OUTPUT_BUFFER_CAPACITY).writer();
         match compression {
             Compression::None => Writer::Plain(writer),
-            Compression::Gzip(level) => Writer::Gzip(GzEncoder::new(writer, level.as_flate2())),
-            Compression::Zlib(level) => Writer::Zlib(ZlibEncoder::new(writer, level.as_flate2())),
+            // Buffering writes to the underlying Encoder writer
+            // to avoid Vec-trashing and expensive memset syscalls.
+            // https://github.com/rust-lang/flate2-rs/issues/395#issuecomment-1975088152
+            Compression::Gzip(level) => Writer::Gzip(BufWriter::with_capacity(
+                GZIP_INPUT_BUFFER_CAPACITY,
+                GzEncoder::new(writer, level.as_flate2()),
+            )),
+            // Buffering writes to the underlying Encoder writer
+            // to avoid Vec-trashing and expensive memset syscalls.
+            // https://github.com/rust-lang/flate2-rs/issues/395#issuecomment-1975088152
+            Compression::Zlib(level) => Writer::Zlib(BufWriter::with_capacity(
+                ZLIB_INPUT_BUFFER_CAPACITY,
+                ZlibEncoder::new(writer, level.as_flate2()),
+            )),
             Compression::Zstd(level) => {
                 let encoder = ZstdEncoder::new(writer, level.into())
                     .expect("Zstd encoder should not fail on init.");
@@ -98,16 +151,7 @@ impl Compressor {
     /// If the compressor encounters an I/O error while finalizing the payload, an error
     /// variant will be returned.
     pub fn finish(self) -> io::Result<BytesMut> {
-        let buf = match self.inner {
-            Writer::Plain(writer) => writer,
-            Writer::Gzip(writer) => writer.finish()?,
-            Writer::Zlib(writer) => writer.finish()?,
-            Writer::Zstd(writer) => writer.finish()?,
-            Writer::Snappy(writer) => writer.finish()?,
-        }
-        .into_inner();
-
-        Ok(buf)
+        self.inner.finish()
     }
 
     /// Consumes the compressor, returning the internal buffer used by the compressor.
@@ -120,22 +164,7 @@ impl Compressor {
     ///
     /// Consider using `finish` if catching these scenarios is important.
pub fn into_inner(self) -> BytesMut { - match self.inner { - Writer::Plain(writer) => writer, - Writer::Gzip(writer) => writer - .finish() - .expect("gzip writer should not fail to finish"), - Writer::Zlib(writer) => writer - .finish() - .expect("zlib writer should not fail to finish"), - Writer::Zstd(writer) => writer - .finish() - .expect("zstd writer should not fail to finish"), - Writer::Snappy(writer) => writer - .finish() - .expect("snappy writer should not fail to finish"), - } - .into_inner() + self.inner.into_inner() } } From c0fe642ec4186e1f0f53c6cb93451fa83cee10d5 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 8 Mar 2024 11:24:36 -0800 Subject: [PATCH 0111/1491] chore: Remove used changelog entries Signed-off-by: Jesse Szwedko --- .../17013_s3_filename_extension.fix.md | 1 - ...0_remove_warning_for_unused_outputs.fix.md | 1 - ...9639_unit_test_vrl_metadata.enhancement.md | 3 -- ...ontext_stream_sink_request_building.fix.md | 36 ------------------- changelog.d/datadog_logs_batching.fix.md | 2 -- changelog.d/gelf_at_character.enhancement.md | 2 -- .../graphql_endpoint_toggle.enhancement.md | 3 -- 7 files changed, 48 deletions(-) delete mode 100644 changelog.d/17013_s3_filename_extension.fix.md delete mode 100644 changelog.d/19370_remove_warning_for_unused_outputs.fix.md delete mode 100644 changelog.d/19639_unit_test_vrl_metadata.enhancement.md delete mode 100644 changelog.d/19712_propagate_tracing_span_context_stream_sink_request_building.fix.md delete mode 100644 changelog.d/datadog_logs_batching.fix.md delete mode 100644 changelog.d/gelf_at_character.enhancement.md delete mode 100644 changelog.d/graphql_endpoint_toggle.enhancement.md diff --git a/changelog.d/17013_s3_filename_extension.fix.md b/changelog.d/17013_s3_filename_extension.fix.md deleted file mode 100644 index 12770687e4d35..0000000000000 --- a/changelog.d/17013_s3_filename_extension.fix.md +++ /dev/null @@ -1 +0,0 @@ -Fixed an issue where the `aws_s3` sink adds a trailing period to the s3 key when the `filename_extension` is empty. diff --git a/changelog.d/19370_remove_warning_for_unused_outputs.fix.md b/changelog.d/19370_remove_warning_for_unused_outputs.fix.md deleted file mode 100644 index 2bd3377b7b0b4..0000000000000 --- a/changelog.d/19370_remove_warning_for_unused_outputs.fix.md +++ /dev/null @@ -1 +0,0 @@ -Removed warnings for unused outputs in `datadog_agent` source when the corresponding output is disabled in the source config. diff --git a/changelog.d/19639_unit_test_vrl_metadata.enhancement.md b/changelog.d/19639_unit_test_vrl_metadata.enhancement.md deleted file mode 100644 index dc03333a5f3be..0000000000000 --- a/changelog.d/19639_unit_test_vrl_metadata.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -Unit tests can now populate event metadata with the `% = ...` syntax. 
- -authors: GreyTeardrop diff --git a/changelog.d/19712_propagate_tracing_span_context_stream_sink_request_building.fix.md b/changelog.d/19712_propagate_tracing_span_context_stream_sink_request_building.fix.md deleted file mode 100644 index 8bc7a2229da0b..0000000000000 --- a/changelog.d/19712_propagate_tracing_span_context_stream_sink_request_building.fix.md +++ /dev/null @@ -1,36 +0,0 @@ -The following metrics now correctly have the `component_kind`, `component_type`, and `component_id` tags: - - `component_errors_total` - - `component_discarded_events_total` - -For the following sinks: - - `splunk_hec` - - `clickhouse` - - `loki` - - `redis` - - `azure_blob` - - `azure_monitor_logs` - - `webhdfs` - - `appsignal` - - `amqp` - - `aws_kinesis` - - `statsd` - - `honeycomb` - - `gcp_stackdriver_metrics` - - `gcs_chronicle_unstructured` - - `gcp_stackdriver_logs` - - `gcp_pubsub` - - `gcp_cloud_storage` - - `nats` - - `http` - - `kafka` - - `new_relic` - - `datadog_metrics` - - `datadog_traces` - - `datadog_events` - - `databend` - - `prometheus_remote_write` - - `pulsar` - - `aws_s3` - - `aws_sqs` - - `aws_sns` - - `elasticsearch` diff --git a/changelog.d/datadog_logs_batching.fix.md b/changelog.d/datadog_logs_batching.fix.md deleted file mode 100644 index 8816210c00c41..0000000000000 --- a/changelog.d/datadog_logs_batching.fix.md +++ /dev/null @@ -1,2 +0,0 @@ -Fixed an issue where the `datadog_logs` sink could produce a request larger than the allowed API -limit. diff --git a/changelog.d/gelf_at_character.enhancement.md b/changelog.d/gelf_at_character.enhancement.md deleted file mode 100644 index 081add0d20466..0000000000000 --- a/changelog.d/gelf_at_character.enhancement.md +++ /dev/null @@ -1,2 +0,0 @@ -Gracefully accept `@` characters in labels when decoding GELF. -authors: MartinEmrich diff --git a/changelog.d/graphql_endpoint_toggle.enhancement.md b/changelog.d/graphql_endpoint_toggle.enhancement.md deleted file mode 100644 index 0336ae4f61f48..0000000000000 --- a/changelog.d/graphql_endpoint_toggle.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -Added a boolean `graphql` field to the api configuration to allow disabling the graphql endpoint. - -Note that the `playground` endpoint will now only be enabled if the `graphql` endpoint is also enabled. 
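To make the buffering fix from patch 0110 above concrete, here is a minimal, self-contained sketch of the same pattern: a `std::io::BufWriter` placed in front of a flate2 `GzEncoder` so that many small writes are coalesced before they reach the encoder's input buffer. The 4 KiB capacity mirrors `GZIP_INPUT_BUFFER_CAPACITY` from the patch; writing into a plain `Vec<u8>` instead of `bytes::buf::Writer<BytesMut>` and the sample payload are simplifications for illustration, and the sketch assumes the `flate2` crate is available.

```rust
use std::io::{BufWriter, Write};

use flate2::{write::GzEncoder, Compression};

fn main() -> std::io::Result<()> {
    // Gzip encoder writing into an in-memory buffer (the patch wraps a
    // `bytes::buf::Writer<BytesMut>` here instead of a `Vec<u8>`).
    let encoder = GzEncoder::new(Vec::new(), Compression::default());

    // Coalesce small writes in a 4 KiB buffer before they reach flate2,
    // mirroring `GZIP_INPUT_BUFFER_CAPACITY` in the patch.
    let mut writer = BufWriter::with_capacity(4_096, encoder);

    // Many tiny writes: without the `BufWriter`, each one would hit the
    // flate2 writer directly and trigger its internal buffer resizing.
    for _ in 0..10_000 {
        writer.write_all(b"small event payload\n")?;
    }

    // Unwrap the `BufWriter` (flushing what remains), then finalize the
    // gzip stream, just like `Writer::finish` does in the patch.
    let compressed = writer.into_inner()?.finish()?;
    println!("compressed to {} bytes", compressed.len());
    Ok(())
}
```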
From 55a962a3c55d7b9437ec6b4b36ca42172bc9b953 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 8 Mar 2024 10:26:04 -0800 Subject: [PATCH 0112/1491] chore(deps): Update VRL to v0.12.0 (#20037) Signed-off-by: Jesse Szwedko --- Cargo.lock | 25 ++++++++++++++++--------- Cargo.toml | 2 +- 2 files changed, 17 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e5d5f5a352b06..932faea180ecf 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1417,6 +1417,12 @@ version = "0.21.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d297deb1925b89f2ccc13d7635fa0714f12c87adce1c75356b39ca9b7178567" +[[package]] +name = "base64" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9475866fec1451be56a3c2400fd081ff546538961565ccb5b7142cbd22bc7a51" + [[package]] name = "base64-simd" version = "0.8.0" @@ -5567,12 +5573,13 @@ dependencies = [ [[package]] name = "nix" -version = "0.27.1" +version = "0.28.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2eb04e9c688eff1c89d72b407f168cf79bb9e867a9d3323ed6c01519eb9cc053" +checksum = "ab2156c4fce2f8df6c499cc1c763e4394b7482525bf2a9701c9d79d215f519e4" dependencies = [ "bitflags 2.4.1", "cfg-if", + "cfg_aliases", "libc", ] @@ -7919,9 +7926,9 @@ dependencies = [ [[package]] name = "rustyline" -version = "13.0.0" +version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02a2d683a4ac90aeef5b1013933f6d977bd37d51ff3f4dad829d4931a7e6be86" +checksum = "7803e8936da37efd9b6d4478277f4b2b9bb5cdb37a113e8d63222e58da647e63" dependencies = [ "bitflags 2.4.1", "cfg-if", @@ -7929,11 +7936,11 @@ dependencies = [ "libc", "log", "memchr", - "nix 0.27.1", + "nix 0.28.0", "unicode-segmentation", "unicode-width", "utf8parse", - "winapi", + "windows-sys 0.52.0", ] [[package]] @@ -10539,15 +10546,15 @@ checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "vrl" -version = "0.11.0" +version = "0.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e125a431e61be8819cd967dce0b610290d51b30e287df9a0aeb07afa9a9a719" +checksum = "d8bfe806485288b36a6024f820b9874176130497673ab5a1935cb09ea0af88c6" dependencies = [ "aes", "ansi_term", "arbitrary", "base16", - "base64 0.21.7", + "base64 0.22.0", "bytes 1.5.0", "cbc", "cfb-mode", diff --git a/Cargo.toml b/Cargo.toml index 500b6c3c093b6..24795499393e1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -135,7 +135,7 @@ proptest-derive = "0.4.0" serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } toml = { version = "0.8.10", default-features = false, features = ["display", "parse"] } -vrl = { version = "0.11.0", features = ["arbitrary", "cli", "test", "test_framework"] } +vrl = { version = "0.12.0", features = ["arbitrary", "cli", "test", "test_framework"] } [dependencies] pin-project.workspace = true From c83e36dd447ef9a4ebe8270bc295743ca3053bb6 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 8 Mar 2024 18:58:13 +0000 Subject: [PATCH 0113/1491] chore(deps): Bump the clap group with 1 update (#20026) Bumps the clap group with 1 update: [clap](https://github.com/clap-rs/clap). 
Updates `clap` from 4.5.1 to 4.5.2 - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/clap_complete-v4.5.1...v4.5.2) --- updated-dependencies: - dependency-name: clap dependency-type: direct:production update-type: version-update:semver-patch dependency-group: clap ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 28 ++++++++++++++-------------- Cargo.toml | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 932faea180ecf..cca629f71bde6 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1986,9 +1986,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.1" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c918d541ef2913577a0f9566e9ce27cb35b6df072075769e0b26cb5a554520da" +checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651" dependencies = [ "clap_builder", "clap_derive", @@ -2000,15 +2000,15 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb9b20c0dd58e4c2e991c8d203bbeb76c11304d1011659686b5b644bc29aa478" dependencies = [ - "clap 4.5.1", + "clap 4.5.2", "log", ] [[package]] name = "clap_builder" -version = "4.5.1" +version = "4.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f3e7391dad68afb0c2ede1bf619f579a3dc9c2ec67f089baa397123a2f3d1eb" +checksum = "ae129e2e766ae0ec03484e609954119f123cc1fe650337e155d03b022f24f7b4" dependencies = [ "anstream", "anstyle", @@ -2023,7 +2023,7 @@ version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "885e4d7d5af40bfb99ae6f9433e292feac98d452dcb3ec3d25dfe7552b77da8c" dependencies = [ - "clap 4.5.1", + "clap 4.5.2", ] [[package]] @@ -2360,7 +2360,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.1", + "clap 4.5.2", "criterion-plot", "futures 0.3.30", "is-terminal", @@ -9973,7 +9973,7 @@ dependencies = [ "anyhow", "cached", "chrono", - "clap 4.5.1", + "clap 4.5.2", "clap-verbosity-flag", "clap_complete", "confy", @@ -10051,7 +10051,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.5.1", + "clap 4.5.2", "colored", "console-subscriber", "criterion", @@ -10202,7 +10202,7 @@ dependencies = [ "anyhow", "async-trait", "chrono", - "clap 4.5.1", + "clap 4.5.2", "futures 0.3.30", "graphql_client", "indoc", @@ -10225,7 +10225,7 @@ dependencies = [ "async-trait", "bytecheck", "bytes 1.5.0", - "clap 4.5.1", + "clap 4.5.2", "crc32fast", "criterion", "crossbeam-queue", @@ -10485,7 +10485,7 @@ dependencies = [ name = "vector-vrl-cli" version = "0.1.0" dependencies = [ - "clap 4.5.1", + "clap 4.5.2", "vector-vrl-functions", "vrl", ] @@ -10504,7 +10504,7 @@ dependencies = [ "ansi_term", "chrono", "chrono-tz", - "clap 4.5.1", + "clap 4.5.2", "enrichment", "glob", "prettydiff", @@ -10564,7 +10564,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.5.1", + "clap 4.5.2", "codespan-reporting", "community-id", "crypto_secretbox", diff --git a/Cargo.toml b/Cargo.toml index 24795499393e1..fedd444bf8387 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -127,7 +127,7 @@ members = [ [workspace.dependencies] chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } -clap = { version = "4.5.1", default-features = false, features = ["derive", 
"error-context", "env", "help", "std", "string", "usage", "wrap_help"] } +clap = { version = "4.5.2", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } indexmap = { version = "2.2.5", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.5", default-features = false } proptest = "1.4" From bd2f0a33e75e624bb75cb2c311bcbfa620ab699a Mon Sep 17 00:00:00 2001 From: Hugo Hromic Date: Fri, 8 Mar 2024 20:17:13 +0100 Subject: [PATCH 0114/1491] feat(website): integrate Cargo package dependency info (#19933) * feat(website): integrate Cargo package dependency info * added a Makefile target to copy Cargo data into Hugo data directory * added a partial to obtain Cargo package info by name * added a shortcode to obtain OpenSSL version in full or major/minor formats * updated TLS configuration page to use the new shortcode * Update website/README.md Co-authored-by: Heston Hoffman * Update website/layouts/shortcodes/openssl-version.html Co-authored-by: Devin Ford --------- Co-authored-by: Heston Hoffman Co-authored-by: Devin Ford --- website/.gitignore | 3 ++- website/Makefile | 20 +++++++++++-------- website/README.md | 4 ++++ website/config.toml | 1 - .../en/docs/reference/configuration/tls.md | 16 +++++++-------- website/layouts/partials/cargo-package.html | 1 + .../layouts/shortcodes/openssl-version.html | 3 +++ 7 files changed, 30 insertions(+), 18 deletions(-) create mode 100644 website/layouts/partials/cargo-package.html create mode 100644 website/layouts/shortcodes/openssl-version.html diff --git a/website/.gitignore b/website/.gitignore index 32db260e9d968..584385fbba996 100644 --- a/website/.gitignore +++ b/website/.gitignore @@ -7,7 +7,8 @@ assets/jsconfig.json # JavaScript, Sass, CSS, etc. 
node_modules/ -# CUE-generated assets +# Assets generated by the Makefile +data/cargo-lock.toml data/docs.json # htmltest generated diff --git a/website/Makefile b/website/Makefile index a65beb151b3f2..a949aef5f8b9f 100644 --- a/website/Makefile +++ b/website/Makefile @@ -6,11 +6,15 @@ export SERVER_BIND ?= 127.0.0.1 export SERVER_PORT ?= 1313 clean: - rm -rf public resources data/docs.json + rm -rf public resources data/cargo-lock.toml data/docs.json setup: yarn +# Copy Cargo data such as the dependency information file +cargo-data: + cp -f ../Cargo.lock data/cargo-lock.toml + # Build JSON from CUE sources cue-build: ${CUE} build @@ -36,7 +40,7 @@ config-examples: structured-data: cue-build config-examples -serve: clean setup structured-data +serve: clean setup cargo-data structured-data hugo server \ --bind $(SERVER_BIND) \ --port $(SERVER_PORT) \ @@ -50,7 +54,7 @@ production-build: --environment "production" \ --minify -ci-production-build: setup structured-data production-build run-link-checker algolia +ci-production-build: setup cargo-data structured-data production-build run-link-checker algolia # Preview site preview-build: @@ -61,7 +65,7 @@ preview-build: --minify cp ./custom-headers/_preview ./public/_headers -ci-preview-build: setup structured-data preview-build run-link-checker algolia +ci-preview-build: setup cargo-data structured-data preview-build run-link-checker algolia run-link-checker: htmltest @@ -80,16 +84,16 @@ algolia: # Useful for locally debugging issues that arise only on the deployed production site run-production-site-locally: - make setup structured-data production-build + make setup cargo-data structured-data production-build python3 -m http.server $(SERVER_PORT) --bind $(SERVER_BIND) --directory ./public # Local dev build with no link checking and no Yarn dependency fetching -quick-build: clean structured-data production-build +quick-build: clean cargo-data structured-data production-build # Full local builds without Algolia updates (for debugging, link checking, etc.) -local-production-build: clean setup structured-data production-build run-link-checker +local-production-build: clean setup cargo-data structured-data production-build run-link-checker -local-preview-build: clean setup structured-data preview-build run-link-checker +local-preview-build: clean setup cargo-data structured-data preview-build run-link-checker # Generate Lighthouse scores locally lighthouse-report: diff --git a/website/README.md b/website/README.md index 27d884c0a744c..1cf87fefb1f3b 100644 --- a/website/README.md +++ b/website/README.md @@ -32,6 +32,10 @@ The `master` branch, on the other hand, often contains unreleased, "nightly" cha vector.dev is built using the [Hugo] static site generator. The site configuration is in [`config.toml`](./config.toml). The standard Hugo [directory structure] is obeyed. +### Cargo data + +Some pages in the Vector documentation rely on dependency information such as version numbers found in the [`../Cargo.lock`](../Cargo.lock) file. Whenever you build the Vector site, the `../Cargo.lock` file is copied into `data/cargo-lock.toml` so it can be used in conjunction with Hugo's templating system to build HTML. + ### Structured data The Vector documentation relies heavily on structured data supplied using the [CUE] configuration and data validation language. Uses of CUE data include the docs for Vector's many [components] and the docs for [Vector Remap Language][vrl]. 
diff --git a/website/config.toml b/website/config.toml index a188ef60da331..67b21dd0eb4f6 100644 --- a/website/config.toml +++ b/website/config.toml @@ -33,7 +33,6 @@ undertagline = "Collect, transform, and route all your observability data with o subtagline = "Vector is deployed over 1,000,000 times per month by Fortune 500 companies and startups" alpine_js_version = "2.8.2" ionicons_version = "5.4.0" -openssl_version = "3.2" site_logo = "img/vector-open-graph.png" display_banner = true # Whether to display the top banner in layouts/partials/banner.html favicon = "favicon.ico" diff --git a/website/content/en/docs/reference/configuration/tls.md b/website/content/en/docs/reference/configuration/tls.md index 7ad6de895d948..e71edbb8c68e7 100644 --- a/website/content/en/docs/reference/configuration/tls.md +++ b/website/content/en/docs/reference/configuration/tls.md @@ -10,7 +10,7 @@ aliases: [ Vector implements cryptography and secure communication using the [OpenSSL][openssl] library. In particular, the official Vector binaries are statically linked against OpenSSL version -{{< param openssl_version >}} and do not use any OpenSSL library installed on the running system. +{{< openssl-version >}} and do not use any OpenSSL library installed on the running system. **Note**: OpenSSL recognizes a number of [environment variables][openssl-env] independently of Vector. @@ -146,12 +146,12 @@ configuration file for it, Vector can also use them directly with the above envi [cli]: /docs/reference/cli [fips-140-2]: https://en.wikipedia.org/wiki/FIPS_140-2 [openssl]: https://www.openssl.org/ -[openssl-config]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man5/config.html -[openssl-env]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man7/openssl-env.html +[openssl-config]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man5/config.html +[openssl-env]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man7/openssl-env.html [openssl-fips]: https://github.com/openssl/openssl/blob/master/README-FIPS.md -[openssl-fips-module]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man7/fips_module.html +[openssl-fips-module]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man7/fips_module.html [openssl-probe]: https://github.com/alexcrichton/openssl-probe -[openssl-providers]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man7/provider.html -[openssl-providers-default]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man7/OSSL_PROVIDER-default.html -[openssl-providers-fips]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man7/OSSL_PROVIDER-FIPS.html -[openssl-providers-legacy]: https://www.openssl.org/docs/man{{< param openssl_version >}}/man7/OSSL_PROVIDER-legacy.html +[openssl-providers]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man7/provider.html +[openssl-providers-default]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man7/OSSL_PROVIDER-default.html +[openssl-providers-fips]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man7/OSSL_PROVIDER-FIPS.html +[openssl-providers-legacy]: https://www.openssl.org/docs/man{{< openssl-version majMin=true >}}/man7/OSSL_PROVIDER-legacy.html diff --git a/website/layouts/partials/cargo-package.html b/website/layouts/partials/cargo-package.html new file mode 100644 index 0000000000000..744f68b9668dc --- /dev/null +++ b/website/layouts/partials/cargo-package.html 
@@ -0,0 +1 @@ +{{ return index (where (index site.Data "cargo-lock").package "name" .) 0 }} diff --git a/website/layouts/shortcodes/openssl-version.html b/website/layouts/shortcodes/openssl-version.html new file mode 100644 index 0000000000000..fc5cf0208a239 --- /dev/null +++ b/website/layouts/shortcodes/openssl-version.html @@ -0,0 +1,3 @@ +{{- $package := partialCached "cargo-package" "openssl-src" -}} +{{- $openssl_version := index (strings.Split $package.version "+") 1 -}} +{{- with .Get "majMin" -}}{{- with strings.Split $openssl_version "." -}}{{- index . 0 -}}.{{- index . 1 -}}{{- end -}}{{- else -}}{{- $openssl_version -}}{{- end -}} From 37a19fab442b06be3dc73c6962578e2f083f9d88 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 8 Mar 2024 11:54:22 -0800 Subject: [PATCH 0115/1491] chore(dev): Remove mention of handwriting changelog for patch release (#20040) This is automated now :) Signed-off-by: Jesse Szwedko --- .github/ISSUE_TEMPLATE/patch-release.md | 3 --- 1 file changed, 3 deletions(-) diff --git a/.github/ISSUE_TEMPLATE/patch-release.md b/.github/ISSUE_TEMPLATE/patch-release.md index ac6dd224ec6a2..2beb8531dbd9a 100644 --- a/.github/ISSUE_TEMPLATE/patch-release.md +++ b/.github/ISSUE_TEMPLATE/patch-release.md @@ -13,9 +13,6 @@ Before the release: - If any merge conflicts occur, attempt to solve them and if needed enlist the aid of those familiar with the conflicting commits. - [ ] Run `cargo vdev build release-cue` to generate a new cue file for the release - [ ] Add `changelog` key to generated cue file - - [ ] `git log --no-merges --cherry-pick --right-only ...` - - [ ] Should be hand-written list of changes - ([example](https://github.com/vectordotdev/vector/blob/9fecdc8b5c45c613de2d01d4d2aee22be3a2e570/website/cue/reference/releases/0.19.0.cue#L44)) - [ ] Add description key to the generated cue file with a description of the release (see previous releases for examples). - [ ] Update version number in `distribution/install.sh` From 6731b44d2ff7c91b37120bb6e7b0433f540d23aa Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 8 Mar 2024 12:13:14 -0800 Subject: [PATCH 0116/1491] chore(releasing): Add missing changelog entries (#20041) Signed-off-by: Jesse Szwedko --- changelog.d/aws_assume_role.fix.md | 1 + changelog.d/aws_region_autodetect.fix.md | 1 + changelog.d/kafka_rebalance_panic.fix.md | 1 + 3 files changed, 3 insertions(+) create mode 100644 changelog.d/aws_assume_role.fix.md create mode 100644 changelog.d/aws_region_autodetect.fix.md create mode 100644 changelog.d/kafka_rebalance_panic.fix.md diff --git a/changelog.d/aws_assume_role.fix.md b/changelog.d/aws_assume_role.fix.md new file mode 100644 index 0000000000000..8162126787065 --- /dev/null +++ b/changelog.d/aws_assume_role.fix.md @@ -0,0 +1 @@ +AWS components again support the use of `assume_role`. This was a regression in v0.36.0. diff --git a/changelog.d/aws_region_autodetect.fix.md b/changelog.d/aws_region_autodetect.fix.md new file mode 100644 index 0000000000000..a25ef77f7ea4b --- /dev/null +++ b/changelog.d/aws_region_autodetect.fix.md @@ -0,0 +1 @@ +AWS components again support auto-detection of the region. This was a regression in v0.36.0. diff --git a/changelog.d/kafka_rebalance_panic.fix.md b/changelog.d/kafka_rebalance_panic.fix.md new file mode 100644 index 0000000000000..4385380f543b2 --- /dev/null +++ b/changelog.d/kafka_rebalance_panic.fix.md @@ -0,0 +1 @@ +The `kafka` sink avoids panicking during a rebalance event. This was a regression in v0.36.0. 
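As a rough cross-check on the `openssl-version` shortcode introduced in patch 0114 above: it looks up the `openssl-src` entry in the copied `Cargo.lock` data and splits its version on `+`, since `openssl-src` version strings carry the bundled OpenSSL version after the plus sign, with an optional major/minor form. Below is a small Rust sketch of that same parsing logic; the sample version string is a hypothetical `Cargo.lock` value for illustration, while the real shortcode reads it from `data/cargo-lock.toml`.

```rust
/// Derive the bundled OpenSSL version from an `openssl-src` version string
/// such as "300.2.3+3.2.1", optionally reduced to its major.minor form.
/// Mirrors the `strings.Split ... "+"` / `strings.Split ... "."` steps of
/// the Hugo shortcode; returns `None` for strings without a `+` segment.
fn openssl_version(openssl_src_version: &str, maj_min: bool) -> Option<String> {
    let full = openssl_src_version.split('+').nth(1)?;
    if maj_min {
        let mut parts = full.split('.');
        Some(format!("{}.{}", parts.next()?, parts.next()?))
    } else {
        Some(full.to_string())
    }
}

fn main() {
    // Hypothetical openssl-src entry, for illustration only.
    let version = "300.2.3+3.2.1";
    assert_eq!(openssl_version(version, false).as_deref(), Some("3.2.1"));
    assert_eq!(openssl_version(version, true).as_deref(), Some("3.2"));
}
```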
From 56f167629049f879429506ce34321b534cfd79da Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Mar 2024 08:58:23 -0600 Subject: [PATCH 0117/1491] chore(deps): Bump base64 from 0.21.7 to 0.22.0 (#19999) Bumps [base64](https://github.com/marshallpierce/rust-base64) from 0.21.7 to 0.22.0. - [Changelog](https://github.com/marshallpierce/rust-base64/blob/master/RELEASE-NOTES.md) - [Commits](https://github.com/marshallpierce/rust-base64/compare/v0.21.7...v0.22.0) --- updated-dependencies: - dependency-name: base64 dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 4 ++-- lib/vector-core/Cargo.toml | 2 +- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index cca629f71bde6..57139982ce339 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10043,7 +10043,7 @@ dependencies = [ "azure_identity", "azure_storage", "azure_storage_blobs", - "base64 0.21.7", + "base64 0.22.0", "bloomy", "bollard", "bytes 1.5.0", @@ -10356,7 +10356,7 @@ version = "0.1.0" dependencies = [ "async-graphql", "async-trait", - "base64 0.21.7", + "base64 0.22.0", "bitmask-enum", "bytes 1.5.0", "chrono", diff --git a/Cargo.toml b/Cargo.toml index fedd444bf8387..e9ea13bbbf0a9 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -254,7 +254,7 @@ arc-swap = { version = "1.7", default-features = false, optional = true } async-compression = { version = "0.4.6", default-features = false, features = ["tokio", "gzip", "zstd"], optional = true } apache-avro = { version = "0.16.0", default-features = false, optional = true } axum = { version = "0.6.20", default-features = false } -base64 = { version = "0.21.7", default-features = false, optional = true } +base64 = { version = "0.22.0", default-features = false, optional = true } bloomy = { version = "1.2.0", default-features = false, optional = true } bollard = { version = "0.16.0", default-features = false, features = ["ssl", "chrono"], optional = true } bytes = { version = "1.5.0", default-features = false, features = ["serde"] } @@ -368,7 +368,7 @@ azure_core = { version = "0.17", default-features = false, features = ["enable_r azure_identity = { version = "0.17", default-features = false, features = ["enable_reqwest"] } azure_storage_blobs = { version = "0.17", default-features = false, features = ["azurite_workaround"] } azure_storage = { version = "0.17", default-features = false } -base64 = "0.21.7" +base64 = "0.22.0" criterion = { version = "0.5.1", features = ["html_reports", "async_tokio"] } itertools = { version = "0.12.1", default-features = false, features = ["use_alloc"] } libc = "0.2.153" diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 2e909658b50b3..9d427d75d54e5 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -75,7 +75,7 @@ schannel = "0.1.23" prost-build = "0.12" [dev-dependencies] -base64 = "0.21.7" +base64 = "0.22.0" chrono-tz = { version = "0.8.6", default-features = false } criterion = { version = "0.5.1", features = ["html_reports"] } env-test-util = "1.0.1" From 7e011047c39d2e15e5d0f41a2605c843c1189b97 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 8 Mar 2024 11:19:38 -0800 Subject: [PATCH 0118/1491] chore(releasing): Prepare v0.36.1 release Signed-off-by: Jesse Szwedko --- changelog.d/aws_assume_role.fix.md | 1 - 
changelog.d/aws_region_autodetect.fix.md | 1 - changelog.d/kafka_rebalance_panic.fix.md | 1 - distribution/install.sh | 2 +- website/content/en/releases/0.36.1.md | 4 ++ website/cue/reference/releases/0.36.0.cue | 15 ++++++ website/cue/reference/releases/0.36.1.cue | 63 +++++++++++++++++++++++ website/cue/reference/versions.cue | 1 + 8 files changed, 84 insertions(+), 4 deletions(-) delete mode 100644 changelog.d/aws_assume_role.fix.md delete mode 100644 changelog.d/aws_region_autodetect.fix.md delete mode 100644 changelog.d/kafka_rebalance_panic.fix.md create mode 100644 website/content/en/releases/0.36.1.md create mode 100644 website/cue/reference/releases/0.36.1.cue diff --git a/changelog.d/aws_assume_role.fix.md b/changelog.d/aws_assume_role.fix.md deleted file mode 100644 index 8162126787065..0000000000000 --- a/changelog.d/aws_assume_role.fix.md +++ /dev/null @@ -1 +0,0 @@ -AWS components again support the use of `assume_role`. This was a regression in v0.36.0. diff --git a/changelog.d/aws_region_autodetect.fix.md b/changelog.d/aws_region_autodetect.fix.md deleted file mode 100644 index a25ef77f7ea4b..0000000000000 --- a/changelog.d/aws_region_autodetect.fix.md +++ /dev/null @@ -1 +0,0 @@ -AWS components again support auto-detection of the region. This was a regression in v0.36.0. diff --git a/changelog.d/kafka_rebalance_panic.fix.md b/changelog.d/kafka_rebalance_panic.fix.md deleted file mode 100644 index 4385380f543b2..0000000000000 --- a/changelog.d/kafka_rebalance_panic.fix.md +++ /dev/null @@ -1 +0,0 @@ -The `kafka` sink avoids panicking during a rebalance event. This was a regression in v0.36.0. diff --git a/distribution/install.sh b/distribution/install.sh index 75f206d1a8c5b..b069b3dddd1f4 100755 --- a/distribution/install.sh +++ b/distribution/install.sh @@ -13,7 +13,7 @@ set -u # If PACKAGE_ROOT is unset or empty, default it. PACKAGE_ROOT="${PACKAGE_ROOT:-"https://packages.timber.io/vector"}" # If VECTOR_VERSION is unset or empty, default it. -VECTOR_VERSION="${VECTOR_VERSION:-"0.36.0"}" +VECTOR_VERSION="${VECTOR_VERSION:-"0.36.1"}" _divider="--------------------------------------------------------------------------------" _prompt=">>>" _indent=" " diff --git a/website/content/en/releases/0.36.1.md b/website/content/en/releases/0.36.1.md new file mode 100644 index 0000000000000..ef655c68d30b5 --- /dev/null +++ b/website/content/en/releases/0.36.1.md @@ -0,0 +1,4 @@ +--- +title: Vector v0.36.1 release notes +weight: 21 +--- diff --git a/website/cue/reference/releases/0.36.0.cue b/website/cue/reference/releases/0.36.0.cue index ad72d305c32d8..b4415928c4b11 100644 --- a/website/cue/reference/releases/0.36.0.cue +++ b/website/cue/reference/releases/0.36.0.cue @@ -22,6 +22,21 @@ releases: "0.36.0": { instructions on how to migrate. """ + known_issues: [ + """ + AWS components don't support use of `credential_process` in AWS configs. Fixed in v0.36.1. + """, + """ + AWS components don't support auto-detection of region. Fixed in v0.36.1. + """, + """ + AWS components don't support use of `assume_role`. Fixed in v0.36.1. + """, + """ + The `kafka` sink occasionally panics during rebalance events. Fixed in v0.36.1.
+ """, + ] + changelog: [ { type: "feat" diff --git a/website/cue/reference/releases/0.36.1.cue b/website/cue/reference/releases/0.36.1.cue new file mode 100644 index 0000000000000..9dabe4a377d4c --- /dev/null +++ b/website/cue/reference/releases/0.36.1.cue @@ -0,0 +1,63 @@ +package metadata + +releases: "0.36.1": { + date: "2024-03-11" + codename: "" + + whats_next: [] + + description: """ + This patch release contains fixes for regressions in 0.36.0. + + **Note:** Please see the release notes for [`v0.36.0`](/releases/0.36.0/) for additional changes if upgrading from + `v0.35.X`. In particular, see the upgrade guide for breaking changes. + """ + + changelog: [ + { + type: "fix" + description: """ + Fixed gzip and zlib compression performance degradation introduced in v0.34.0. + """ + contributors: ["Hexta"] + }, + { + type: "fix" + description: """ + AWS components again support the use of `assume_role`. This was a regression in v0.36.0. + """ + }, + { + type: "fix" + description: """ + AWS components again support the use of `credential_process` in AWS config files to load AWS + credentials from an external process. This was a regression in v0.36.0. + """ + }, + { + type: "fix" + description: """ + AWS components again support auto-detection of the region. This was a regression in v0.36.0. + """ + }, + { + type: "fix" + description: """ + The `kafka` sink avoids panicking during a rebalance event. This was a regression in v0.36.0. + """ + }, + ] + + commits: [ + {sha: "a10a137394bda91a97bf6d1731459615af2869ad", date: "2024-02-17 20:44:07 UTC", description: "0.36 changelog fixes", pr_number: 19875, scopes: ["releases website"], type: "chore", breaking_change: false, author: "hdhoang", files_count: 2, insertions_count: 2, deletions_count: 2}, + {sha: "3057ccfd7e0f58b615d756ca6541b5604053cef4", date: "2024-02-21 05:49:55 UTC", description: "Fix `drop_on_abort` docs", pr_number: 19918, scopes: ["remap transform"], type: "docs", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 6, deletions_count: 10}, + {sha: "f1f8c1bc998ef98215dba117335b74e8e5e57b68", date: "2024-02-22 15:48:21 UTC", description: "bump openssl version used for links in docs", pr_number: 19880, scopes: ["website"], type: "chore", breaking_change: false, author: "Hugo Hromic", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "9def84e0de3831f0add61c9b2cb4e880fcf8aa7d", date: "2024-02-29 09:31:36 UTC", description: "determine region using our http client", pr_number: 19972, scopes: ["aws service"], type: "fix", breaking_change: false, author: "Stephen Wakely", files_count: 1, insertions_count: 25, deletions_count: 3}, + {sha: "54bcee72242d06eacd355451ed62ee1029925a81", date: "2024-03-06 22:45:13 UTC", description: "Update lockfree-object-pool to 0.1.5", pr_number: 20001, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "e4951cc447d8a3b4896c4603a962651350b6ac37", date: "2024-03-08 09:10:35 UTC", description: "Bump whoami to 1.5.0", pr_number: 20018, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 11, deletions_count: 3}, + {sha: "28760fbcdade2353feb506a51ef7288a570d6ca6", date: "2024-03-08 17:46:25 UTC", description: "Enable `credentials-process` for `aws-config`", pr_number: 20030, scopes: ["aws provider"], type: "fix", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 3, 
deletions_count: 1}, + {sha: "63133311baa0df60d08e22bb1e4bec858438e268", date: "2024-03-09 09:25:33 UTC", description: "Fix gzip and zlib performance degradation", pr_number: 20032, scopes: ["compression"], type: "fix", breaking_change: false, author: "Artur Malchanau", files_count: 2, insertions_count: 66, deletions_count: 34}, + {sha: "a8cd2a2df1df26de9e14d51cb84bc0bdd443a195", date: "2024-03-05 22:34:02 UTC", description: "Update mio", pr_number: 20005, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "1ea58e47cadc4acc9d554a60653e76cbdd034105", date: "2024-03-09 04:13:14 UTC", description: "Add missing changelog entries", pr_number: 20041, scopes: ["releasing"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 3, insertions_count: 3, deletions_count: 0}, + ] +} diff --git a/website/cue/reference/versions.cue b/website/cue/reference/versions.cue index a2e8b95e5c1ca..268a507d4f03f 100644 --- a/website/cue/reference/versions.cue +++ b/website/cue/reference/versions.cue @@ -2,6 +2,7 @@ package metadata // This has to be maintained manually because there's currently no way to sort versions programmatically versions: [string, ...string] & [ + "0.36.1", "0.36.0", "0.35.1", "0.35.0", From 04f78584d7dd10e98d81e3065fbb17483009d60f Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Mon, 11 Mar 2024 17:31:33 +0100 Subject: [PATCH 0119/1491] test(dnsmsg_parser): fix tests for currently unknown rdata types (#20052) This changes the tests for RData types currently unknown to `hickory_proto` to use the common mechanism that is used for known types too. The goal is to exercise a code path similar to the actual use case, which first considers known types and only then treats a record as unknown. In #19921, an implementation for HINFO was added together with a test that treated HINFO as an unknown type. That test passed, but in a real application this case would fail for such records, since they would never be treated as an unknown type. This will help spot when any types that Vector already supports have been added to `hickory_proto`.
Related: #19921 --- lib/dnsmsg-parser/src/dns_message_parser.rs | 22 ++++++++++++++++----- 1 file changed, 17 insertions(+), 5 deletions(-) diff --git a/lib/dnsmsg-parser/src/dns_message_parser.rs b/lib/dnsmsg-parser/src/dns_message_parser.rs index 2f3524419c0ae..c4398e736661e 100644 --- a/lib/dnsmsg-parser/src/dns_message_parser.rs +++ b/lib/dnsmsg-parser/src/dns_message_parser.rs @@ -184,7 +184,7 @@ impl DnsMessageParser { .collect::<Result<Vec<_>, _>>() } - fn parse_dns_record(&mut self, record: &Record) -> DnsParserResult<DnsRecord> { + pub(crate) fn parse_dns_record(&mut self, record: &Record) -> DnsParserResult<DnsRecord> { let record_data = match record.data() { Some(RData::Unknown { code, rdata }) => { self.format_unknown_rdata((*code).into(), rdata) @@ -1297,6 +1297,7 @@ mod tests { CAA, CSYNC, HINFO, HTTPS, NAPTR, OPT, SSHFP, TLSA, TXT, }, }; + use hickory_proto::serialize::binary::Restrict; use super::*; @@ -2200,11 +2201,22 @@ let raw_rdata = BASE64 .decode(raw_data.as_bytes()) .expect("Invalid base64 encoded rdata."); - let record_rdata = NULL::with(raw_rdata); - let rdata_text = - DnsMessageParser::new(Vec::<u8>::new()).format_unknown_rdata(code, &record_rdata); + let mut decoder = BinDecoder::new(&raw_rdata); + let record = Record::from_rdata( + Name::new(), + 1, + RData::read( + &mut decoder, + RecordType::from(code), + Restrict::new(raw_rdata.len() as u16), + ) + .unwrap(), + ); + let rdata_text = DnsMessageParser::new(Vec::<u8>::new()) + .parse_dns_record(&record) + .map(|r| r.rdata); assert!(rdata_text.is_ok()); - assert_eq!(expected_output, rdata_text.unwrap().0.unwrap()); + assert_eq!(expected_output, rdata_text.unwrap().unwrap()); } fn test_format_rdata_with_compressed_domain_names( From 7c596b4825e26c5640d4fe4e96ca7e6a471f57cc Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 11 Mar 2024 09:57:24 -0700 Subject: [PATCH 0120/1491] chore(releasing): Fix formatting of v0.36.1 release Signed-off-by: Jesse Szwedko --- website/cue/reference/releases/0.36.1.cue | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/website/cue/reference/releases/0.36.1.cue b/website/cue/reference/releases/0.36.1.cue index 9dabe4a377d4c..a275fc5aee709 100644 --- a/website/cue/reference/releases/0.36.1.cue +++ b/website/cue/reference/releases/0.36.1.cue @@ -7,10 +7,10 @@ releases: "0.36.1": { whats_next: [] description: """ - This patch release contains fixes for regressions in 0.36.0. + This patch release contains fixes for regressions in 0.36.0. - **Note:** Please see the release notes for [`v0.36.0`](/releases/0.36.0/) for additional changes if upgrading from - `v0.35.X`. In particular, see the upgrade guide for breaking changes. + **Note:** Please see the release notes for [`v0.36.0`](/releases/0.36.0/) for additional changes if upgrading from + `v0.35.X`. In particular, see the upgrade guide for breaking changes.
""" changelog: [ From 6d0961347b7c36115da101ab993f66a532493a16 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Mon, 11 Mar 2024 17:55:07 +0100 Subject: [PATCH 0121/1491] docs(vrl): add docs for new validate flag in punycode functions (#19923) * docs(vrl): add docs for new validate flag in punycode functions Related: https://github.com/vectordotdev/vrl/pull/709 * Update validate flag description Co-authored-by: Brett Blue <84536271+brett0000FF@users.noreply.github.com> --------- Co-authored-by: Brett Blue <84536271+brett0000FF@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- .../reference/remap/functions/decode_punycode.cue | 14 ++++++++++++++ .../reference/remap/functions/encode_punycode.cue | 14 ++++++++++++++ 2 files changed, 28 insertions(+) diff --git a/website/cue/reference/remap/functions/decode_punycode.cue b/website/cue/reference/remap/functions/decode_punycode.cue index b2d2dde433e86..0f0734e233e6e 100644 --- a/website/cue/reference/remap/functions/decode_punycode.cue +++ b/website/cue/reference/remap/functions/decode_punycode.cue @@ -13,6 +13,13 @@ remap: functions: decode_punycode: { required: true type: ["string"] }, + { + name: "validate" + description: "If enabled, checks if the input string is a valid domain name." + required: false + type: ["boolean"] + default: true + }, ] internal_failure_reasons: [ "`value` is not valid `punycode`", @@ -34,5 +41,12 @@ remap: functions: decode_punycode: { """ return: "www.cafe.com" }, + { + title: "Ignore validation" + source: """ + decode_punycode!("xn--8hbb.xn--fiba.xn--8hbf.xn--eib.", validate: false) + """ + return: "١٠.٦٦.٣٠.٥." + }, ] } diff --git a/website/cue/reference/remap/functions/encode_punycode.cue b/website/cue/reference/remap/functions/encode_punycode.cue index 13e5415f99638..5ca6df5f0a2ba 100644 --- a/website/cue/reference/remap/functions/encode_punycode.cue +++ b/website/cue/reference/remap/functions/encode_punycode.cue @@ -13,6 +13,13 @@ remap: functions: encode_punycode: { required: true type: ["string"] }, + { + name: "validate" + description: "Whether to validate the input string to check if it is a valid domain name." + required: false + type: ["boolean"] + default: true + }, ] internal_failure_reasons: [ "`value` can not be encoded to `punycode`", @@ -41,5 +48,12 @@ remap: functions: encode_punycode: { """ return: "www.cafe.com" }, + { + title: "Ignore validation" + source: """ + encode_punycode!("xn--8hbb.xn--fiba.xn--8hbf.xn--eib.", validate: false) + """ + return: "xn--8hbb.xn--fiba.xn--8hbf.xn--eib." 
+ }, ] } From cbcb874a9944801e8a89d42e44ecf551db55071a Mon Sep 17 00:00:00 2001 From: Ning Sun Date: Tue, 12 Mar 2024 00:59:16 +0800 Subject: [PATCH 0122/1491] feat(greptimedb sink): improve tls support for greptimedb sink (#20006) * feat: update tls support for greptimedb * fix: corrected health check for tls connection * chore: add changelog fragment for #20006 * Update changelog.d/20006_improve_greptimedb_tls.enhancement.md Co-authored-by: Jesse Szwedko --------- Co-authored-by: Jesse Szwedko --- Cargo.lock | 2 +- Cargo.toml | 2 +- ...0006_improve_greptimedb_tls.enhancement.md | 3 + src/sinks/greptimedb/mod.rs | 12 +-- src/sinks/greptimedb/service.rs | 79 ++++++++----------- 5 files changed, 41 insertions(+), 57 deletions(-) create mode 100644 changelog.d/20006_improve_greptimedb_tls.enhancement.md diff --git a/Cargo.lock b/Cargo.lock index 57139982ce339..c2626d1fd56a1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3700,7 +3700,7 @@ dependencies = [ [[package]] name = "greptimedb-client" version = "0.1.0" -source = "git+https://github.com/GreptimeTeam/greptimedb-ingester-rust.git?rev=4cb19ec47eeaf634c451d9ae438dac445a8a3dce#4cb19ec47eeaf634c451d9ae438dac445a8a3dce" +source = "git+https://github.com/GreptimeTeam/greptimedb-ingester-rust.git?rev=d21dbcff680139ed2065b62100bac3123da7c789#d21dbcff680139ed2065b62100bac3123da7c789" dependencies = [ "dashmap", "enum_dispatch", diff --git a/Cargo.toml b/Cargo.toml index e9ea13bbbf0a9..fbabba3ad2811 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -247,7 +247,7 @@ hex = { version = "0.4.3", default-features = false, optional = true } sha2 = { version = "0.10.8", default-features = false, optional = true } # GreptimeDB -greptimedb-client = { git = "https://github.com/GreptimeTeam/greptimedb-ingester-rust.git", rev = "4cb19ec47eeaf634c451d9ae438dac445a8a3dce", optional = true } +greptimedb-client = { git = "https://github.com/GreptimeTeam/greptimedb-ingester-rust.git", rev = "d21dbcff680139ed2065b62100bac3123da7c789", optional = true } # External libs arc-swap = { version = "1.7", default-features = false, optional = true } diff --git a/changelog.d/20006_improve_greptimedb_tls.enhancement.md b/changelog.d/20006_improve_greptimedb_tls.enhancement.md new file mode 100644 index 0000000000000..9f92f508948fe --- /dev/null +++ b/changelog.d/20006_improve_greptimedb_tls.enhancement.md @@ -0,0 +1,3 @@ +Improves TLS support for the greptimedb sink. `tls.ca_file` is no longer required for enabling TLS. Just use `tls = {}` in the toml configuration when your server uses a certificate issued by a public CA. + +authors: sunng87 diff --git a/src/sinks/greptimedb/mod.rs b/src/sinks/greptimedb/mod.rs index 0c264b2f0b996..a8429b67c249d 100644 --- a/src/sinks/greptimedb/mod.rs +++ b/src/sinks/greptimedb/mod.rs @@ -19,8 +19,6 @@ //! - Distribution, Histogram and Summary, Sketch: Statistical attributes like //! `sum`, `count`, "max", "min", quantiles and buckets are stored as columns. //!
-use greptimedb_client::Client; -use snafu::Snafu; use vector_lib::sensitive_string::SensitiveString; use crate::sinks::prelude::*; @@ -139,19 +137,11 @@ impl SinkConfig for GreptimeDBConfig { } fn healthcheck(config: &GreptimeDBConfig) -> crate::Result<Healthcheck> { - let client = Client::with_urls(vec![&config.endpoint]); + let client = service::new_client_from_config(config)?; Ok(async move { client.health_check().await.map_err(|error| error.into()) }.boxed()) } -#[derive(Debug, Snafu)] -pub enum GreptimeDBConfigError { - #[snafu(display("greptimedb TLS Config Error: missing key"))] - TlsMissingKey, - #[snafu(display("greptimedb TLS Config Error: missing cert"))] - TlsMissingCert, -} - #[cfg(test)] mod tests { use indoc::indoc; diff --git a/src/sinks/greptimedb/service.rs b/src/sinks/greptimedb/service.rs index 46516c745f917..a36fde75c4684 100644 --- a/src/sinks/greptimedb/service.rs +++ b/src/sinks/greptimedb/service.rs @@ -12,7 +12,7 @@ use crate::sinks::prelude::*; use super::batch::GreptimeDBBatchSizer; use super::request_builder::metric_to_insert_request; -use super::{GreptimeDBConfig, GreptimeDBConfigError}; +use super::GreptimeDBConfig; #[derive(Clone, Default)] pub(super) struct GreptimeDBRetryLogic; @@ -103,20 +103,42 @@ pub struct GreptimeDBService { client: Arc<Database>, } +pub(crate) fn new_client_from_config(config: &GreptimeDBConfig) -> crate::Result<Client> { + if let Some(tls_config) = &config.tls { + let channel_config = ChannelConfig { + client_tls: Some(try_from_tls_config(tls_config)?), + ..Default::default() + }; + Ok(Client::with_manager_and_urls( + ChannelManager::with_tls_config(channel_config).map_err(Box::new)?, + vec![&config.endpoint], + )) + } else { + Ok(Client::with_urls(vec![&config.endpoint])) + } +} + +fn try_from_tls_config(tls_config: &TlsConfig) -> crate::Result<ClientTlsOption> { + if tls_config.key_pass.is_some() + || tls_config.alpn_protocols.is_some() + || tls_config.verify_certificate.is_some() + || tls_config.verify_hostname.is_some() + { + warn!( + message = "TlsConfig: key_pass, alpn_protocols, verify_certificate and verify_hostname are not supported by greptimedb client at the moment."
+ ); + } + + Ok(ClientTlsOption { + server_ca_cert_path: tls_config.ca_file.clone(), + client_cert_path: tls_config.crt_file.clone(), + client_key_path: tls_config.key_file.clone(), + }) +} + impl GreptimeDBService { pub fn try_new(config: &GreptimeDBConfig) -> crate::Result<Self> { - let grpc_client = if let Some(tls_config) = &config.tls { - let channel_config = ChannelConfig { - client_tls: Self::try_from_tls_config(tls_config)?, - ..Default::default() - }; - Client::with_manager_and_urls( - ChannelManager::with_tls_config(channel_config).map_err(Box::new)?, - vec![&config.endpoint], - ) - } else { - Client::with_urls(vec![&config.endpoint]) - }; + let grpc_client = new_client_from_config(config)?; let mut client = Database::new_with_dbname(&config.dbname, grpc_client); @@ -131,37 +153,6 @@ impl GreptimeDBService { client: Arc::new(client), }) } - - fn try_from_tls_config(tls_config: &TlsConfig) -> crate::Result<Option<ClientTlsOption>> { - if let Some(ca_path) = tls_config.ca_file.as_ref() { - let cert_path = tls_config - .crt_file - .as_ref() - .ok_or(GreptimeDBConfigError::TlsMissingCert)?; - let key_path = tls_config - .key_file - .as_ref() - .ok_or(GreptimeDBConfigError::TlsMissingKey)?; - - if tls_config.key_pass.is_some() - || tls_config.alpn_protocols.is_some() - || tls_config.verify_certificate.is_some() - || tls_config.verify_hostname.is_some() - { - warn!( - message = "TlsConfig: key_pass, alpn_protocols, verify_certificate and verify_hostname are not supported by greptimedb client at the moment." - ); - } - - Ok(Some(ClientTlsOption { - server_ca_cert_path: ca_path.clone(), - client_key_path: key_path.clone(), - client_cert_path: cert_path.clone(), - })) - } else { - Ok(None) - } - } } impl Service<GreptimeDBRequest> for GreptimeDBService { From d2aca62f1edcedd76bb818dc936a54b0928b0786 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 11 Mar 2024 12:11:01 -0700 Subject: [PATCH 0123/1491] fix(docs): Use `component_kind` rather than `kind` for Hugo (#20058) `kind` is a reserved attribute: https://gohugo.io/templates/section-templates/#page-kinds.
Newer versions of hugo (>= 0.122.0) start returning errors like: ``` sinks/mqtt.md:1:1": unknown kind "sink" in front matter ``` Ref: https://github.com/vectordotdev/vector/pull/20048#discussion_r1520134528 Signed-off-by: Jesse Szwedko --- .../content/en/docs/reference/configuration/sinks/amqp.md | 2 +- .../en/docs/reference/configuration/sinks/appsignal.md | 2 +- .../reference/configuration/sinks/aws_cloudwatch_logs.md | 2 +- .../reference/configuration/sinks/aws_cloudwatch_metrics.md | 2 +- .../reference/configuration/sinks/aws_kinesis_firehose.md | 2 +- .../reference/configuration/sinks/aws_kinesis_streams.md | 2 +- .../content/en/docs/reference/configuration/sinks/aws_s3.md | 2 +- .../en/docs/reference/configuration/sinks/aws_sns.md | 2 +- .../en/docs/reference/configuration/sinks/aws_sqs.md | 2 +- .../content/en/docs/reference/configuration/sinks/axiom.md | 2 +- .../en/docs/reference/configuration/sinks/azure_blob.md | 2 +- .../reference/configuration/sinks/azure_monitor_logs.md | 2 +- .../en/docs/reference/configuration/sinks/blackhole.md | 2 +- .../en/docs/reference/configuration/sinks/clickhouse.md | 2 +- .../en/docs/reference/configuration/sinks/console.md | 2 +- .../en/docs/reference/configuration/sinks/databend.md | 2 +- .../en/docs/reference/configuration/sinks/datadog_events.md | 2 +- .../en/docs/reference/configuration/sinks/datadog_logs.md | 2 +- .../docs/reference/configuration/sinks/datadog_metrics.md | 2 +- .../en/docs/reference/configuration/sinks/datadog_traces.md | 2 +- .../en/docs/reference/configuration/sinks/elasticsearch.md | 2 +- .../content/en/docs/reference/configuration/sinks/file.md | 2 +- .../configuration/sinks/gcp_chronicle_unstructured.md | 2 +- .../docs/reference/configuration/sinks/gcp_cloud_storage.md | 2 +- .../en/docs/reference/configuration/sinks/gcp_pubsub.md | 2 +- .../reference/configuration/sinks/gcp_stackdriver_logs.md | 2 +- .../configuration/sinks/gcp_stackdriver_metrics.md | 2 +- .../en/docs/reference/configuration/sinks/greptimedb.md | 2 +- .../en/docs/reference/configuration/sinks/honeycomb.md | 2 +- .../content/en/docs/reference/configuration/sinks/http.md | 2 +- .../en/docs/reference/configuration/sinks/humio_logs.md | 2 +- .../en/docs/reference/configuration/sinks/humio_metrics.md | 2 +- .../en/docs/reference/configuration/sinks/influxdb_logs.md | 2 +- .../docs/reference/configuration/sinks/influxdb_metrics.md | 2 +- .../content/en/docs/reference/configuration/sinks/kafka.md | 2 +- .../content/en/docs/reference/configuration/sinks/loki.md | 2 +- .../content/en/docs/reference/configuration/sinks/mezmo.md | 2 +- .../content/en/docs/reference/configuration/sinks/mqtt.md | 2 +- .../content/en/docs/reference/configuration/sinks/nats.md | 2 +- .../en/docs/reference/configuration/sinks/new_relic.md | 2 +- .../en/docs/reference/configuration/sinks/papertrail.md | 2 +- .../reference/configuration/sinks/prometheus_exporter.md | 2 +- .../configuration/sinks/prometheus_remote_write.md | 2 +- .../content/en/docs/reference/configuration/sinks/pulsar.md | 2 +- .../content/en/docs/reference/configuration/sinks/redis.md | 2 +- .../en/docs/reference/configuration/sinks/sematext_logs.md | 2 +- .../docs/reference/configuration/sinks/sematext_metrics.md | 2 +- .../content/en/docs/reference/configuration/sinks/socket.md | 2 +- .../docs/reference/configuration/sinks/splunk_hec_logs.md | 2 +- .../reference/configuration/sinks/splunk_hec_metrics.md | 2 +- .../content/en/docs/reference/configuration/sinks/statsd.md | 2 +- 
.../content/en/docs/reference/configuration/sinks/vector.md | 2 +- .../en/docs/reference/configuration/sinks/webhdfs.md | 2 +- .../en/docs/reference/configuration/sinks/websocket.md | 2 +- .../content/en/docs/reference/configuration/sources/amqp.md | 2 +- .../docs/reference/configuration/sources/apache_metrics.md | 2 +- .../docs/reference/configuration/sources/aws_ecs_metrics.md | 2 +- .../reference/configuration/sources/aws_kinesis_firehose.md | 2 +- .../en/docs/reference/configuration/sources/aws_s3.md | 2 +- .../en/docs/reference/configuration/sources/aws_sqs.md | 2 +- .../docs/reference/configuration/sources/datadog_agent.md | 2 +- .../en/docs/reference/configuration/sources/demo_logs.md | 2 +- .../en/docs/reference/configuration/sources/dnstap.md | 2 +- .../en/docs/reference/configuration/sources/docker_logs.md | 2 +- .../reference/configuration/sources/eventstoredb_metrics.md | 2 +- .../content/en/docs/reference/configuration/sources/exec.md | 2 +- .../content/en/docs/reference/configuration/sources/file.md | 2 +- .../docs/reference/configuration/sources/file_descriptor.md | 2 +- .../en/docs/reference/configuration/sources/fluent.md | 2 +- .../en/docs/reference/configuration/sources/gcp_pubsub.md | 2 +- .../en/docs/reference/configuration/sources/heroku_logs.md | 2 +- .../en/docs/reference/configuration/sources/host_metrics.md | 2 +- .../en/docs/reference/configuration/sources/http_client.md | 2 +- .../en/docs/reference/configuration/sources/http_server.md | 2 +- .../docs/reference/configuration/sources/internal_logs.md | 2 +- .../reference/configuration/sources/internal_metrics.md | 2 +- .../en/docs/reference/configuration/sources/journald.md | 2 +- .../en/docs/reference/configuration/sources/kafka.md | 2 +- .../docs/reference/configuration/sources/kubernetes_logs.md | 2 +- .../en/docs/reference/configuration/sources/logstash.md | 2 +- .../docs/reference/configuration/sources/mongodb_metrics.md | 2 +- .../content/en/docs/reference/configuration/sources/nats.md | 2 +- .../docs/reference/configuration/sources/nginx_metrics.md | 2 +- .../docs/reference/configuration/sources/opentelemetry.md | 2 +- .../reference/configuration/sources/postgresql_metrics.md | 2 +- .../configuration/sources/prometheus_pushgateway.md | 2 +- .../configuration/sources/prometheus_remote_write.md | 2 +- .../reference/configuration/sources/prometheus_scrape.md | 2 +- .../en/docs/reference/configuration/sources/pulsar.md | 2 +- .../en/docs/reference/configuration/sources/redis.md | 2 +- .../en/docs/reference/configuration/sources/socket.md | 2 +- .../en/docs/reference/configuration/sources/splunk_hec.md | 2 +- .../en/docs/reference/configuration/sources/statsd.md | 2 +- .../en/docs/reference/configuration/sources/stdin.md | 2 +- .../en/docs/reference/configuration/sources/syslog.md | 2 +- .../en/docs/reference/configuration/sources/vector.md | 2 +- .../en/docs/reference/configuration/transforms/aggregate.md | 2 +- .../reference/configuration/transforms/aws_ec2_metadata.md | 2 +- .../en/docs/reference/configuration/transforms/dedupe.md | 2 +- .../en/docs/reference/configuration/transforms/filter.md | 2 +- .../reference/configuration/transforms/log_to_metric.md | 2 +- .../en/docs/reference/configuration/transforms/lua.md | 2 +- .../reference/configuration/transforms/metric_to_log.md | 2 +- .../en/docs/reference/configuration/transforms/reduce.md | 2 +- .../en/docs/reference/configuration/transforms/remap.md | 2 +- .../en/docs/reference/configuration/transforms/route.md | 2 +- 
.../en/docs/reference/configuration/transforms/sample.md | 2 +- .../configuration/transforms/tag_cardinality_limit.md | 2 +- .../en/docs/reference/configuration/transforms/throttle.md | 2 +- website/layouts/shortcodes/components.html | 6 +++--- 110 files changed, 112 insertions(+), 112 deletions(-) diff --git a/website/content/en/docs/reference/configuration/sinks/amqp.md b/website/content/en/docs/reference/configuration/sinks/amqp.md index 768957a1ca7dc..f6d13f4ddf254 100644 --- a/website/content/en/docs/reference/configuration/sinks/amqp.md +++ b/website/content/en/docs/reference/configuration/sinks/amqp.md @@ -1,7 +1,7 @@ --- title: AMQP description: Send events to [AMQP 0.9.1](https://www.amqp.org/specification/0-9-1/amqp-org-download) compatible brokers like RabbitMQ -kind: sink +component_kind: sink layout: component tags: ["amqp", "rabbitmq", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/appsignal.md b/website/content/en/docs/reference/configuration/sinks/appsignal.md index b6167a2de4f0f..6d463ca5d1093 100644 --- a/website/content/en/docs/reference/configuration/sinks/appsignal.md +++ b/website/content/en/docs/reference/configuration/sinks/appsignal.md @@ -1,7 +1,7 @@ --- title: AppSignal description: Deliver events to [AppSignal](https://www.appsignal.com/) -kind: sink +component_kind: sink layout: component tags: ["appsignal", "component", "sink", "logs", "metrics"] aliases: ["/docs/reference/configuration/sinks/appsignal"] diff --git a/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_logs.md b/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_logs.md index 5cd9dd9557d0b..a294007e26ca1 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_logs.md @@ -1,7 +1,7 @@ --- title: AWS Cloudwatch logs description: Publish log events to [AWS Cloudwatch Logs](https://docs.aws.amazon.com/AmazonCloudWatch/latest/logs/WhatIsCloudWatchLogs.html) -kind: sink +component_kind: sink layout: component tags: ["aws", "cloudwatch", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_metrics.md b/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_metrics.md index 08f17a52740eb..a8ce3fd98bc83 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_cloudwatch_metrics.md @@ -1,7 +1,7 @@ --- title: AWS Cloudwatch metrics description: Publish metric events to [AWS Cloudwatch Metrics](https://docs.aws.amazon.com/AmazonCloudWatch/latest/monitoring/working_with_metrics.html) -kind: sink +component_kind: sink layout: component tags: ["aws", "cloudwatch", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/aws_kinesis_firehose.md b/website/content/en/docs/reference/configuration/sinks/aws_kinesis_firehose.md index 85876b7d3eab5..878f1101a3dbd 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_kinesis_firehose.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_kinesis_firehose.md @@ -1,7 +1,7 @@ --- title: AWS Kinesis Data Firehose logs description: Publish logs to [AWS Kinesis Data Firehose](https://aws.amazon.com/kinesis/data-firehose) topics -kind: sink +component_kind: sink layout: component tags: ["aws", "kinesis", "firehose", "component", "sink", 
"logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/aws_kinesis_streams.md b/website/content/en/docs/reference/configuration/sinks/aws_kinesis_streams.md index 4822dd01537e6..220930a48ae4d 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_kinesis_streams.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_kinesis_streams.md @@ -1,7 +1,7 @@ --- title: AWS Kinesis Streams logs description: Publish logs to [AWS Kinesis Streams](https://aws.amazon.com/kinesis/data-streams) topics -kind: sink +component_kind: sink layout: component tags: ["aws", "kinesis", "streams", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/aws_s3.md b/website/content/en/docs/reference/configuration/sinks/aws_s3.md index 8a3859f88f54d..c7e89fb018daa 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_s3.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_s3.md @@ -1,7 +1,7 @@ --- title: AWS S3 description: Store observability events in the [AWS S3](https://aws.amazon.com/s3/) object storage system -kind: sink +component_kind: sink layout: component tags: ["aws", "s3", "component", "sink", "storage"] aliases: ["/docs/reference/sinks/s3"] diff --git a/website/content/en/docs/reference/configuration/sinks/aws_sns.md b/website/content/en/docs/reference/configuration/sinks/aws_sns.md index 53b573d38ac30..e26af4ed23e69 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_sns.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_sns.md @@ -1,7 +1,7 @@ --- title: AWS SNS description: Publish observability events to [Simple Notification Service](https://aws.amazon.com/sns/) topics -kind: sink +component_kind: sink layout: component tags: ["aws", "sns", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/aws_sqs.md b/website/content/en/docs/reference/configuration/sinks/aws_sqs.md index 383bf752cebee..4d5dfc8c200e5 100644 --- a/website/content/en/docs/reference/configuration/sinks/aws_sqs.md +++ b/website/content/en/docs/reference/configuration/sinks/aws_sqs.md @@ -1,7 +1,7 @@ --- title: AWS SQS description: Publish observability events to [Simple Queue Service](https://aws.amazon.com/sqs/) topics -kind: sink +component_kind: sink layout: component tags: ["aws", "sqs", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/axiom.md b/website/content/en/docs/reference/configuration/sinks/axiom.md index 9ccfb9d142645..48f753fd18173 100644 --- a/website/content/en/docs/reference/configuration/sinks/axiom.md +++ b/website/content/en/docs/reference/configuration/sinks/axiom.md @@ -1,7 +1,7 @@ --- title: Axiom description: Deliver log events to [Axiom](https://axiom.co) -kind: sink +component_kind: sink layout: component tags: ["axiom", "component", "sink", "logs", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/azure_blob.md b/website/content/en/docs/reference/configuration/sinks/azure_blob.md index f0dfbe75b3ff2..1817c2837ee83 100644 --- a/website/content/en/docs/reference/configuration/sinks/azure_blob.md +++ b/website/content/en/docs/reference/configuration/sinks/azure_blob.md @@ -1,7 +1,7 @@ --- title: Azure Blob Storage description: Store your observability data in [Azure Blob Storage](https://azure.microsoft.com/en-us/services/storage/blobs/) -kind: sink +component_kind: sink layout: component tags: ["azure", "blob", "storage", "component", 
"sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/azure_monitor_logs.md b/website/content/en/docs/reference/configuration/sinks/azure_monitor_logs.md index 61bdca126e3b8..b9e0e1933fb67 100644 --- a/website/content/en/docs/reference/configuration/sinks/azure_monitor_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/azure_monitor_logs.md @@ -1,7 +1,7 @@ --- title: Azure Monitor Logs description: Publish log events to the [Azure Monitor Logs](https://docs.microsoft.com/en-us/azure/azure-monitor/logs/data-platform-logs) service -kind: sink +component_kind: sink layout: component tags: ["azure", "monitor", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/blackhole.md b/website/content/en/docs/reference/configuration/sinks/blackhole.md index 78dcd53b0af68..786da7a7cb290 100644 --- a/website/content/en/docs/reference/configuration/sinks/blackhole.md +++ b/website/content/en/docs/reference/configuration/sinks/blackhole.md @@ -1,7 +1,7 @@ --- title: Blackhole description: Send observability events nowhere, which can be useful for debugging purposes -kind: sink +component_kind: sink layout: component tags: ["blackhole", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/clickhouse.md b/website/content/en/docs/reference/configuration/sinks/clickhouse.md index 49599a05022a8..121a7484948bb 100644 --- a/website/content/en/docs/reference/configuration/sinks/clickhouse.md +++ b/website/content/en/docs/reference/configuration/sinks/clickhouse.md @@ -1,7 +1,7 @@ --- title: ClickHouse description: Deliver log data to the [ClickHouse](https://clickhouse.com) database -kind: sink +component_kind: sink layout: component tags: ["clickhouse", "component", "sink", "storage", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/console.md b/website/content/en/docs/reference/configuration/sinks/console.md index c9ae17b7c6bc5..bff6a418a473e 100644 --- a/website/content/en/docs/reference/configuration/sinks/console.md +++ b/website/content/en/docs/reference/configuration/sinks/console.md @@ -1,7 +1,7 @@ --- title: Console description: Display observability events in the console, which can be useful for debugging purposes -kind: sink +component_kind: sink layout: component tags: ["console", "component", "sink", "debug"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/databend.md b/website/content/en/docs/reference/configuration/sinks/databend.md index 0b44bc28b0a7b..97cbc12a2c8bd 100644 --- a/website/content/en/docs/reference/configuration/sinks/databend.md +++ b/website/content/en/docs/reference/configuration/sinks/databend.md @@ -1,7 +1,7 @@ --- title: Databend description: Deliver log data to the [Databend](https://databend.rs) database -kind: sink +component_kind: sink layout: component tags: ["datafuselabs", "databend", "component", "sink", "storage", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/datadog_events.md b/website/content/en/docs/reference/configuration/sinks/datadog_events.md index 1d188f39a023c..d0dbb2a7c8a7e 100644 --- a/website/content/en/docs/reference/configuration/sinks/datadog_events.md +++ b/website/content/en/docs/reference/configuration/sinks/datadog_events.md @@ -1,7 +1,7 @@ --- title: Datadog events description: Publish observability events to the [Datadog](https://docs.datadoghq.com) [Events API](https://docs.datadoghq.com/api/latest/events) -kind: sink 
+component_kind: sink layout: component tags: ["datadog", "component", "sink", "events"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/datadog_logs.md b/website/content/en/docs/reference/configuration/sinks/datadog_logs.md index 56badc89182ab..916a94884a572 100644 --- a/website/content/en/docs/reference/configuration/sinks/datadog_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/datadog_logs.md @@ -1,7 +1,7 @@ --- title: Datadog logs description: Publish log events to [Datadog](https://docs.datadoghq.com) -kind: sink +component_kind: sink layout: component tags: ["datadog", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/datadog_metrics.md b/website/content/en/docs/reference/configuration/sinks/datadog_metrics.md index e8ad75a437099..aa0eefe57a7bc 100644 --- a/website/content/en/docs/reference/configuration/sinks/datadog_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/datadog_metrics.md @@ -1,7 +1,7 @@ --- title: Datadog metrics description: Publish metric events to [Datadog](https://docs.datadoghq.com) -kind: sink +component_kind: sink layout: component tags: ["datadog", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/datadog_traces.md b/website/content/en/docs/reference/configuration/sinks/datadog_traces.md index 410a38b6d6325..c55bbb89dc055 100644 --- a/website/content/en/docs/reference/configuration/sinks/datadog_traces.md +++ b/website/content/en/docs/reference/configuration/sinks/datadog_traces.md @@ -1,7 +1,7 @@ --- title: Datadog traces description: Publish traces to [Datadog](https://docs.datadoghq.com) -kind: sink +component_kind: sink layout: component tags: ["datadog", "component", "sink", "traces"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/elasticsearch.md b/website/content/en/docs/reference/configuration/sinks/elasticsearch.md index 3771f5cc9e7d3..fbd2f681ed8ac 100644 --- a/website/content/en/docs/reference/configuration/sinks/elasticsearch.md +++ b/website/content/en/docs/reference/configuration/sinks/elasticsearch.md @@ -1,7 +1,7 @@ --- title: Elasticsearch description: Index observability events in [Elasticsearch](https://www.elastic.co/elasticsearch) -kind: sink +component_kind: sink layout: component tags: ["elasticsearch", "component", "sink", "search", "storage"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/file.md b/website/content/en/docs/reference/configuration/sinks/file.md index d2fb6fcf3482e..9dc726a812bb3 100644 --- a/website/content/en/docs/reference/configuration/sinks/file.md +++ b/website/content/en/docs/reference/configuration/sinks/file.md @@ -1,7 +1,7 @@ --- title: File description: Output observability events into files -kind: sink +component_kind: sink layout: component tags: ["file", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/gcp_chronicle_unstructured.md b/website/content/en/docs/reference/configuration/sinks/gcp_chronicle_unstructured.md index e84e7811261a1..4412979533c17 100644 --- a/website/content/en/docs/reference/configuration/sinks/gcp_chronicle_unstructured.md +++ b/website/content/en/docs/reference/configuration/sinks/gcp_chronicle_unstructured.md @@ -2,7 +2,7 @@ title: GCP Chronicle Unstructured description: Store unstructured log events in [Google Chronicle](https://cloud.google.com/chronicle/docs/overview) short: GCP Chronicle Unstructured -kind: sink 
+component_kind: sink layout: component tags: ["gcp", "chronicle", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/gcp_cloud_storage.md b/website/content/en/docs/reference/configuration/sinks/gcp_cloud_storage.md index 1d28c6435d3ca..9c4080441a673 100644 --- a/website/content/en/docs/reference/configuration/sinks/gcp_cloud_storage.md +++ b/website/content/en/docs/reference/configuration/sinks/gcp_cloud_storage.md @@ -2,7 +2,7 @@ title: GCP Cloud Storage (GCS) description: Store observability events in GCP [Cloud Storage](https://cloud.google.com/storage) short: GCP Cloud Storage -kind: sink +component_kind: sink layout: component tags: ["gcp", "gcs", "cloud storage", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/gcp_pubsub.md b/website/content/en/docs/reference/configuration/sinks/gcp_pubsub.md index 42cbc51e20049..13c0c69e90b8d 100644 --- a/website/content/en/docs/reference/configuration/sinks/gcp_pubsub.md +++ b/website/content/en/docs/reference/configuration/sinks/gcp_pubsub.md @@ -1,7 +1,7 @@ --- title: GCP PubSub description: Publish observability events to GCP's [PubSub](https://cloud.google.com/pubsub) messaging system -kind: sink +component_kind: sink layout: component tags: ["gcp", "pubsub", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_logs.md b/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_logs.md index 5d0ef2fa0eeab..4d309ec0ff279 100644 --- a/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_logs.md @@ -2,7 +2,7 @@ title: GCP Operations (formerly Stackdriver) logs description: Deliver logs to GCP's [Cloud Operations](https://cloud.google.com/products/operations) suite short: GCP Stackdriver -kind: sink +component_kind: sink layout: component tags: ["gcp", "stackdriver", "operations", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_metrics.md b/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_metrics.md index 9e310e2076382..7280b91e86d80 100644 --- a/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/gcp_stackdriver_metrics.md @@ -1,7 +1,7 @@ --- title: GCP Cloud Monitoring (formerly Stackdriver) description: Deliver metrics to GCP's [Cloud Monitoring](https://cloud.google.com/monitoring) system -kind: sink +component_kind: sink layout: component tags: ["gcp", "stackdriver", "operations", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/greptimedb.md b/website/content/en/docs/reference/configuration/sinks/greptimedb.md index 57aebf227d2f4..f8d0bd087305f 100644 --- a/website/content/en/docs/reference/configuration/sinks/greptimedb.md +++ b/website/content/en/docs/reference/configuration/sinks/greptimedb.md @@ -1,7 +1,7 @@ --- title: GreptimeDB description: Writes metric data to [GreptimeDB](https://github.com/greptimeteam/greptimedb) -kind: sink +component_kind: sink layout: component tags: ["greptimedb", "component", "sink", "storage", "time-series", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/honeycomb.md b/website/content/en/docs/reference/configuration/sinks/honeycomb.md index 94b6fc9788f78..6920897be67e6 100644 --- 
a/website/content/en/docs/reference/configuration/sinks/honeycomb.md +++ b/website/content/en/docs/reference/configuration/sinks/honeycomb.md @@ -1,7 +1,7 @@ --- title: Honeycomb description: Deliver log events to [Honeycomb](https://www.honeycomb.io) -kind: sink +component_kind: sink layout: component tags: ["honeycomb", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/http.md b/website/content/en/docs/reference/configuration/sinks/http.md index 0cd99a2472b1a..230f72696dd9d 100644 --- a/website/content/en/docs/reference/configuration/sinks/http.md +++ b/website/content/en/docs/reference/configuration/sinks/http.md @@ -1,7 +1,7 @@ --- title: HTTP description: Deliver observability data to an [HTTP](https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol) server -kind: sink +component_kind: sink layout: component tags: ["http", "client", "component", "sink", "logs", "metrics", "traces"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/humio_logs.md b/website/content/en/docs/reference/configuration/sinks/humio_logs.md index bea4ded3b5ac0..34e7be1f82ee9 100644 --- a/website/content/en/docs/reference/configuration/sinks/humio_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/humio_logs.md @@ -1,7 +1,7 @@ --- title: Humio logs description: Deliver log event data to [Humio](https://humio.com) -kind: sink +component_kind: sink layout: component tags: ["humio", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/humio_metrics.md b/website/content/en/docs/reference/configuration/sinks/humio_metrics.md index 4bbdac9820120..774f081cc6ef6 100644 --- a/website/content/en/docs/reference/configuration/sinks/humio_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/humio_metrics.md @@ -1,7 +1,7 @@ --- title: Humio metrics description: Deliver metric event data to [Humio](https://humio.com) -kind: sink +component_kind: sink layout: component tags: ["humio", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/influxdb_logs.md b/website/content/en/docs/reference/configuration/sinks/influxdb_logs.md index df33588a51db7..2c532c8268526 100644 --- a/website/content/en/docs/reference/configuration/sinks/influxdb_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/influxdb_logs.md @@ -1,7 +1,7 @@ --- title: InfluxDB logs description: Deliver log event data to [InfluxDB](https://influxdata.com) -kind: sink +component_kind: sink layout: component tags: ["influxdb", "influx", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/influxdb_metrics.md b/website/content/en/docs/reference/configuration/sinks/influxdb_metrics.md index 9397e4a4d1645..85b1965c2dde2 100644 --- a/website/content/en/docs/reference/configuration/sinks/influxdb_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/influxdb_metrics.md @@ -1,7 +1,7 @@ --- title: InfluxDB metrics description: Deliver metric event data to [InfluxDB](https://influxdata.com) -kind: sink +component_kind: sink layout: component tags: ["influxdb", "influx", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/kafka.md b/website/content/en/docs/reference/configuration/sinks/kafka.md index 1c391e2dc6c1a..c9d575c0cac7b 100644 --- a/website/content/en/docs/reference/configuration/sinks/kafka.md +++ 
b/website/content/en/docs/reference/configuration/sinks/kafka.md @@ -1,7 +1,7 @@ --- title: Kafka description: Publish observability data to [Apache Kafka](https://kafka.apache.org) topics -kind: sink +component_kind: sink layout: component tags: ["kafka", "apache", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/loki.md b/website/content/en/docs/reference/configuration/sinks/loki.md index 2e726fdd09c9a..eec2763de6732 100644 --- a/website/content/en/docs/reference/configuration/sinks/loki.md +++ b/website/content/en/docs/reference/configuration/sinks/loki.md @@ -1,7 +1,7 @@ --- title: Loki description: Deliver log event data to the [Loki](https://grafana.com/oss/loki) aggregation system -kind: sink +component_kind: sink layout: component tags: ["loki", "grafana", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/mezmo.md b/website/content/en/docs/reference/configuration/sinks/mezmo.md index 60729c5e75a40..fdf83706be8cb 100644 --- a/website/content/en/docs/reference/configuration/sinks/mezmo.md +++ b/website/content/en/docs/reference/configuration/sinks/mezmo.md @@ -1,7 +1,7 @@ --- title: Mezmo (formerly LogDNA) description: Deliver log event data to [Mezmo](https://mezmo.com) -kind: sink +component_kind: sink layout: component tags: ["logdna", "mezmo", "component", "sink", "logs"] aliases: ["/docs/reference/configuration/sinks/logdna"] diff --git a/website/content/en/docs/reference/configuration/sinks/mqtt.md b/website/content/en/docs/reference/configuration/sinks/mqtt.md index f71f4319b8500..3fd1d2bc070d6 100644 --- a/website/content/en/docs/reference/configuration/sinks/mqtt.md +++ b/website/content/en/docs/reference/configuration/sinks/mqtt.md @@ -1,7 +1,7 @@ --- title: MQTT description: Deliver observability event data to an [MQTT](https://mqtt.org) broker -kind: sink +component_kind: sink layout: component tags: ["mqtt", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/nats.md b/website/content/en/docs/reference/configuration/sinks/nats.md index 465f0f12a86d8..166f137941cbb 100644 --- a/website/content/en/docs/reference/configuration/sinks/nats.md +++ b/website/content/en/docs/reference/configuration/sinks/nats.md @@ -1,7 +1,7 @@ --- title: NATS description: Publish observability data to subjects on the [NATS](https://nats.io) messaging system -kind: sink +component_kind: sink layout: component tags: ["nats", "pubsub", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/new_relic.md b/website/content/en/docs/reference/configuration/sinks/new_relic.md index d15c8968bfb70..885b6f90ff677 100644 --- a/website/content/en/docs/reference/configuration/sinks/new_relic.md +++ b/website/content/en/docs/reference/configuration/sinks/new_relic.md @@ -1,7 +1,7 @@ --- title: New Relic description: Deliver events to [New Relic](https://newrelic.com) -kind: sink +component_kind: sink layout: component tags: ["new relic", "newrelic", "component", "sink", "logs", "metrics"] aliases: ["/docs/reference/configuration/sinks/new_relic_logs"] diff --git a/website/content/en/docs/reference/configuration/sinks/papertrail.md b/website/content/en/docs/reference/configuration/sinks/papertrail.md index 2b8db5e12b11a..49bf7685c533b 100644 --- a/website/content/en/docs/reference/configuration/sinks/papertrail.md +++ b/website/content/en/docs/reference/configuration/sinks/papertrail.md @@ -1,7 +1,7 @@ --- title: Papertrail description: Deliver 
log events to [Papertrail](https://www.solarwinds.com/papertrail) from SolarWinds -kind: sink +component_kind: sink layout: component tags: ["papertrail", "solarwinds", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/prometheus_exporter.md b/website/content/en/docs/reference/configuration/sinks/prometheus_exporter.md index 496ffd2bc0b30..cbfcfcc5f8f91 100644 --- a/website/content/en/docs/reference/configuration/sinks/prometheus_exporter.md +++ b/website/content/en/docs/reference/configuration/sinks/prometheus_exporter.md @@ -1,7 +1,7 @@ --- title: Prometheus Exporter description: Output metric events to a [Prometheus exporter](https://prometheus.io/docs/instrumenting/exporters) running on the host -kind: sink +component_kind: sink layout: component tags: ["prometheus", "exporter", "component", "sink", "metrics"] aliases: ["/docs/reference/sinks/prometheus"] diff --git a/website/content/en/docs/reference/configuration/sinks/prometheus_remote_write.md b/website/content/en/docs/reference/configuration/sinks/prometheus_remote_write.md index 7a7dfdf026c85..41415dd66f773 100644 --- a/website/content/en/docs/reference/configuration/sinks/prometheus_remote_write.md +++ b/website/content/en/docs/reference/configuration/sinks/prometheus_remote_write.md @@ -1,7 +1,7 @@ --- title: Prometheus remote write description: Deliver metric data to a [Prometheus remote write](https://prometheus.io/docs/practices/remote_write) endpoint -kind: sink +component_kind: sink layout: component tags: ["prometheus", "remote write", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/pulsar.md b/website/content/en/docs/reference/configuration/sinks/pulsar.md index 0a21aa69ec0dd..997080ba8429d 100644 --- a/website/content/en/docs/reference/configuration/sinks/pulsar.md +++ b/website/content/en/docs/reference/configuration/sinks/pulsar.md @@ -1,7 +1,7 @@ --- title: Pulsar description: Publish observability events to [Apache Pulsar](https://pulsar.apache.org) topics -kind: sink +component_kind: sink layout: component tags: ["pulsar", "apache", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/redis.md b/website/content/en/docs/reference/configuration/sinks/redis.md index dfeee71bd3bf6..e5714ebb7b76b 100644 --- a/website/content/en/docs/reference/configuration/sinks/redis.md +++ b/website/content/en/docs/reference/configuration/sinks/redis.md @@ -1,7 +1,7 @@ --- title: Redis description: Publish observability data to [Redis](https://redis.io). 
-kind: sink +component_kind: sink layout: component tags: ["redis", "pubsub", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/sematext_logs.md b/website/content/en/docs/reference/configuration/sinks/sematext_logs.md index 2fd9c0f7468cf..dcbdd4072fb01 100644 --- a/website/content/en/docs/reference/configuration/sinks/sematext_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/sematext_logs.md @@ -1,7 +1,7 @@ --- title: Sematext logs description: Publish log events to [Sematext](https://sematext.com) -kind: sink +component_kind: sink layout: component tags: ["sematext", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/sematext_metrics.md b/website/content/en/docs/reference/configuration/sinks/sematext_metrics.md index c09ce4ca7bc32..0c3ff5db900f6 100644 --- a/website/content/en/docs/reference/configuration/sinks/sematext_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/sematext_metrics.md @@ -1,7 +1,7 @@ --- title: Sematext metrics description: Publish metric events to [Sematext](https://sematext.com) -kind: sink +component_kind: sink layout: component tags: ["sematext", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/socket.md b/website/content/en/docs/reference/configuration/sinks/socket.md index e467661b2b48d..529880e5f6017 100644 --- a/website/content/en/docs/reference/configuration/sinks/socket.md +++ b/website/content/en/docs/reference/configuration/sinks/socket.md @@ -1,7 +1,7 @@ --- title: Socket description: Deliver logs to a remote socket endpoint -kind: sink +component_kind: sink layout: component tags: ["socket", "remote", "component", "sink", "logs"] aliases: ["/docs/reference/sinks/tcp"] diff --git a/website/content/en/docs/reference/configuration/sinks/splunk_hec_logs.md b/website/content/en/docs/reference/configuration/sinks/splunk_hec_logs.md index f016036e302fd..9babc2b5cd787 100644 --- a/website/content/en/docs/reference/configuration/sinks/splunk_hec_logs.md +++ b/website/content/en/docs/reference/configuration/sinks/splunk_hec_logs.md @@ -1,7 +1,7 @@ --- title: Splunk HEC logs description: Deliver log data to Splunk's [HTTP Event Collector](https://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector) -kind: sink +component_kind: sink layout: component tags: ["splunk", "hec", "http event collector", "component", "sink", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/splunk_hec_metrics.md b/website/content/en/docs/reference/configuration/sinks/splunk_hec_metrics.md index 27d0906af4081..86d546ea92c62 100644 --- a/website/content/en/docs/reference/configuration/sinks/splunk_hec_metrics.md +++ b/website/content/en/docs/reference/configuration/sinks/splunk_hec_metrics.md @@ -1,7 +1,7 @@ --- title: Splunk HEC metrics description: Deliver metric data to Splunk's [HTTP Event Collector](https://docs.splunk.com/Documentation/Splunk/latest/Data/UsetheHTTPEventCollector) -kind: sink +component_kind: sink layout: component tags: ["splunk", "hec", "http event collector", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/statsd.md b/website/content/en/docs/reference/configuration/sinks/statsd.md index 1a0d37cc11055..a357c9259c821 100644 --- a/website/content/en/docs/reference/configuration/sinks/statsd.md +++ b/website/content/en/docs/reference/configuration/sinks/statsd.md @@ -1,7 +1,7 @@ --- 
title: StatsD description: Deliver metric data to a [StatsD](https://github.com/statsd/statsd) aggregator -kind: sink +component_kind: sink layout: component tags: ["statsd", "component", "sink", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/vector.md b/website/content/en/docs/reference/configuration/sinks/vector.md index 04e1b02244d89..020b0c8f5293b 100644 --- a/website/content/en/docs/reference/configuration/sinks/vector.md +++ b/website/content/en/docs/reference/configuration/sinks/vector.md @@ -1,7 +1,7 @@ --- title: Vector description: Relay observability data to another Vector instance -kind: sink +component_kind: sink layout: component tags: ["vector", "instance", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/webhdfs.md b/website/content/en/docs/reference/configuration/sinks/webhdfs.md index 78738ec08c8cc..836d5ee3c1d23 100644 --- a/website/content/en/docs/reference/configuration/sinks/webhdfs.md +++ b/website/content/en/docs/reference/configuration/sinks/webhdfs.md @@ -1,7 +1,7 @@ --- title: WebHDFS description: Output observability events into WebHDFS -kind: sink +component_kind: sink layout: component tags: ["webhdfs", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sinks/websocket.md b/website/content/en/docs/reference/configuration/sinks/websocket.md index d7685043efdbb..c18d9c11d148f 100644 --- a/website/content/en/docs/reference/configuration/sinks/websocket.md +++ b/website/content/en/docs/reference/configuration/sinks/websocket.md @@ -1,7 +1,7 @@ --- title: Websocket description: Deliver observability event data to a websocket listener -kind: sink +component_kind: sink layout: component tags: ["websocket", "component", "sink"] --- diff --git a/website/content/en/docs/reference/configuration/sources/amqp.md b/website/content/en/docs/reference/configuration/sources/amqp.md index e1c16dd9a9d7d..7ef47a55e1c75 100644 --- a/website/content/en/docs/reference/configuration/sources/amqp.md +++ b/website/content/en/docs/reference/configuration/sources/amqp.md @@ -1,7 +1,7 @@ --- title: AMQP description: Collect events from [AMQP 0.9.1](https://www.amqp.org/specification/0-9-1/amqp-org-download) compatible brokers like RabbitMQ -kind: source +component_kind: source layout: component tags: ["amqp", "rabbitmq", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/apache_metrics.md b/website/content/en/docs/reference/configuration/sources/apache_metrics.md index 19f7d255cfedd..8b2962dbd2031 100644 --- a/website/content/en/docs/reference/configuration/sources/apache_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/apache_metrics.md @@ -2,7 +2,7 @@ title: Apache HTTP server (HTTPD) metrics description: Collect metrics from Apache's [HTTPD](https://httpd.apache.org) server short: Apache Metrics -kind: source +component_kind: source layout: component tags: ["apache", "http", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/aws_ecs_metrics.md b/website/content/en/docs/reference/configuration/sources/aws_ecs_metrics.md index e14a4e1487cda..a28d973479aee 100644 --- a/website/content/en/docs/reference/configuration/sources/aws_ecs_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/aws_ecs_metrics.md @@ -3,7 +3,7 @@ title: AWS ECS metrics description: > Collect Docker container stats for tasks running in [AWS 
ECS](https://aws.amazon.com/ecs) and [AWS Fargate](https://aws.amazon.com/fargate) -kind: source +component_kind: source layout: component tags: ["aws", "ecs", "docker", "fargate", "container", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/aws_kinesis_firehose.md b/website/content/en/docs/reference/configuration/sources/aws_kinesis_firehose.md index 366c54902ab50..725d092e32845 100644 --- a/website/content/en/docs/reference/configuration/sources/aws_kinesis_firehose.md +++ b/website/content/en/docs/reference/configuration/sources/aws_kinesis_firehose.md @@ -1,7 +1,7 @@ --- title: AWS Kinesis Firehose description: Collect logs from [AWS Kinesis Firehose](https://aws.amazon.com/kinesis/data-firehose) -kind: source +component_kind: source layout: component tags: ["aws", "kinesis", "firehose", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/aws_s3.md b/website/content/en/docs/reference/configuration/sources/aws_s3.md index 881b09d2bf28e..037db1dc32731 100644 --- a/website/content/en/docs/reference/configuration/sources/aws_s3.md +++ b/website/content/en/docs/reference/configuration/sources/aws_s3.md @@ -1,7 +1,7 @@ --- title: AWS S3 description: Collect logs from [AWS S3](https://aws.amazon.com/s3) -kind: source +component_kind: source layout: component tags: ["aws", "s3", "storage", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/aws_sqs.md b/website/content/en/docs/reference/configuration/sources/aws_sqs.md index 2742e684b907c..e9a01faa24ca0 100644 --- a/website/content/en/docs/reference/configuration/sources/aws_sqs.md +++ b/website/content/en/docs/reference/configuration/sources/aws_sqs.md @@ -1,7 +1,7 @@ --- title: AWS SQS description: Collect logs from [AWS SQS](https://aws.amazon.com/sqs) -kind: source +component_kind: source layout: component tags: ["aws", "sqs", "queue", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/datadog_agent.md b/website/content/en/docs/reference/configuration/sources/datadog_agent.md index 1a5f23392d02a..d1be4df03c706 100644 --- a/website/content/en/docs/reference/configuration/sources/datadog_agent.md +++ b/website/content/en/docs/reference/configuration/sources/datadog_agent.md @@ -1,7 +1,7 @@ --- title: Datadog agent description: Receive logs, metrics, and traces collected by a [Datadog Agent](https://docs.datadoghq.com/agent) -kind: source +component_kind: source layout: component tags: ["datadog", "agent", "component", "source", "logs", "metrics", "traces"] --- diff --git a/website/content/en/docs/reference/configuration/sources/demo_logs.md b/website/content/en/docs/reference/configuration/sources/demo_logs.md index 289599ac185e4..1b35b0ea80911 100644 --- a/website/content/en/docs/reference/configuration/sources/demo_logs.md +++ b/website/content/en/docs/reference/configuration/sources/demo_logs.md @@ -1,7 +1,7 @@ --- title: Demo Logs description: Generate fake log events, which can be useful for testing and demos -kind: source +component_kind: source layout: component tags: ["demo", "random", "fake", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/dnstap.md b/website/content/en/docs/reference/configuration/sources/dnstap.md index a1e12d42556be..2c896256c6c8b 100644 --- a/website/content/en/docs/reference/configuration/sources/dnstap.md +++ 
b/website/content/en/docs/reference/configuration/sources/dnstap.md @@ -1,7 +1,7 @@ --- title: dnstap description: Collect DNS logs from a [dnstap](https://dnstap.info)-compatible server -kind: source +component_kind: source layout: component tags: ["dnstap", "dns", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/docker_logs.md b/website/content/en/docs/reference/configuration/sources/docker_logs.md index 07f43918ef484..948362c208ab4 100644 --- a/website/content/en/docs/reference/configuration/sources/docker_logs.md +++ b/website/content/en/docs/reference/configuration/sources/docker_logs.md @@ -1,7 +1,7 @@ --- title: Docker logs description: Collect logs from [Docker](https://docker.com) -kind: source +component_kind: source layout: component tags: ["docker", "container", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/eventstoredb_metrics.md b/website/content/en/docs/reference/configuration/sources/eventstoredb_metrics.md index 5d476f6c28712..fd0a6e9d40fc6 100644 --- a/website/content/en/docs/reference/configuration/sources/eventstoredb_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/eventstoredb_metrics.md @@ -1,7 +1,7 @@ --- title: EventStoreDB metrics description: Receive metrics collected by [EventStoreDB](https://www.eventstore.com/) -kind: source +component_kind: source layout: component tags: ["eventstore", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/exec.md b/website/content/en/docs/reference/configuration/sources/exec.md index 075b97a65d720..748cb42e1e897 100644 --- a/website/content/en/docs/reference/configuration/sources/exec.md +++ b/website/content/en/docs/reference/configuration/sources/exec.md @@ -1,7 +1,7 @@ --- title: Exec description: Collect output from a process running on the host -kind: source +component_kind: source layout: component tags: ["exec", "process", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/file.md b/website/content/en/docs/reference/configuration/sources/file.md index 4099c97853f47..e5bf4df0ab462 100644 --- a/website/content/en/docs/reference/configuration/sources/file.md +++ b/website/content/en/docs/reference/configuration/sources/file.md @@ -1,7 +1,7 @@ --- title: File description: Collect logs from [files](https://en.wikipedia.org/wiki/File_system) -kind: source +component_kind: source layout: component tags: ["file", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/file_descriptor.md b/website/content/en/docs/reference/configuration/sources/file_descriptor.md index bd28278c46479..ce28fd242f76d 100644 --- a/website/content/en/docs/reference/configuration/sources/file_descriptor.md +++ b/website/content/en/docs/reference/configuration/sources/file_descriptor.md @@ -1,7 +1,7 @@ --- title: File Descriptor description: Collect logs from a [file descriptor](https://en.wikipedia.org/wiki/File_descriptor) -kind: source +component_kind: source layout: component tags: ["file_descriptor", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/fluent.md b/website/content/en/docs/reference/configuration/sources/fluent.md index 409d320adfb89..bbb81f63da401 100644 --- a/website/content/en/docs/reference/configuration/sources/fluent.md +++ b/website/content/en/docs/reference/configuration/sources/fluent.md @@ -1,7
+1,7 @@ --- title: Fluent description: Collect logs from a [Fluentd](https://www.fluentd.org) or [Fluent Bit](https://fluentbit.io) agent -kind: source +component_kind: source layout: component tags: ["fluentd", "fluent", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/gcp_pubsub.md b/website/content/en/docs/reference/configuration/sources/gcp_pubsub.md index 5b7e448b160e0..019245d0b95e8 100644 --- a/website/content/en/docs/reference/configuration/sources/gcp_pubsub.md +++ b/website/content/en/docs/reference/configuration/sources/gcp_pubsub.md @@ -1,7 +1,7 @@ --- title: GCP PubSub description: Fetch observability events from GCP's [PubSub](https://cloud.google.com/pubsub) messaging system -kind: source +component_kind: source layout: component tags: ["gcp", "pubsub", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/heroku_logs.md b/website/content/en/docs/reference/configuration/sources/heroku_logs.md index 7592244145d35..0a2fb27a981ae 100644 --- a/website/content/en/docs/reference/configuration/sources/heroku_logs.md +++ b/website/content/en/docs/reference/configuration/sources/heroku_logs.md @@ -1,7 +1,7 @@ --- title: Heroku Logplex description: Collect logs from Heroku's [Logplex](https://devcenter.heroku.com/articles/logplex), the router responsible for receiving logs from your Heroku apps -kind: source +component_kind: source layout: component tags: ["heroku", "logplex", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/host_metrics.md b/website/content/en/docs/reference/configuration/sources/host_metrics.md index 0cac0897d1541..54f0b489f0322 100644 --- a/website/content/en/docs/reference/configuration/sources/host_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/host_metrics.md @@ -1,7 +1,7 @@ --- title: Host metrics description: Collect metric data from the local system -kind: source +component_kind: source layout: component tags: ["vector", "host", "local", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/http_client.md b/website/content/en/docs/reference/configuration/sources/http_client.md index 897ca9d7f16e8..8ae92585c2819 100644 --- a/website/content/en/docs/reference/configuration/sources/http_client.md +++ b/website/content/en/docs/reference/configuration/sources/http_client.md @@ -1,7 +1,7 @@ --- title: HTTP Client description: Pull observability data from an [HTTP](https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol) server at a configured interval -kind: source +component_kind: source layout: component tags: ["http", "client", "scrape", "component", "source", "logs", "metrics", "traces"] --- diff --git a/website/content/en/docs/reference/configuration/sources/http_server.md b/website/content/en/docs/reference/configuration/sources/http_server.md index 91de6f9fc7485..bc34221a81a74 100644 --- a/website/content/en/docs/reference/configuration/sources/http_server.md +++ b/website/content/en/docs/reference/configuration/sources/http_server.md @@ -1,7 +1,7 @@ --- title: HTTP Server description: Receive observability data from an [HTTP client request](https://en.wikipedia.org/wiki/Hypertext_Transfer_Protocol#Client_request) -kind: source +component_kind: source layout: component tags: ["http", "server", "component", "source", "logs", "metrics", "traces"] aliases: ["/docs/reference/configuration/sources/http"] diff --git 
a/website/content/en/docs/reference/configuration/sources/internal_logs.md b/website/content/en/docs/reference/configuration/sources/internal_logs.md index 09ee3e86e5bcb..58ac06f7ec222 100644 --- a/website/content/en/docs/reference/configuration/sources/internal_logs.md +++ b/website/content/en/docs/reference/configuration/sources/internal_logs.md @@ -1,7 +1,7 @@ --- title: Internal logs description: Expose all log and trace messages emitted by the running Vector instance -kind: source +component_kind: source layout: component tags: ["vector", "instance", "local", "internal", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/internal_metrics.md b/website/content/en/docs/reference/configuration/sources/internal_metrics.md index 9fcd6281c3683..ed7743577c5f8 100644 --- a/website/content/en/docs/reference/configuration/sources/internal_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/internal_metrics.md @@ -1,7 +1,7 @@ --- title: Internal metrics description: Access the metrics produced by Vector itself and process them in your Vector pipeline -kind: source +component_kind: source layout: component tags: ["vector", "instance", "local", "internal", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/journald.md b/website/content/en/docs/reference/configuration/sources/journald.md index 3e7b60de6ae30..edcc1916fc1ef 100644 --- a/website/content/en/docs/reference/configuration/sources/journald.md +++ b/website/content/en/docs/reference/configuration/sources/journald.md @@ -1,7 +1,7 @@ --- title: JournalD description: Collect logs from [JournalD](https://www.freedesktop.org/software/systemd/man/systemd-journald.service.html) -kind: source +component_kind: source layout: component tags: ["journald", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/kafka.md b/website/content/en/docs/reference/configuration/sources/kafka.md index 09c32cb6126c8..d7a6fe53f0c5c 100644 --- a/website/content/en/docs/reference/configuration/sources/kafka.md +++ b/website/content/en/docs/reference/configuration/sources/kafka.md @@ -1,7 +1,7 @@ --- title: Kafka description: Collect observability data from [Apache Kafka](https://kafka.apache.org) topics -kind: source +component_kind: source layout: component tags: ["kafka", "apache", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/kubernetes_logs.md b/website/content/en/docs/reference/configuration/sources/kubernetes_logs.md index 79c7b9b65dc09..78366c7b4eee0 100644 --- a/website/content/en/docs/reference/configuration/sources/kubernetes_logs.md +++ b/website/content/en/docs/reference/configuration/sources/kubernetes_logs.md @@ -1,7 +1,7 @@ --- title: Kubernetes logs description: Collect logs from [Kubernetes](https://kubernetes.io) Nodes -kind: source +component_kind: source layout: component tags: ["kubernetes", "k8s", "node", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/logstash.md b/website/content/en/docs/reference/configuration/sources/logstash.md index 6a57ce260bfcf..a08ca89107225 100644 --- a/website/content/en/docs/reference/configuration/sources/logstash.md +++ b/website/content/en/docs/reference/configuration/sources/logstash.md @@ -1,7 +1,7 @@ --- title: Logstash description: Collect logs from a [Logstash](https://www.elastic.co/logstash) agent -kind: source
+component_kind: source layout: component tags: ["logstash", "elastic", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/mongodb_metrics.md b/website/content/en/docs/reference/configuration/sources/mongodb_metrics.md index 1ce5d51356acb..3ecd69fda0eca 100644 --- a/website/content/en/docs/reference/configuration/sources/mongodb_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/mongodb_metrics.md @@ -1,7 +1,7 @@ --- title: MongoDB metrics description: Collect metrics from the [MongoDB](https://mongodb.com) database -kind: source +component_kind: source layout: component tags: ["mongodb", "mongo", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/nats.md b/website/content/en/docs/reference/configuration/sources/nats.md index 4890cc15b9c6b..dc5cddcf200eb 100644 --- a/website/content/en/docs/reference/configuration/sources/nats.md +++ b/website/content/en/docs/reference/configuration/sources/nats.md @@ -1,7 +1,7 @@ --- title: NATS description: Read observability data from subjects on the [NATS](https://nats.io) messaging system -kind: source +component_kind: source layout: component tags: ["nats", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/nginx_metrics.md b/website/content/en/docs/reference/configuration/sources/nginx_metrics.md index 5994ecd891a08..1fe3f1f33b692 100644 --- a/website/content/en/docs/reference/configuration/sources/nginx_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/nginx_metrics.md @@ -1,7 +1,7 @@ --- title: NGINX metrics description: Collect metrics from [NGINX](https://nginx.com) -kind: source +component_kind: source layout: component tags: ["nginx", "http", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/opentelemetry.md b/website/content/en/docs/reference/configuration/sources/opentelemetry.md index e89dea0e891cd..981acd3aee1b3 100644 --- a/website/content/en/docs/reference/configuration/sources/opentelemetry.md +++ b/website/content/en/docs/reference/configuration/sources/opentelemetry.md @@ -1,7 +1,7 @@ --- title: OpenTelemetry description: Receive [OTLP](https://opentelemetry.io/docs/reference/specification/protocol/otlp/) data through gRPC or HTTP. 
-kind: source +component_kind: source layout: component tags: ["opentelemetry", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/postgresql_metrics.md b/website/content/en/docs/reference/configuration/sources/postgresql_metrics.md index cf586c51419e5..f72680a292806 100644 --- a/website/content/en/docs/reference/configuration/sources/postgresql_metrics.md +++ b/website/content/en/docs/reference/configuration/sources/postgresql_metrics.md @@ -1,7 +1,7 @@ --- title: PostgreSQL metrics description: Collect metrics from the [PostgreSQL](https://postgresql.org) database -kind: source +component_kind: source layout: component tags: ["postgresql", "postgres", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/prometheus_pushgateway.md b/website/content/en/docs/reference/configuration/sources/prometheus_pushgateway.md index b199ec7030c9e..6aa904ea16584 100644 --- a/website/content/en/docs/reference/configuration/sources/prometheus_pushgateway.md +++ b/website/content/en/docs/reference/configuration/sources/prometheus_pushgateway.md @@ -1,7 +1,7 @@ --- title: Prometheus Pushgateway description: Collect metrics from [Prometheus](https://prometheus.io) -kind: source +component_kind: source layout: component tags: ["prometheus", "pushgateway", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/prometheus_remote_write.md b/website/content/en/docs/reference/configuration/sources/prometheus_remote_write.md index 4bf8dcb14c456..04a7e708269aa 100644 --- a/website/content/en/docs/reference/configuration/sources/prometheus_remote_write.md +++ b/website/content/en/docs/reference/configuration/sources/prometheus_remote_write.md @@ -1,7 +1,7 @@ --- title: Prometheus remote write description: Collect metrics from [Prometheus](https://prometheus.io) -kind: source +component_kind: source layout: component tags: ["prometheus", "remote write", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/prometheus_scrape.md b/website/content/en/docs/reference/configuration/sources/prometheus_scrape.md index 0387807a5ceab..95873fad6fed0 100644 --- a/website/content/en/docs/reference/configuration/sources/prometheus_scrape.md +++ b/website/content/en/docs/reference/configuration/sources/prometheus_scrape.md @@ -1,7 +1,7 @@ --- title: Prometheus scrape description: Collect metrics via the [Prometheus](https://prometheus.io) client -kind: source +component_kind: source layout: component tags: ["prometheus", "scrape", "component", "source", "metrics"] aliases: ["/docs/reference/sources/prometheus"] diff --git a/website/content/en/docs/reference/configuration/sources/pulsar.md b/website/content/en/docs/reference/configuration/sources/pulsar.md index 2b0512eb1ada8..4085f9ad59691 100644 --- a/website/content/en/docs/reference/configuration/sources/pulsar.md +++ b/website/content/en/docs/reference/configuration/sources/pulsar.md @@ -1,7 +1,7 @@ --- title: Pulsar description: Collect observability events from [Apache Pulsar](https://pulsar.apache.org) topics -kind: source +component_kind: source layout: component tags: ["pulsar", "apache", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/redis.md b/website/content/en/docs/reference/configuration/sources/redis.md index 7faa987af04e6..63ef31bd547fc 100644 --- 
a/website/content/en/docs/reference/configuration/sources/redis.md +++ b/website/content/en/docs/reference/configuration/sources/redis.md @@ -1,7 +1,7 @@ --- title: Redis description: Collect observability data from Redis. -kind: source +component_kind: source layout: component tags: ["redis", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/sources/socket.md b/website/content/en/docs/reference/configuration/sources/socket.md index 43002350235aa..91c459736c2f8 100644 --- a/website/content/en/docs/reference/configuration/sources/socket.md +++ b/website/content/en/docs/reference/configuration/sources/socket.md @@ -1,7 +1,7 @@ --- title: Socket description: Collect logs using the [socket](https://en.wikipedia.org/wiki/Network_socket) client -kind: source +component_kind: source layout: component tags: ["socket", "component", "source", "logs"] aliases: ["/docs/reference/sources/tcp", "/docs/reference/sources/udp"] diff --git a/website/content/en/docs/reference/configuration/sources/splunk_hec.md b/website/content/en/docs/reference/configuration/sources/splunk_hec.md index f450930368324..d0928b9c15f30 100644 --- a/website/content/en/docs/reference/configuration/sources/splunk_hec.md +++ b/website/content/en/docs/reference/configuration/sources/splunk_hec.md @@ -2,7 +2,7 @@ title: Splunk HTTP Event Collector (HEC) description: Receive logs from [Splunk](https://splunk.com) short: Splunk HEC -kind: source +component_kind: source layout: component tags: ["splunk", "hec", "http event collector", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/statsd.md b/website/content/en/docs/reference/configuration/sources/statsd.md index 1987da5bc34ae..aadb531430b67 100644 --- a/website/content/en/docs/reference/configuration/sources/statsd.md +++ b/website/content/en/docs/reference/configuration/sources/statsd.md @@ -1,7 +1,7 @@ --- title: StatsD description: Collect metrics emitted via [StatsD](https://github.com/statsd/statsd) protocol -kind: source +component_kind: source layout: component tags: ["statsd", "component", "source", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/sources/stdin.md b/website/content/en/docs/reference/configuration/sources/stdin.md index d7f6669af15be..65c652a9ff54e 100644 --- a/website/content/en/docs/reference/configuration/sources/stdin.md +++ b/website/content/en/docs/reference/configuration/sources/stdin.md @@ -1,7 +1,7 @@ --- title: stdin description: Collect logs sent via [stdin](https://en.wikipedia.org/wiki/Standard_streams#Standard_input_(stdin)) -kind: source +component_kind: source layout: component tags: ["stdin", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/syslog.md b/website/content/en/docs/reference/configuration/sources/syslog.md index 49056c36bd4db..435dee9ed48b2 100644 --- a/website/content/en/docs/reference/configuration/sources/syslog.md +++ b/website/content/en/docs/reference/configuration/sources/syslog.md @@ -1,7 +1,7 @@ --- title: Syslog description: Collect logs sent via [Syslog](https://en.wikipedia.org/wiki/Syslog) -kind: source +component_kind: source layout: component tags: ["syslog", "component", "source", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/sources/vector.md b/website/content/en/docs/reference/configuration/sources/vector.md index 824bf6b6563e4..797a1d38557c1 100644 --- 
a/website/content/en/docs/reference/configuration/sources/vector.md +++ b/website/content/en/docs/reference/configuration/sources/vector.md @@ -1,7 +1,7 @@ --- title: Vector description: Collect observability data from another Vector instance -kind: source +component_kind: source layout: component tags: ["vector", "instance", "component", "source"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/aggregate.md b/website/content/en/docs/reference/configuration/transforms/aggregate.md index 3c9a975ebebeb..147a5dc4e6c1e 100644 --- a/website/content/en/docs/reference/configuration/transforms/aggregate.md +++ b/website/content/en/docs/reference/configuration/transforms/aggregate.md @@ -1,7 +1,7 @@ --- title: Aggregate description: Aggregate metrics passing through a topology -kind: transform +component_kind: transform layout: component tags: ["aggregate", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/aws_ec2_metadata.md b/website/content/en/docs/reference/configuration/transforms/aws_ec2_metadata.md index 968263822ae64..a7c7b3e39c0b6 100644 --- a/website/content/en/docs/reference/configuration/transforms/aws_ec2_metadata.md +++ b/website/content/en/docs/reference/configuration/transforms/aws_ec2_metadata.md @@ -1,7 +1,7 @@ --- title: AWS EC2 metadata description: Parse metadata emitted by [AWS EC2](https://aws.amazon.com/ec2) instances -kind: transform +component_kind: transform layout: component tags: ["aws", "ec2", "metadata", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/dedupe.md b/website/content/en/docs/reference/configuration/transforms/dedupe.md index e4ab9ad7ab8d3..9fb9f6ab60a21 100644 --- a/website/content/en/docs/reference/configuration/transforms/dedupe.md +++ b/website/content/en/docs/reference/configuration/transforms/dedupe.md @@ -2,7 +2,7 @@ title: Dedupe events description: Deduplicate logs passing through a topology short: Dedupe -kind: transform +component_kind: transform layout: component tags: ["dedupe", "deduplicate", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/filter.md b/website/content/en/docs/reference/configuration/transforms/filter.md index 1be94188c03a6..f6e8d42885962 100644 --- a/website/content/en/docs/reference/configuration/transforms/filter.md +++ b/website/content/en/docs/reference/configuration/transforms/filter.md @@ -1,7 +1,7 @@ --- title: Filter description: Filter events based on a set of conditions -kind: transform +component_kind: transform layout: component tags: ["filter", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/log_to_metric.md b/website/content/en/docs/reference/configuration/transforms/log_to_metric.md index 5bf1f2ac574cb..5513df395f928 100644 --- a/website/content/en/docs/reference/configuration/transforms/log_to_metric.md +++ b/website/content/en/docs/reference/configuration/transforms/log_to_metric.md @@ -1,7 +1,7 @@ --- title: Log to metric description: Convert log events to metric events -kind: transform +component_kind: transform layout: component tags: ["log to metric", "convert", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/lua.md b/website/content/en/docs/reference/configuration/transforms/lua.md index 08284fdee0fd7..2b4782c4b85de 100644 --- a/website/content/en/docs/reference/configuration/transforms/lua.md +++ 
b/website/content/en/docs/reference/configuration/transforms/lua.md @@ -1,7 +1,7 @@ --- title: Lua description: Modify event data using the [Lua](https://lua.org) programming language -kind: transform +component_kind: transform layout: component tags: ["lua", "runtime", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/metric_to_log.md b/website/content/en/docs/reference/configuration/transforms/metric_to_log.md index 578f3af784c8d..5b7854e730d66 100644 --- a/website/content/en/docs/reference/configuration/transforms/metric_to_log.md +++ b/website/content/en/docs/reference/configuration/transforms/metric_to_log.md @@ -1,7 +1,7 @@ --- title: Metric to log description: Convert metric events to log events -kind: transform +component_kind: transform layout: component tags: ["metric to log", "convert", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/reduce.md b/website/content/en/docs/reference/configuration/transforms/reduce.md index 3cbd6a9a3836b..650bbd0422655 100644 --- a/website/content/en/docs/reference/configuration/transforms/reduce.md +++ b/website/content/en/docs/reference/configuration/transforms/reduce.md @@ -1,7 +1,7 @@ --- title: Reduce description: Collapse multiple log events into a single event based on a set of conditions and merge strategies -kind: transform +component_kind: transform layout: component tags: ["filter", "multiline", "component", "transform", "logs"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/remap.md b/website/content/en/docs/reference/configuration/transforms/remap.md index 33acbf3fd0c66..bb4c28a3cef25 100644 --- a/website/content/en/docs/reference/configuration/transforms/remap.md +++ b/website/content/en/docs/reference/configuration/transforms/remap.md @@ -2,7 +2,7 @@ title: Remap with VRL description: > Modify your observability data as it passes through your topology using [Vector Remap Language](/docs/reference/vrl) (VRL) -kind: transform +component_kind: transform featured: true layout: component weight: 1 diff --git a/website/content/en/docs/reference/configuration/transforms/route.md b/website/content/en/docs/reference/configuration/transforms/route.md index b985f2a60cd2c..2bc41b4aacf6c 100644 --- a/website/content/en/docs/reference/configuration/transforms/route.md +++ b/website/content/en/docs/reference/configuration/transforms/route.md @@ -1,7 +1,7 @@ --- title: Route description: Split a stream of events into multiple sub-streams based on user-supplied conditions -kind: transform +component_kind: transform layout: component tags: ["route", "swimlanes", "split", "component", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/sample.md b/website/content/en/docs/reference/configuration/transforms/sample.md index 67c56382d8433..5597aeb002068 100644 --- a/website/content/en/docs/reference/configuration/transforms/sample.md +++ b/website/content/en/docs/reference/configuration/transforms/sample.md @@ -1,7 +1,7 @@ --- title: Sample description: Sample events from an event stream based on supplied criteria and at a configurable rate -kind: transform +component_kind: transform layout: component tags: ["sample", "component", "log", "trace", "transform"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/tag_cardinality_limit.md b/website/content/en/docs/reference/configuration/transforms/tag_cardinality_limit.md index 23d2ba9dfca21..d3ad92e743a8f 100644 --- 
a/website/content/en/docs/reference/configuration/transforms/tag_cardinality_limit.md +++ b/website/content/en/docs/reference/configuration/transforms/tag_cardinality_limit.md @@ -1,7 +1,7 @@ --- title: Tag cardinality limit description: Limit the cardinality of tags on metrics events as a safeguard against cardinality explosion -kind: transform +component_kind: transform layout: component tags: ["tag", "cardinality", "component", "transform", "metrics"] --- diff --git a/website/content/en/docs/reference/configuration/transforms/throttle.md b/website/content/en/docs/reference/configuration/transforms/throttle.md index 9b79b86bdb67d..ebf28a2e43750 100644 --- a/website/content/en/docs/reference/configuration/transforms/throttle.md +++ b/website/content/en/docs/reference/configuration/transforms/throttle.md @@ -1,7 +1,7 @@ --- title: Throttle description: Rate limit logs passing through a topology -kind: transform +component_kind: transform layout: component tags: ["throttle", "component", "transform"] --- diff --git a/website/layouts/shortcodes/components.html b/website/layouts/shortcodes/components.html index 49bcc04f57a8c..d1d8135d86da0 100644 --- a/website/layouts/shortcodes/components.html +++ b/website/layouts/shortcodes/components.html @@ -1,8 +1,8 @@ -{{ $kind := .Get 0 }} -{{ $componentsOfType := where site.RegularPages ".Params.kind" $kind }} +{{ $componentKind:= .Get 0 }} +{{ $componentsOfKind := where site.RegularPages ".Params.component_kind" $componentKind}}
- {{ range $componentsOfType }} + {{ range $componentsOfKind }} {{ .Render "component-card" }} {{ end }}
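Aside (not part of the patch series): the `kind` → `component_kind` front-matter rename above is presumably motivated by Hugo's built-in page variable `.Kind` (`home`, `section`, `page`, ...), which is easy to collide or confuse with a custom front-matter field named `kind`. Below is a minimal Hugo-template sketch of consuming the renamed field; the partial name and the `default` fallback are hypothetical and not taken from the patch:

```html
{{/* layouts/partials/component-kind-badge.html (illustrative sketch only) */}}
{{/* Read the renamed custom front-matter param; "source" is an assumed fallback, not from the patch. */}}
{{ $componentKind := .Params.component_kind | default "source" }}
<span class="badge">{{ $componentKind }}</span>
{{/* Hugo's built-in .Kind (e.g. "page") is a separate value entirely and is untouched by this rename. */}}
```

One consequence worth noting: any docs page whose front matter still declared `kind: source` would silently drop out of the `where site.RegularPages ".Params.component_kind" $componentKind` query in the updated shortcode, which is why the page front matter and the templates must change in the same commit.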
From b35eaf53315532a7668cd36342f72af2d4e00488 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 11 Mar 2024 12:56:13 -0700 Subject: [PATCH 0124/1491] chore(releasing): Regenerate k8s manifests for Helm chart v0.31.1 (#20060) Signed-off-by: Jesse Szwedko --- distribution/kubernetes/vector-agent/README.md | 2 +- distribution/kubernetes/vector-agent/configmap.yaml | 4 ++-- distribution/kubernetes/vector-agent/daemonset.yaml | 4 ++-- distribution/kubernetes/vector-agent/rbac.yaml | 4 ++-- distribution/kubernetes/vector-agent/service-headless.yaml | 2 +- distribution/kubernetes/vector-agent/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/README.md | 2 +- distribution/kubernetes/vector-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-aggregator/service-headless.yaml | 2 +- distribution/kubernetes/vector-aggregator/service.yaml | 2 +- .../kubernetes/vector-aggregator/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/statefulset.yaml | 5 +++-- .../kubernetes/vector-stateless-aggregator/README.md | 2 +- .../kubernetes/vector-stateless-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-stateless-aggregator/deployment.yaml | 5 +++-- .../vector-stateless-aggregator/service-headless.yaml | 2 +- .../kubernetes/vector-stateless-aggregator/service.yaml | 2 +- .../vector-stateless-aggregator/serviceaccount.yaml | 2 +- 18 files changed, 25 insertions(+), 23 deletions(-) diff --git a/distribution/kubernetes/vector-agent/README.md b/distribution/kubernetes/vector-agent/README.md index e16e7123167ff..305c49b19cdbb 100644 --- a/distribution/kubernetes/vector-agent/README.md +++ b/distribution/kubernetes/vector-agent/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.31.0 with the following `values.yaml`: +version 0.31.1 with the following `values.yaml`: ```yaml role: Agent diff --git a/distribution/kubernetes/vector-agent/configmap.yaml b/distribution/kubernetes/vector-agent/configmap.yaml index 6131fadb49e6d..50a0f146022bf 100644 --- a/distribution/kubernetes/vector-agent/configmap.yaml +++ b/distribution/kubernetes/vector-agent/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" data: agent.yaml: | data_dir: /vector-data-dir @@ -25,7 +25,7 @@ data: excludes: [binfmt_misc] filesystems: excludes: [binfmt_misc] - mountPoints: + mountpoints: excludes: ["*/proc/sys/fs/binfmt_misc"] type: host_metrics internal_metrics: diff --git a/distribution/kubernetes/vector-agent/daemonset.yaml b/distribution/kubernetes/vector-agent/daemonset.yaml index 26355970e0245..a14e9ff4113f0 100644 --- a/distribution/kubernetes/vector-agent/daemonset.yaml +++ b/distribution/kubernetes/vector-agent/daemonset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: {} spec: selector: @@ -30,7 +30,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.36.0-distroless-libc" + image: "timberio/vector:0.36.1-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git 
a/distribution/kubernetes/vector-agent/rbac.yaml b/distribution/kubernetes/vector-agent/rbac.yaml index 457ddcdb500aa..0fa5e329dfbaa 100644 --- a/distribution/kubernetes/vector-agent/rbac.yaml +++ b/distribution/kubernetes/vector-agent/rbac.yaml @@ -10,7 +10,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" rules: - apiGroups: - "" @@ -31,7 +31,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole diff --git a/distribution/kubernetes/vector-agent/service-headless.yaml b/distribution/kubernetes/vector-agent/service-headless.yaml index 45b21cf3bccb4..a064605c6eafe 100644 --- a/distribution/kubernetes/vector-agent/service-headless.yaml +++ b/distribution/kubernetes/vector-agent/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-agent/serviceaccount.yaml b/distribution/kubernetes/vector-agent/serviceaccount.yaml index 0c7fd9014dcb3..45c0f65f458c4 100644 --- a/distribution/kubernetes/vector-agent/serviceaccount.yaml +++ b/distribution/kubernetes/vector-agent/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/README.md b/distribution/kubernetes/vector-aggregator/README.md index e9b192647230d..a628dd9f7a7cf 100644 --- a/distribution/kubernetes/vector-aggregator/README.md +++ b/distribution/kubernetes/vector-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.31.0 with the following `values.yaml`: +version 0.31.1 with the following `values.yaml`: ```yaml diff --git a/distribution/kubernetes/vector-aggregator/configmap.yaml b/distribution/kubernetes/vector-aggregator/configmap.yaml index c81e8eb40126d..f63c63908af06 100644 --- a/distribution/kubernetes/vector-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-aggregator/service-headless.yaml b/distribution/kubernetes/vector-aggregator/service-headless.yaml index e4e607cf01f0d..95243260dbac0 100644 --- a/distribution/kubernetes/vector-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector 
app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-aggregator/service.yaml b/distribution/kubernetes/vector-aggregator/service.yaml index 5d99f70aa1c97..9ab6f562d84b9 100644 --- a/distribution/kubernetes/vector-aggregator/service.yaml +++ b/distribution/kubernetes/vector-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml index 2ad01a8db7a0a..75bec8b11ec83 100644 --- a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/statefulset.yaml b/distribution/kubernetes/vector-aggregator/statefulset.yaml index 3dbcb63db7975..8ddce3f40fe86 100644 --- a/distribution/kubernetes/vector-aggregator/statefulset.yaml +++ b/distribution/kubernetes/vector-aggregator/statefulset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: {} spec: replicas: 1 @@ -18,6 +18,7 @@ spec: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator + minReadySeconds: 0 serviceName: vector-headless template: metadata: @@ -32,7 +33,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.36.0-distroless-libc" + image: "timberio/vector:0.36.1-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/README.md b/distribution/kubernetes/vector-stateless-aggregator/README.md index 2d013cfd7ad1e..6f23102e6b1a8 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/README.md +++ b/distribution/kubernetes/vector-stateless-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.31.0 with the following `values.yaml`: +version 0.31.1 with the following `values.yaml`: ```yaml role: Stateless-Aggregator diff --git a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml index 30224b72d03e7..1ef333048e261 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + 
app.kubernetes.io/version: "0.36.1-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml index a87b68a9bd334..ae35e1940c01a 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: {} spec: replicas: 1 @@ -17,6 +17,7 @@ spec: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator + minReadySeconds: 0 template: metadata: annotations: {} @@ -30,7 +31,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.36.0-distroless-libc" + image: "timberio/vector:0.36.1-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml index 7d57fe61128a0..4fa239d4a656b 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-stateless-aggregator/service.yaml b/distribution/kubernetes/vector-stateless-aggregator/service.yaml index fbcaf26d05212..e9b10fdda00f3 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml index 549de176d0f4d..18316cff0d047 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.0-distroless-libc" + app.kubernetes.io/version: "0.36.1-distroless-libc" automountServiceAccountToken: true From e9815e1f328a4ef59099c3d07918f167947c2e1f Mon Sep 17 00:00:00 2001 From: William Taylor Date: Mon, 11 Mar 2024 21:06:13 +0000 Subject: [PATCH 0125/1491] feat(platforms): Add ARMv6 builds (#19192) * feat(platforms): Add ARMv6 builds Just curious to see if this works Signed-off-by: Jesse Szwedko * add make targets Signed-off-by: Jesse Szwedko * add image overrides for new arm v6 targets protoc was previously not found due to `cross` falling back to the default build images instead of the image 
overlaid with protoc Remove references to debs and rpms * Separate arch var setting in alpine dockerfile due to subshell * use filtered platforms for buildx * fix shellcheck errors --------- Signed-off-by: Jesse Szwedko Co-authored-by: Jesse Szwedko --- .github/actions/spelling/allow.txt | 2 + .github/workflows/cross.yml | 2 + .github/workflows/publish.yml | 104 +++++++++++++++++- .gitignore | 3 + Cargo.toml | 5 + Cross.toml | 6 + Makefile | 23 ++++ .../18445_armv6_binaries.enhancement.md | 3 + distribution/docker/alpine/Dockerfile | 7 +- scripts/build-docker.sh | 31 +++++- .../arm-unknown-linux-gnueabi.dockerfile | 4 + .../arm-unknown-linux-musleabi.dockerfile | 8 ++ 12 files changed, 195 insertions(+), 3 deletions(-) create mode 100644 changelog.d/18445_armv6_binaries.enhancement.md create mode 100644 scripts/cross/arm-unknown-linux-gnueabi.dockerfile create mode 100644 scripts/cross/arm-unknown-linux-musleabi.dockerfile diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index d764f60fa9e84..40cbe949e2a70 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -131,6 +131,7 @@ Moto Mpman Multilaser Mumbai +musleabi Mytab NLB Nabi @@ -307,6 +308,7 @@ gcr gcs gdpr github +gnueabi gnueabihf gnupg gnuplot diff --git a/.github/workflows/cross.yml b/.github/workflows/cross.yml index 695f79d0a9ab3..ca1d49a02f8fa 100644 --- a/.github/workflows/cross.yml +++ b/.github/workflows/cross.yml @@ -19,6 +19,8 @@ jobs: - aarch64-unknown-linux-musl - armv7-unknown-linux-gnueabihf - armv7-unknown-linux-musleabihf + - arm-unknown-linux-gnueabi + - arm-unknown-linux-musleabi steps: - name: (PR comment) Get PR branch diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index b40a11170c751..f84ff29b56a44 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -207,6 +207,61 @@ jobs: name: vector-${{ env.VECTOR_VERSION }}-armv7-unknown-linux-musleabihf path: target/artifacts/vector* + build-arm-unknown-linux-gnueabi-packages: + name: Build Vector for arm-unknown-linux-gnueabi (.tar.gz) + runs-on: [ linux, release-builder ] + timeout-minutes: 60 + needs: generate-publish-metadata + env: + VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} + VECTOR_BUILD_DESC: ${{ needs.generate-publish-metadata.outputs.vector_build_desc }} + CHANNEL: ${{ needs.generate-publish-metadata.outputs.vector_release_channel }} + steps: + - name: Checkout Vector + uses: actions/checkout@v3 + with: + ref: ${{ inputs.git_ref }} + - name: Bootstrap runner environment (Ubuntu-specific) + run: sudo -E bash scripts/environment/bootstrap-ubuntu-20.04.sh + - name: Bootstrap runner environment (generic) + run: bash scripts/environment/prepare.sh + - name: Build Vector + env: + DOCKER_PRIVILEGED: "true" + run: make package-arm-unknown-linux-gnueabi-all + - name: Stage package artifacts for publish + uses: actions/upload-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-gnueabi + path: target/artifacts/vector* + + build-arm-unknown-linux-musleabi-packages: + name: Build Vector for arm-unknown-linux-musleabi (.tar.gz) + runs-on: [ linux, release-builder ] + needs: generate-publish-metadata + env: + VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} + VECTOR_BUILD_DESC: ${{ needs.generate-publish-metadata.outputs.vector_build_desc }} + CHANNEL: ${{ needs.generate-publish-metadata.outputs.vector_release_channel }} + steps: + - name: Checkout Vector
uses: actions/checkout@v3 + with: + ref: ${{ inputs.git_ref }} + - name: Bootstrap runner environment (Ubuntu-specific) + run: sudo -E bash scripts/environment/bootstrap-ubuntu-20.04.sh + - name: Bootstrap runner environment (generic) + run: bash scripts/environment/prepare.sh + - name: Build Vector + env: + DOCKER_PRIVILEGED: "true" + run: make package-arm-unknown-linux-musleabi + - name: Stage package artifacts for publish + uses: actions/upload-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-musleabi + path: target/artifacts/vector* + build-x86_64-apple-darwin-packages: name: Build Vector for x86_64-apple-darwin (.tar.gz) runs-on: macos-latest-xl @@ -414,6 +469,8 @@ jobs: - build-x86_64-unknown-linux-musl-packages - build-armv7-unknown-linux-musleabihf-packages - build-armv7-unknown-linux-gnueabihf-packages + - build-arm-unknown-linux-gnueabi-packages + - build-arm-unknown-linux-musleabi-packages - deb-verify env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -469,9 +526,19 @@ jobs: with: name: vector-${{ env.VECTOR_VERSION }}-armv7-unknown-linux-musleabihf path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-gnueabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-gnueabi + path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-musleabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-musleabi + path: target/artifacts - name: Build and publish Docker images env: - PLATFORM: "linux/amd64,linux/arm64,linux/arm/v7" + PLATFORM: "linux/amd64,linux/arm64,linux/arm/v7,linux/arm/v6" run: | make release-docker @@ -489,6 +556,8 @@ jobs: - build-x86_64-pc-windows-msvc-packages - build-armv7-unknown-linux-musleabihf-packages - build-armv7-unknown-linux-gnueabihf-packages + - build-arm-unknown-linux-gnueabi-packages + - build-arm-unknown-linux-musleabi-packages - deb-verify - rpm-verify - macos-verify @@ -540,6 +609,16 @@ jobs: with: name: vector-${{ env.VECTOR_VERSION }}-armv7-unknown-linux-musleabihf path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-gnueabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-gnueabi + path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-musleabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-musleabi + path: target/artifacts - name: Publish artifacts to S3 env: AWS_ACCESS_KEY_ID: ${{ secrets.CI_AWS_ACCESS_KEY_ID }} @@ -562,6 +641,8 @@ jobs: - build-x86_64-pc-windows-msvc-packages - build-armv7-unknown-linux-gnueabihf-packages - build-armv7-unknown-linux-musleabihf-packages + - build-arm-unknown-linux-gnueabi-packages + - build-arm-unknown-linux-musleabi-packages - deb-verify - rpm-verify - macos-verify @@ -617,6 +698,15 @@ jobs: uses: actions/download-artifact@v3 with: name: vector-${{ env.VECTOR_VERSION }}-SHA256SUMS + - name: Download staged package artifacts (arm-unknown-linux-gnueabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-gnueabi + path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-musleabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-musleabi path: target/artifacts - name: Publish 
release to GitHub env: @@ -658,6 +748,8 @@ jobs: - build-x86_64-pc-windows-msvc-packages - build-armv7-unknown-linux-gnueabihf-packages - build-armv7-unknown-linux-musleabihf-packages + - build-arm-unknown-linux-gnueabi-packages + - build-arm-unknown-linux-musleabi-packages env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} steps: @@ -705,6 +797,16 @@ jobs: with: name: vector-${{ env.VECTOR_VERSION }}-armv7-unknown-linux-musleabihf path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-gnueabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-gnueabi + path: target/artifacts + - name: Download staged package artifacts (arm-unknown-linux-musleabi) + uses: actions/download-artifact@v3 + with: + name: vector-${{ env.VECTOR_VERSION }}-arm-unknown-linux-musleabi + path: target/artifacts - name: Generate SHA256 checksums for artifacts run: make sha256sum - name: Stage checksum for publish diff --git a/.gitignore b/.gitignore index 25a85d5f6b361..24b0593d51d6c 100644 --- a/.gitignore +++ b/.gitignore @@ -19,3 +19,6 @@ massif.* # tilt tilt_modules/ + +# Jetbrains +.idea/ diff --git a/Cargo.toml b/Cargo.toml index fbabba3ad2811..e62d8215332bf 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -77,6 +77,9 @@ start = false # libc requirements are defined by `cross` # https://github.com/rust-embedded/cross#supported-targets # Though, it seems like aarch64 libc is actually 2.18 and not 2.19 +[package.metadata.deb.variants.arm-unknown-linux-gnueabi] +depends = "libc6 (>= 2.15)" + [package.metadata.deb.variants.armv7-unknown-linux-gnueabihf] depends = "libc6 (>= 2.15)" @@ -424,6 +427,8 @@ target-aarch64-unknown-linux-gnu = ["api", "api-client", "enrichment-tables", "r target-aarch64-unknown-linux-musl = ["api", "api-client", "enrichment-tables", "rdkafka?/cmake_build", "sinks", "sources", "sources-dnstap", "transforms", "unix", "enterprise"] target-armv7-unknown-linux-gnueabihf = ["api", "api-client", "enrichment-tables", "rdkafka?/cmake_build", "sinks", "sources", "sources-dnstap", "transforms", "unix", "enterprise"] target-armv7-unknown-linux-musleabihf = ["api", "api-client", "rdkafka?/cmake_build", "enrichment-tables", "sinks", "sources", "sources-dnstap", "transforms", "enterprise"] +target-arm-unknown-linux-gnueabi = ["api", "api-client", "enrichment-tables", "rdkafka?/cmake_build", "sinks", "sources", "sources-dnstap", "transforms", "unix", "enterprise"] +target-arm-unknown-linux-musleabi = ["api", "api-client", "rdkafka?/cmake_build", "enrichment-tables", "sinks", "sources", "sources-dnstap", "transforms", "enterprise"] target-x86_64-unknown-linux-gnu = ["api", "api-client", "rdkafka?/cmake_build", "enrichment-tables", "sinks", "sources", "sources-dnstap", "transforms", "unix", "rdkafka?/gssapi-vendored", "enterprise"] target-x86_64-unknown-linux-musl = ["api", "api-client", "rdkafka?/cmake_build", "enrichment-tables", "sinks", "sources", "sources-dnstap", "transforms", "unix", "enterprise"] # Does not currently build diff --git a/Cross.toml b/Cross.toml index 1b138c3638f6d..96e97d7ff64dc 100644 --- a/Cross.toml +++ b/Cross.toml @@ -28,3 +28,9 @@ image = "vector-cross-env:armv7-unknown-linux-gnueabihf" [target.armv7-unknown-linux-musleabihf] image = "vector-cross-env:armv7-unknown-linux-musleabihf" + +[target.arm-unknown-linux-gnueabi] +image = "vector-cross-env:arm-unknown-linux-gnueabi" + +[target.arm-unknown-linux-musleabi] +image = "vector-cross-env:arm-unknown-linux-musleabi" diff 
--git a/Makefile b/Makefile index 934e7c5eb7755..96e83bc169e13 100644 --- a/Makefile +++ b/Makefile @@ -223,6 +223,14 @@ build-armv7-unknown-linux-gnueabihf: target/armv7-unknown-linux-gnueabihf/releas build-armv7-unknown-linux-musleabihf: target/armv7-unknown-linux-musleabihf/release/vector ## Build a release binary for the armv7-unknown-linux-musleabihf triple. @echo "Output to ${<}" +.PHONY: build-arm-unknown-linux-gnueabi +build-arm-unknown-linux-gnueabi: target/arm-unknown-linux-gnueabi/release/vector ## Build a release binary for the arm-unknown-linux-gnueabi triple. + @echo "Output to ${<}" + +.PHONY: build-arm-unknown-linux-musleabi +build-arm-unknown-linux-musleabi: target/arm-unknown-linux-musleabi/release/vector ## Build a release binary for the arm-unknown-linux-musleabi triple. + @echo "Output to ${<}" + .PHONY: build-graphql-schema build-graphql-schema: ## Generate the `schema.json` for Vector's GraphQL API ${MAYBE_ENVIRONMENT_EXEC} cargo run --bin graphql-schema --no-default-features --features=default-no-api-client @@ -529,6 +537,9 @@ package-aarch64-unknown-linux-gnu-all: package-aarch64-unknown-linux-gnu package .PHONY: package-armv7-unknown-linux-gnueabihf-all package-armv7-unknown-linux-gnueabihf-all: package-armv7-unknown-linux-gnueabihf package-deb-armv7-gnu package-rpm-armv7hl-gnu # Build all armv7-unknown-linux-gnueabihf MUSL packages +.PHONY: package-arm-unknown-linux-gnueabi-all +package-arm-unknown-linux-gnueabi-all: package-arm-unknown-linux-gnueabi package-deb-arm-gnu # Build all arm-unknown-linux-gnueabi GNU packages + .PHONY: package-x86_64-unknown-linux-gnu package-x86_64-unknown-linux-gnu: target/artifacts/vector-${VERSION}-x86_64-unknown-linux-gnu.tar.gz ## Build an archive suitable for the `x86_64-unknown-linux-gnu` triple. @echo "Output to ${<}." @@ -553,6 +564,14 @@ package-armv7-unknown-linux-gnueabihf: target/artifacts/vector-${VERSION}-armv7- package-armv7-unknown-linux-musleabihf: target/artifacts/vector-${VERSION}-armv7-unknown-linux-musleabihf.tar.gz ## Build an archive suitable for the `armv7-unknown-linux-musleabihf` triple. @echo "Output to ${<}." +.PHONY: package-arm-unknown-linux-gnueabi +package-arm-unknown-linux-gnueabi: target/artifacts/vector-${VERSION}-arm-unknown-linux-gnueabi.tar.gz ## Build an archive suitable for the `arm-unknown-linux-gnueabi` triple. + @echo "Output to ${<}." + +.PHONY: package-arm-unknown-linux-musleabi +package-arm-unknown-linux-musleabi: target/artifacts/vector-${VERSION}-arm-unknown-linux-musleabi.tar.gz ## Build an archive suitable for the `arm-unknown-linux-musleabi` triple. + @echo "Output to ${<}."
+ # debs .PHONY: package-deb-x86_64-unknown-linux-gnu @@ -571,6 +590,10 @@ package-deb-aarch64: package-aarch64-unknown-linux-gnu ## Build the aarch64 deb package-deb-armv7-gnu: package-armv7-unknown-linux-gnueabihf ## Build the armv7-unknown-linux-gnueabihf deb package $(CONTAINER_TOOL) run -v $(PWD):/git/vectordotdev/vector/ -e TARGET=armv7-unknown-linux-gnueabihf -e VECTOR_VERSION $(ENVIRONMENT_UPSTREAM) cargo vdev package deb +.PHONY: package-deb-arm-gnu +package-deb-arm-gnu: package-arm-unknown-linux-gnueabi ## Build the arm-unknown-linux-gnueabi deb package + $(CONTAINER_TOOL) run -v $(PWD):/git/vectordotdev/vector/ -e TARGET=arm-unknown-linux-gnueabi -e VECTOR_VERSION $(ENVIRONMENT_UPSTREAM) cargo vdev package deb + # rpms .PHONY: package-rpm-x86_64-unknown-linux-gnu diff --git a/changelog.d/18445_armv6_binaries.enhancement.md b/changelog.d/18445_armv6_binaries.enhancement.md new file mode 100644 index 0000000000000..5b92362cc711c --- /dev/null +++ b/changelog.d/18445_armv6_binaries.enhancement.md @@ -0,0 +1,3 @@ +ARMv6 builds are now provided as binaries, `.deb` archives and container images (alpine and debian). + +authors: wtaylor diff --git a/distribution/docker/alpine/Dockerfile b/distribution/docker/alpine/Dockerfile index 4201c8d54861c..ccdfdae4fff64 100644 --- a/distribution/docker/alpine/Dockerfile +++ b/distribution/docker/alpine/Dockerfile @@ -2,8 +2,13 @@ FROM docker.io/alpine:3.19 AS builder WORKDIR /vector +ARG TARGETPLATFORM + COPY vector-*-unknown-linux-musl*.tar.gz ./ -RUN tar -xvf vector-0*-"$(cat /etc/apk/arch)"-unknown-linux-musl*.tar.gz --strip-components=2 + +# special case for arm v6 builds, /etc/apk/arch reports armhf which conflicts with the armv7 package +RUN ARCH=$(if [ "$TARGETPLATFORM" = "linux/arm/v6" ]; then echo "arm"; else cat /etc/apk/arch; fi) \ + && tar -xvf vector-0*-"$ARCH"-unknown-linux-musl*.tar.gz --strip-components=2 RUN mkdir -p /var/lib/vector diff --git a/scripts/build-docker.sh b/scripts/build-docker.sh index 7c97470950995..14a53e4a9722a 100755 --- a/scripts/build-docker.sh +++ b/scripts/build-docker.sh @@ -17,10 +17,36 @@ PLATFORM="${PLATFORM:-}" PUSH="${PUSH:-"true"}" REPO="${REPO:-"timberio/vector"}" +IFS=, read -ra REQUESTED_PLATFORMS <<< "$PLATFORM" +declare -A SUPPORTED_PLATFORMS=( + [debian]="linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64/v8" + [alpine]="linux/amd64,linux/arm/v6,linux/arm/v7,linux/arm64/v8" + [distroless-static]="linux/amd64,linux/arm/v7,linux/arm64/v8" + [distroless-libc]="linux/amd64,linux/arm/v7,linux/arm64/v8" +) + # # Functions # +evaluate_supported_platforms_for_base() { + local BASE="$1" + IFS=, read -ra SUPPORTED_PLATFORMS_FOR_BASE <<< "${SUPPORTED_PLATFORMS["$BASE"]}" + + local BUILDABLE_PLATFORMS="" + for platform in "${REQUESTED_PLATFORMS[@]}" + do + if [[ ${SUPPORTED_PLATFORMS_FOR_BASE[*]} =~ $platform ]] + then + BUILDABLE_PLATFORMS+="$platform," + else + >&2 echo "WARN: skipping $platform for $BASE, no base image for platform" + fi + done + + echo "${BUILDABLE_PLATFORMS%?}" +} + build() { local BASE="$1" local VERSION="$2" @@ -34,8 +60,11 @@ build() { ARGS+=(--push) fi + local BUILDABLE_PLATFORMS + BUILDABLE_PLATFORMS=$(evaluate_supported_platforms_for_base "$BASE") + docker buildx build \ - --platform="$PLATFORM" \ + --platform="$BUILDABLE_PLATFORMS" \ --tag "$TAG" \ target/artifacts \ -f "$DOCKERFILE" \ diff --git a/scripts/cross/arm-unknown-linux-gnueabi.dockerfile b/scripts/cross/arm-unknown-linux-gnueabi.dockerfile new file mode 100644 index 0000000000000..fa728d6f4a4b1 --- /dev/null +++ 
b/scripts/cross/arm-unknown-linux-gnueabi.dockerfile @@ -0,0 +1,4 @@ +FROM ghcr.io/cross-rs/arm-unknown-linux-gnueabi:0.2.5 + +COPY scripts/cross/bootstrap-ubuntu.sh scripts/environment/install-protoc.sh / +RUN /bootstrap-ubuntu.sh && bash /install-protoc.sh diff --git a/scripts/cross/arm-unknown-linux-musleabi.dockerfile b/scripts/cross/arm-unknown-linux-musleabi.dockerfile new file mode 100644 index 0000000000000..ad80b289a8b28 --- /dev/null +++ b/scripts/cross/arm-unknown-linux-musleabi.dockerfile @@ -0,0 +1,8 @@ +FROM ghcr.io/cross-rs/arm-unknown-linux-musleabi:0.2.5 + +COPY scripts/cross/bootstrap-ubuntu.sh scripts/environment/install-protoc.sh / +RUN /bootstrap-ubuntu.sh && bash /install-protoc.sh + +# Stick `libstdc++` somewhere it can be found other than its normal location, otherwise we end up using the wrong version +# of _other_ libraries, which ultimately just breaks linking. We'll set `/lib/native-libs` as a search path in `.cargo/config.toml`. +RUN mkdir -p /lib/native-libs && cp /usr/local/arm-linux-musleabi/lib/libstdc++.a /lib/native-libs/ From 38acf37f1d5d33f46af93f24034475e450f04b29 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 11 Mar 2024 14:06:24 -0700 Subject: [PATCH 0126/1491] chore(docs): Update banner to use past tense for repository decommissioning (#20059) Signed-off-by: Jesse Szwedko --- website/layouts/partials/banner.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/layouts/partials/banner.html b/website/layouts/partials/banner.html index 0ebd7e595b92b..7ed0a001eb79e 100644 --- a/website/layouts/partials/banner.html +++ b/website/layouts/partials/banner.html @@ -3,7 +3,7 @@

- APT and RPM repositories at repositories.timber.io will be decommissioned on February 28th + APT and RPM repositories at repositories.timber.io have been decommissioned. See Migration instructions From 4804e1745170dab2075fe6ef27534d57033ec2f7 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 11 Mar 2024 15:30:26 -0700 Subject: [PATCH 0127/1491] fix(docs): Use `component_kind` rather than `kind` in templates (#20063) I missed a few spots in https://github.com/vectordotdev/vector/pull/20058 Signed-off-by: Jesse Szwedko --- website/layouts/_default/component-card-selectable.html | 4 ++-- website/layouts/_default/component-card.html | 2 +- website/layouts/partials/meta.html | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/website/layouts/_default/component-card-selectable.html b/website/layouts/_default/component-card-selectable.html index bdbf99bd7f373..daa24d1b249d2 100644 --- a/website/layouts/_default/component-card-selectable.html +++ b/website/layouts/_default/component-card-selectable.html @@ -16,7 +16,7 @@

{{ end }}
- + \ No newline at end of file diff --git a/website/layouts/_default/component-card.html b/website/layouts/_default/component-card.html index d227fb3b3d14e..6fd9959ddca04 100644 --- a/website/layouts/_default/component-card.html +++ b/website/layouts/_default/component-card.html @@ -1,6 +1,6 @@ {{ $title := .Params.short | default .Title }} {{ $componentTag := .File.BaseFileName }} -{{ $kind := .Params.kind }} +{{ $kind := .Params.component_kind }}
diff --git a/website/layouts/partials/meta.html b/website/layouts/partials/meta.html index 2a6cd8553f131..7d654646ed1d8 100644 --- a/website/layouts/partials/meta.html +++ b/website/layouts/partials/meta.html @@ -5,7 +5,7 @@ {{ $img := site.Params.site_logo | absURL }} {{ $imgAlt := printf "Logo for %s" site.Title }} {{ $twitter := printf "@%s" site.Params.social.twitter_handle }} -{{ $title := cond (eq .Layout "component") (printf "%s %s" .Title .Params.kind) .Title }} +{{ $title := cond (eq .Layout "component") (printf "%s %s" .Title .Params.component_kind) .Title }} From f0d3037541b99bfcebfabdb1796200992f0747a8 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 12 Mar 2024 08:15:19 -0700 Subject: [PATCH 0128/1491] chore(ci): Default env vars for enterprise_http_to_http regression case (#20073) To unblock making unset env vars an error per https://github.com/vectordotdev/vector/pull/20062 Signed-off-by: Jesse Szwedko --- regression/cases/enterprise_http_to_http/vector/vector.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/regression/cases/enterprise_http_to_http/vector/vector.yaml b/regression/cases/enterprise_http_to_http/vector/vector.yaml index 3e9784c339ffa..c092bdd84fdc9 100644 --- a/regression/cases/enterprise_http_to_http/vector/vector.yaml +++ b/regression/cases/enterprise_http_to_http/vector/vector.yaml @@ -4,8 +4,8 @@ data_dir: "/var/lib/vector" ## Enterprise ## enterprise: - api_key: "${DD_API_KEY}" - configuration_key: "${DD_CONFIGURATION_KEY}" + api_key: "${DD_API_KEY-}" + configuration_key: "${DD_CONFIGURATION_KEY-}" endpoint: "http://localhost:8080" ## From a7c3dbc453dc63dd4499b8f0c3dce15f16839f46 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 12 Mar 2024 09:45:52 -0700 Subject: [PATCH 0129/1491] chore(cli)!: Update default for --strict-env-vars to true (#20062) * chore(cli)!: Update default for --strict-env-vars to true This deprecates this option. Signed-off-by: Jesse Szwedko * Allow setting --strict-env-vars=false Signed-off-by: Jesse Szwedko * Add note for how to opt into old behavior to changelog Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- .../deprecate_strict_env_vars.breaking.md | 4 ++++ docs/DEPRECATIONS.md | 6 +++--- src/cli.rs | 19 ++++++++++++++----- website/cue/reference/cli.cue | 8 ++++++-- 4 files changed, 27 insertions(+), 10 deletions(-) create mode 100644 changelog.d/deprecate_strict_env_vars.breaking.md diff --git a/changelog.d/deprecate_strict_env_vars.breaking.md b/changelog.d/deprecate_strict_env_vars.breaking.md new file mode 100644 index 0000000000000..5b66ffeae9e38 --- /dev/null +++ b/changelog.d/deprecate_strict_env_vars.breaking.md @@ -0,0 +1,4 @@ +The default of `--strict-env-vars` has been changed to `true`. This option has been deprecated. In +a future version it will be removed and Vector will have the behavior it currently has when set +to `true` which is that missing environment variables will cause Vector to fail to start up with an +error instead of a warning. Set `--strict-env-vars=false` to opt into deprecated behavior. diff --git a/docs/DEPRECATIONS.md b/docs/DEPRECATIONS.md index 019a262d706bd..a18ed62e132a8 100644 --- a/docs/DEPRECATIONS.md +++ b/docs/DEPRECATIONS.md @@ -16,7 +16,7 @@ For example: ## To be migrated -- v0.37.0 strict_env_vars Change the default for missing environment variable interpolation from - warning to erroring. 
- ## To be removed + +- v0.38.0 strict_env_vars Remove option for configuring missing environment variable interpolation + to be a warning rather than an error diff --git a/src/cli.rs b/src/cli.rs index 441cce9f720df..c1092d3cab996 100644 --- a/src/cli.rs +++ b/src/cli.rs @@ -213,11 +213,20 @@ pub struct RootOpts { #[arg(long, env = "VECTOR_ALLOW_EMPTY_CONFIG", default_value = "false")] pub allow_empty_config: bool, - /// Turn on strict mode for environment variable interpolation. When set, interpolation of a - /// missing environment variable in configuration files will cause an error instead of a - /// warning, which will result in a failure to load any such configuration file. This defaults - /// to false, but that default is deprecated and will be changed to strict in future versions. - #[arg(long, env = "VECTOR_STRICT_ENV_VARS", default_value = "false")] + /// Turn on strict mode for environment variable interpolation. When set, interpolation of + /// a missing environment variable in configuration files will cause an error instead of + /// a warning, which will result in a failure to load any such configuration file. This option + /// is deprecated and will be removed in a future version to remove the ability to downgrade + /// missing environment variables to warnings. + #[arg( + long, + env = "VECTOR_STRICT_ENV_VARS", + default_value = "true", + default_missing_value = "true", + num_args = 0..=1, + require_equals = true, + action = ArgAction::Set + )] pub strict_env_vars: bool, } diff --git a/website/cue/reference/cli.cue b/website/cue/reference/cli.cue index 33b5ef448e062..8e839a32ec40d 100644 --- a/website/cue/reference/cli.cue +++ b/website/cue/reference/cli.cue @@ -658,9 +658,13 @@ cli: { } VECTOR_STRICT_ENV_VARS: { description: """ - Turn on strict mode for environment variable interpolation. When set, interpolation of a missing environment variable in configuration files will cause an error instead of a warning, which will result in a failure to load any such configuration file. This defaults to false, but that default is deprecated and will be changed to strict in future versions. + Turn on strict mode for environment variable interpolation. When set, interpolation of a missing + environment variable in configuration files will cause an error instead of a warning, which will + result in a failure to load any such configuration file. This option is deprecated and will be + removed in a future version to remove the ability to downgrade missing environment variables to + warnings. """ - type: bool: default: false + type: bool: default: true } } From 6a6c159da14b441df6dde0a3a9997a787910087a Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 12 Mar 2024 14:23:34 -0700 Subject: [PATCH 0130/1491] chore(dev): Update changelog generation script to handle authors and whitespace (#20075) * chore(dev): Update changelog generation script to handle authors and whitespace This: - Assumes contributors are space delimited, as we have been doing in practice - Normalizes the description field so that it always terminates in a single newline. 
Previously, if there were contributors, it would have an extra blank line Signed-off-by: Jesse Szwedko * Update fragment checker to validate authors Signed-off-by: Jesse Szwedko * Remove accidentally committed file Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- changelog.d/README.md | 2 +- scripts/check_changelog_fragments.sh | 5 ++++- scripts/generate-release-cue.rb | 9 +++------ 3 files changed, 8 insertions(+), 8 deletions(-) diff --git a/changelog.d/README.md b/changelog.d/README.md index d98be4201011a..f7e6f9b27e8e9 100644 --- a/changelog.d/README.md +++ b/changelog.d/README.md @@ -74,7 +74,7 @@ the authors specified. The process for adding this is simply to have the last line of the file be in this format: - authors: , , <...> + authors: <...> Do not include a leading `@` when specifying your username. diff --git a/scripts/check_changelog_fragments.sh b/scripts/check_changelog_fragments.sh index cc9b8bb33b429..0cb2718d4764a 100755 --- a/scripts/check_changelog_fragments.sh +++ b/scripts/check_changelog_fragments.sh @@ -61,9 +61,12 @@ while IFS= read -r fname; do # used for external contributor PRs. if [[ $1 == "--authors" ]]; then last=$( tail -n 1 "${CHANGELOG_DIR}/${fname}" ) - if [[ "${last}" =~ ^(authors: @.*)$ ]]; then + if [[ "${last}" == "authors: "*@* ]]; then echo "invalid fragment contents: author should not be prefixed with @" exit 1 + elif [[ "${last}" == "authors: "*,* ]]; then + echo "invalid fragment contents: authors should be space delimited, not comma delimited." + exit 1 elif ! [[ "${last}" =~ ^(authors: .*)$ ]]; then echo "invalid fragment contents: author option was specified but fragment ${fname} contains no authors." exit 1 diff --git a/scripts/generate-release-cue.rb b/scripts/generate-release-cue.rb index c152b86d1e249..c308f44f8bb43 100755 --- a/scripts/generate-release-cue.rb +++ b/scripts/generate-release-cue.rb @@ -145,16 +145,13 @@ def generate_changelog!(new_version) contributors = Array.new if last.start_with?("authors: ") - authors_str = last[9..] - authors_str = authors_str.delete(" \t\r\n") - authors_arr = authors_str.split(",") - authors_arr.each { |author| contributors.push(author) } + contributors = last[9..].split(" ").map(&:strip) # remove that line from the description lines.pop() end - description = lines.join("") + description = lines.join("").strip() # get the PR number of the changelog fragment. # the fragment type is not used in the Vector release currently. @@ -195,7 +192,7 @@ def generate_changelog!(new_version) entry = "{\n" + "type: #{type.to_json}\n" + "description: \"\"\"\n" + - "#{description}" + + "#{description}\n" + "\"\"\"\n" if contributors.length() > 0 From 52d72dae521be48260c82a5e9fdb9ef81629e24c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 22:09:53 +0000 Subject: [PATCH 0131/1491] chore(ci): Bump docker/build-push-action from 5.1.0 to 5.2.0 (#20057) Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 5.1.0 to 5.2.0. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v5.1.0...v5.2.0) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/environment.yml | 2 +- .github/workflows/regression.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml index 3e7f0b6d1cb6d..133089d2f41f5 100644 --- a/.github/workflows/environment.yml +++ b/.github/workflows/environment.yml @@ -64,7 +64,7 @@ jobs: org.opencontainers.image.title=Vector development environment org.opencontainers.image.url=https://github.com/vectordotdev/vector - name: Build and push - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.2.0 with: context: . file: ./scripts/environment/Dockerfile diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 82709290f002b..35fce84b6a240 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -306,7 +306,7 @@ jobs: uses: docker/setup-buildx-action@v3.1.0 - name: Build 'vector' target image - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.2.0 with: context: baseline-vector/ cache-from: type=gha @@ -344,7 +344,7 @@ jobs: uses: docker/setup-buildx-action@v3.1.0 - name: Build 'vector' target image - uses: docker/build-push-action@v5.1.0 + uses: docker/build-push-action@v5.2.0 with: context: comparison-vector/ cache-from: type=gha From bcc6e40862ee16f4cec75b8f752c54a399bd6cbc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 22:10:06 +0000 Subject: [PATCH 0132/1491] chore(deps): Bump toml from 0.8.10 to 0.8.11 (#20067) Bumps [toml](https://github.com/toml-rs/toml) from 0.8.10 to 0.8.11. - [Commits](https://github.com/toml-rs/toml/compare/toml-v0.8.10...toml-v0.8.11) --- updated-dependencies: - dependency-name: toml dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 25 +++++++++++++++++-------- Cargo.toml | 2 +- 2 files changed, 18 insertions(+), 9 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c2626d1fd56a1..143b86de91c6e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9296,14 +9296,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.10" +version = "0.8.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9a9aad4a3066010876e8dcf5a8a06e70a558751117a145c6ce2b82c2e2054290" +checksum = "af06656561d28735e9c1cd63dfd57132c8155426aa6af24f36a00a351f88c48e" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.4", + "toml_edit 0.22.7", ] [[package]] @@ -9323,7 +9323,7 @@ checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ "indexmap 2.2.5", "toml_datetime", - "winnow", + "winnow 0.5.18", ] [[package]] @@ -9334,20 +9334,20 @@ checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" dependencies = [ "indexmap 2.2.5", "toml_datetime", - "winnow", + "winnow 0.5.18", ] [[package]] name = "toml_edit" -version = "0.22.4" +version = "0.22.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c9ffdf896f8daaabf9b66ba8e77ea1ed5ed0f72821b398aba62352e95062951" +checksum = "18769cd1cec395d70860ceb4d932812a0b4d06b1a4bb336745a4d21b9496e992" dependencies = [ "indexmap 2.2.5", "serde", "serde_spanned", "toml_datetime", - "winnow", + "winnow 0.6.5", ] [[package]] @@ -11157,6 +11157,15 @@ dependencies = [ "memchr", ] +[[package]] +name = "winnow" +version = "0.6.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dffa400e67ed5a4dd237983829e66475f0a4a26938c4b04c21baede6262215b8" +dependencies = [ + "memchr", +] + [[package]] name = "winreg" version = "0.50.0" diff --git a/Cargo.toml b/Cargo.toml index e62d8215332bf..91c37117ace86 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -137,7 +137,7 @@ proptest = "1.4" proptest-derive = "0.4.0" serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } -toml = { version = "0.8.10", default-features = false, features = ["display", "parse"] } +toml = { version = "0.8.11", default-features = false, features = ["display", "parse"] } vrl = { version = "0.12.0", features = ["arbitrary", "cli", "test", "test_framework"] } [dependencies] From 98df316fedbdffcf475b3ca9c51ab5ad4bdaa1ae Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 22:10:19 +0000 Subject: [PATCH 0133/1491] chore(deps): Bump serde_with from 3.6.1 to 3.7.0 (#20068) Bumps [serde_with](https://github.com/jonasbb/serde_with) from 3.6.1 to 3.7.0. - [Release notes](https://github.com/jonasbb/serde_with/releases) - [Commits](https://github.com/jonasbb/serde_with/compare/v3.6.1...v3.7.0) --- updated-dependencies: - dependency-name: serde_with dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 20 ++++++++++---------- Cargo.toml | 2 +- lib/vector-config/Cargo.toml | 4 ++-- lib/vector-core/Cargo.toml | 2 +- 4 files changed, 14 insertions(+), 14 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 143b86de91c6e..2de26b027f987 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1580,7 +1580,7 @@ dependencies = [ "chrono", "serde", "serde_repr", - "serde_with 3.6.1", + "serde_with 3.7.0", ] [[package]] @@ -6019,7 +6019,7 @@ dependencies = [ "serde_json", "serde_path_to_error", "serde_plain", - "serde_with 3.6.1", + "serde_with 3.7.0", "sha2", "subtle", "thiserror", @@ -8262,9 +8262,9 @@ dependencies = [ [[package]] name = "serde_with" -version = "3.6.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "15d167997bd841ec232f5b2b8e0e26606df2e7caa4c31b95ea9ca52b200bd270" +checksum = "ee80b0e361bbf88fd2f6e242ccd19cfda072cb0faa6ae694ecee08199938569a" dependencies = [ "base64 0.21.7", "chrono", @@ -8274,7 +8274,7 @@ dependencies = [ "serde", "serde_derive", "serde_json", - "serde_with_macros 3.6.1", + "serde_with_macros 3.7.0", "time", ] @@ -8292,9 +8292,9 @@ dependencies = [ [[package]] name = "serde_with_macros" -version = "3.6.1" +version = "3.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "865f9743393e638991566a8b7a479043c2c8da94a33e0a31f18214c9cae0a64d" +checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" dependencies = [ "darling 0.20.8", "proc-macro2 1.0.78", @@ -10149,7 +10149,7 @@ dependencies = [ "serde-toml-merge", "serde_bytes", "serde_json", - "serde_with 3.6.1", + "serde_with 3.7.0", "serde_yaml 0.9.32", "sha2", "similar-asserts", @@ -10310,7 +10310,7 @@ dependencies = [ "num-traits", "serde", "serde_json", - "serde_with 3.6.1", + "serde_with 3.7.0", "snafu 0.7.5", "toml", "tracing 0.1.40", @@ -10404,7 +10404,7 @@ dependencies = [ "security-framework", "serde", "serde_json", - "serde_with 3.6.1", + "serde_with 3.7.0", "serde_yaml 0.9.32", "similar-asserts", "smallvec", diff --git a/Cargo.toml b/Cargo.toml index 91c37117ace86..ee2ea1bce7e84 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -215,7 +215,7 @@ serde.workspace = true serde-toml-merge = { version = "0.3.4", default-features = false } serde_bytes = { version = "0.11.14", default-features = false, features = ["std"], optional = true } serde_json.workspace = true -serde_with = { version = "3.6.1", default-features = false, features = ["macros", "std"] } +serde_with = { version = "3.7.0", default-features = false, features = ["macros", "std"] } serde_yaml = { version = "0.9.32", default-features = false } # Messagepack diff --git a/lib/vector-config/Cargo.toml b/lib/vector-config/Cargo.toml index a1a84348d4487..5ebde30a79cbc 100644 --- a/lib/vector-config/Cargo.toml +++ b/lib/vector-config/Cargo.toml @@ -20,7 +20,7 @@ no-proxy = { version = "0.3.4", default-features = false, features = ["serialize num-traits = { version = "0.2.18", default-features = false } serde.workspace = true serde_json.workspace = true -serde_with = { version = "3.6.1", default-features = false, features = ["std"] } +serde_with = { version = "3.7.0", default-features = false, features = ["std"] } snafu = { version = "0.7.5", default-features = false } toml.workspace = true tracing = { version = "0.1.34", default-features = false } @@ -32,5 +32,5 @@ vector-config-macros = { path = "../vector-config-macros" } 
[dev-dependencies] assert-json-diff = { version = "2", default-features = false } -serde_with = { version = "3.6.1", default-features = false, features = ["std", "macros"] } +serde_with = { version = "3.7.0", default-features = false, features = ["std", "macros"] } vector-core = { path = "../vector-core", default-features = false, features = ["test"] } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 9d427d75d54e5..380286f3319b2 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -43,7 +43,7 @@ regex = { version = "1.10.3", default-features = false, features = ["std", "perf ryu = { version = "1", default-features = false } serde.workspace = true serde_json.workspace = true -serde_with = { version = "3.6.1", default-features = false, features = ["std", "macros"] } +serde_with = { version = "3.7.0", default-features = false, features = ["std", "macros"] } smallvec = { version = "1", default-features = false, features = ["serde", "const_generics"] } snafu = { version = "0.7.5", default-features = false } socket2 = { version = "0.5.6", default-features = false } From 34d3aa5b23b859d0e9e0c566c2ae3ec5bf79ceca Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 22:10:32 +0000 Subject: [PATCH 0134/1491] chore(deps): Bump thiserror from 1.0.57 to 1.0.58 (#20069) Bumps [thiserror](https://github.com/dtolnay/thiserror) from 1.0.57 to 1.0.58. - [Release notes](https://github.com/dtolnay/thiserror/releases) - [Commits](https://github.com/dtolnay/thiserror/compare/1.0.57...1.0.58) --- updated-dependencies: - dependency-name: thiserror dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2de26b027f987..c69375237599e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8995,18 +8995,18 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e45bcbe8ed29775f228095caf2cd67af7a4ccf756ebff23a306bf3e8b47b24b" +checksum = "03468839009160513471e86a034bb2c5c0e4baae3b43f79ffc55c4a5427b3297" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.57" +version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a953cb265bef375dae3de6663da4d3804eee9682ea80d8e2542529b73c531c81" +checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.78", "quote 1.0.35", From 8811e218d9d691d0d5e600d0cd2cd50cacb02c0a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 12 Mar 2024 22:10:42 +0000 Subject: [PATCH 0135/1491] chore(deps): Bump proc-macro2 from 1.0.78 to 1.0.79 (#20070) Bumps [proc-macro2](https://github.com/dtolnay/proc-macro2) from 1.0.78 to 1.0.79. - [Release notes](https://github.com/dtolnay/proc-macro2/releases) - [Commits](https://github.com/dtolnay/proc-macro2/compare/1.0.78...1.0.79) --- updated-dependencies: - dependency-name: proc-macro2 dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 142 ++++++++++++++++++++++++++--------------------------- 1 file changed, 71 insertions(+), 71 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c69375237599e..a1c1041944f90 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -465,7 +465,7 @@ dependencies = [ "async-graphql-parser", "darling 0.20.8", "proc-macro-crate 1.3.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "strum 0.25.0", "syn 2.0.52", @@ -649,7 +649,7 @@ version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -689,7 +689,7 @@ version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -706,7 +706,7 @@ version = "0.1.77" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -1601,7 +1601,7 @@ checksum = "f404657a7ea7b5249e36808dff544bc88a28f26e0ac40009f674b7a009d14be3" dependencies = [ "once_cell", "proc-macro-crate 2.0.0", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", "syn_derive", @@ -1673,7 +1673,7 @@ version = "0.6.11" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7ec4c6f261935ad534c0c22dbef2201b45918860eb1c574b972bd213a76af61" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -1741,7 +1741,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ad9f16c0d84de31a2ab7fdf5f7783c14631f7075cf464eb3bb43119f61c9cb2a" dependencies = [ "darling 0.14.4", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -2033,7 +2033,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -2561,7 +2561,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -2604,7 +2604,7 @@ checksum = "859d65a907b6852c9361e3185c862aae7fafd2887876799fa55f5f99dc40d610" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", "syn 1.0.109", @@ -2618,7 +2618,7 @@ checksum = "109c1ca6e6b7f82cc233a97004ea8ed7ca123a9af07a8230878fcfda9b158bf0" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", "syn 1.0.109", @@ -2632,7 +2632,7 @@ checksum = "9c2cf1c23a687a1feeb728783b993c4e1ad83d99f351801977dd809b48d0a70f" dependencies = [ "fnv", "ident_case", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", "syn 2.0.52", @@ -2760,7 +2760,7 @@ version = "2.2.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "fcc3dd5e9e9c0b295d6e1e4d811fb6f157d5ffd784b8d202fc62eac8035a770b" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -2771,7 +2771,7 @@ version = "1.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -2783,7 +2783,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ "convert_case 0.4.0", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "rustc_version 0.4.0", "syn 1.0.109", @@ -3040,7 +3040,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "21cdad81446a7f7dc43f6a77409efeb9733d2fa65553efef6018ef257c959b73" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -3052,7 +3052,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -3064,7 +3064,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f33313078bb8d4d05a2733a94ac4c2d8a0df9a2b84424ebf4f33bfc224a890e" dependencies = [ "once_cell", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -3084,7 +3084,7 @@ version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -3486,7 +3486,7 @@ version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -3665,7 +3665,7 @@ dependencies = [ "graphql-parser", "heck 0.4.1", "lazy_static", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "serde", "serde_json", @@ -3679,7 +3679,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "00bda454f3d313f909298f626115092d348bc231025699f557b27e248475f48c" dependencies = [ "graphql_client_codegen", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "syn 1.0.109", ] @@ -5277,7 +5277,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -5396,7 +5396,7 @@ dependencies = [ "itertools 0.12.1", "once_cell", "proc-macro-error", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "regex", "syn 2.0.52", @@ -5835,7 +5835,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dcbff9bc912032c62bf65ef1d5aea88983b420f4f839db1e9b0c281a25c9c799" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -5847,7 +5847,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"96667db765a921f7b295ffee8b60472b686a51d4f21c2ee4ffdb94c7013b65a6" dependencies = [ "proc-macro-crate 1.3.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -5859,7 +5859,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "681030a937600a36906c185595136d26abfebb4aa9c65701cefcaf8578bb982b" dependencies = [ "proc-macro-crate 2.0.0", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -6047,7 +6047,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -6332,7 +6332,7 @@ checksum = "1381c29a877c6d34b8c176e734f35d7f7f5b3adaefe940cb4d1bb7af94678e2e" dependencies = [ "pest", "pest_meta", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -6420,7 +6420,7 @@ version = "1.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -6688,7 +6688,7 @@ version = "0.1.25" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "syn 1.0.109", ] @@ -6698,7 +6698,7 @@ version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "syn 2.0.52", ] @@ -6751,7 +6751,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c" dependencies = [ "proc-macro-error-attr", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", "version_check", @@ -6763,7 +6763,7 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "version_check", ] @@ -6791,9 +6791,9 @@ dependencies = [ [[package]] name = "proc-macro2" -version = "1.0.78" +version = "1.0.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e2422ad645d89c99f8f3e6b88a9fdeca7fabeac836b1002371c4367c8f984aae" +checksum = "e835ff2298f5721608eb1a980ecaee1aef2c132bf95ecc026a11b7bf3c01c02e" dependencies = [ "unicode-ident", ] @@ -6838,7 +6838,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9cf16337405ca084e9c78985114633b6827711d22b9e6ef6c6c0d665eb3f0b6e" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -6915,7 +6915,7 @@ checksum = "e5d2d8d10f3c6ded6da8b05b5fb3b8a5082514344d56c9f871412d29b4e075b4" dependencies = [ "anyhow", "itertools 0.10.5", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -6928,7 +6928,7 @@ checksum = "efb6c9a1dd1def8e2124d17e83a20af56f1570d6c2d2bd9e266ccb768df3840e" dependencies = [ "anyhow", "itertools 0.11.0", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -6995,7 +6995,7 @@ version = "0.1.4" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "16b845dbfca988fa33db069c0e230574d15a3088f147a87b64c7589eb662c9ac" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -7109,7 +7109,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b22a693222d716a9587786f37ac3f6b4faedb5b80c23914e7303ff5a1d8016e9" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -7129,7 +7129,7 @@ version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "291ec9ab5efd934aaf503a6466c5d5251535d108ee747472c3977cc5acc868ef" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", ] [[package]] @@ -7602,7 +7602,7 @@ version = "0.7.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a7dddfff8de25e6f62b9d64e6e432bf1c6736c57d20323e15ee10435fbda7c65" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -7699,7 +7699,7 @@ checksum = "d428f8247852f894ee1be110b375111b586d4fa431f6c46e64ba5a0dcccbe605" dependencies = [ "cfg-if", "glob", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "regex", "relative-path", @@ -8151,7 +8151,7 @@ version = "1.0.197" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -8162,7 +8162,7 @@ version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -8224,7 +8224,7 @@ version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -8285,7 +8285,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e182d6ec6f05393cc0e5ed1bf81ad6db3a8feedf8ee515ecdd369809bcce8082" dependencies = [ "darling 0.13.4", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -8297,7 +8297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6561dc161a9224638a31d876ccdfefbc1df91d3f3a8342eddb35f055d48c7655" dependencies = [ "darling 0.20.8", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -8564,7 +8564,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "990079665f075b699031e9c08fd3ab99be5029b96f3b78dc0709e8f77e4efebf" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -8576,7 +8576,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "080c44971436b1af15d6f61ddd8b543995cf63ab8e677d46b00cc06f4ef267a0" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -8735,7 +8735,7 @@ checksum = "dcb5ae327f9cc13b68763b5749770cb9e048a99bd9dfdfa58d0cf05d5f64afe0" dependencies = [ "heck 0.3.3", "proc-macro-error", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -8765,7 +8765,7 @@ source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "23dc1fa9ac9c169a78ba62f0b841814b7abae11bdd047b9c58f893439e309ea0" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", "syn 2.0.52", @@ -8778,7 +8778,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7a3417fc93d76740d974a01654a09777cb500428cc874ca9f45edfe0c4d4cd18" dependencies = [ "heck 0.4.1", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", "syn 2.0.52", @@ -8817,7 +8817,7 @@ version = "1.0.109" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "72b64191b275b66ffe2469e8af2c1cfe3bafa67b529ead792a6d0160888b4237" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "unicode-ident", ] @@ -8828,7 +8828,7 @@ version = "2.0.52" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "unicode-ident", ] @@ -8840,7 +8840,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1329189c02ff984e9736652b1631330da25eaa6bc639089ed4915d25446cbe7b" dependencies = [ "proc-macro-error", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -9008,7 +9008,7 @@ version = "1.0.58" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -9156,7 +9156,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -9389,7 +9389,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" dependencies = [ "prettyplease 0.1.25", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "prost-build 0.11.9", "quote 1.0.35", "syn 1.0.109", @@ -9402,7 +9402,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9d021fc044c18582b9a2408cd0dd05b1596e3ecdb5c4df822bb0183545683889" dependencies = [ "prettyplease 0.2.15", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "prost-build 0.12.3", "quote 1.0.35", "syn 2.0.52", @@ -9506,7 +9506,7 @@ version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -9720,7 +9720,7 @@ version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "89851716b67b937e393b3daa8423e67ddfc4bbbf1654bcf05488e95e0828db0c" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 1.0.109", ] @@ -9740,7 +9740,7 @@ version = "0.16.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -9770,7 +9770,7 @@ version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] @@ -10328,7 +10328,7 @@ dependencies = [ "convert_case 0.6.0", "darling 0.20.8", "once_cell", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "serde", "serde_json", @@ -10341,7 +10341,7 @@ name = "vector-config-macros" version = "0.1.0" dependencies = [ "darling 0.20.8", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "serde", "serde_derive_internals", @@ -10653,7 +10653,7 @@ version = "0.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d257817081c7dffcdbab24b9e62d2def62e2ff7d00b1c20062551e6cccc145ff" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", ] @@ -10758,7 +10758,7 @@ dependencies = [ "bumpalo", "log", "once_cell", - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", "wasm-bindgen-shared", @@ -10792,7 +10792,7 @@ version = "0.2.92" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", "wasm-bindgen-backend", @@ -11247,7 +11247,7 @@ version = "0.7.31" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ - "proc-macro2 1.0.78", + "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.52", ] From fe23c97ae6a45115c9924a3ea6410c62018c5060 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 02:38:04 +0000 Subject: [PATCH 0136/1491] chore(deps): Bump anyhow from 1.0.80 to 1.0.81 (#20066) Bumps [anyhow](https://github.com/dtolnay/anyhow) from 1.0.80 to 1.0.81. - [Release notes](https://github.com/dtolnay/anyhow/releases) - [Commits](https://github.com/dtolnay/anyhow/compare/1.0.80...1.0.81) --- updated-dependencies: - dependency-name: anyhow dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/docs-renderer/Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a1c1041944f90..290785afbb257 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -227,9 +227,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.80" +version = "1.0.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ad32ce52e4161730f7098c077cd2ed6229b5804ccf99e5366be1ab72a98b4e1" +checksum = "0952808a6c2afd1aa8947271f3a60f1a6763c7b912d210184c5149b5cf147247" [[package]] name = "apache-avro" diff --git a/lib/docs-renderer/Cargo.toml b/lib/docs-renderer/Cargo.toml index eca94c8ef139b..fa14f26b61b63 100644 --- a/lib/docs-renderer/Cargo.toml +++ b/lib/docs-renderer/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -anyhow = { version = "1.0.80", default-features = false, features = ["std"] } +anyhow = { version = "1.0.81", default-features = false, features = ["std"] } serde.workspace = true serde_json.workspace = true snafu = { version = "0.7.5", default-features = false } diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 72be949d2e691..d6aad353bd815 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -13,7 +13,7 @@ serde.workspace = true serde_json.workspace = true # Error handling -anyhow = { version = "1.0.80", default-features = false, features = ["std"] } +anyhow = { version = "1.0.81", default-features = false, features = ["std"] } # Tokio / Futures async-trait = { version = "0.1", default-features = false } diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index e7124c58d0d22..b7057f39836d1 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -8,7 +8,7 @@ readme = "README.md" publish = false [dependencies] -anyhow = "1.0.80" +anyhow = "1.0.81" cached = "0.49.2" chrono.workspace = true clap.workspace = true From aa04ac86707ee0f1df8e7b77acbd459834ca1fa4 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Wed, 13 Mar 2024 14:45:15 +0100 Subject: [PATCH 0137/1491] feat(sources): add `permit_origin` config option for all tcp sources (#20051) * feat(sources): add `permit_origin` config option for all tcp sources Adds `permit_origin` config option to all sources that have a TCP mode (introduced in https://github.com/vectordotdev/vector/pull/19892). 
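For illustration only (this example is not part of the original patch, and the source name is hypothetical): enabling the new option on a `syslog` source in TCP mode might look like the following Vector TOML config. The CIDR values are taken from the docs examples added in this change.

```toml
[sources.my_syslog]
type = "syslog"
mode = "tcp"
address = "0.0.0.0:514"
# Only accept TCP connections from these origin networks (CIDR notation).
permit_origin = ["192.168.0.0/16", "127.0.0.1/32"]
```

Connections from origins outside the allowlist should be rejected; leaving `permit_origin` unset keeps the existing behavior of allowing all origins.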
Related: https://github.com/vectordotdev/vector/pull/19892 * Add changelog entry * Add `statsd` to allowed words * Fix typo in changelog * Remove duplication in docs * Update allowlist docs for dnstap and socket tcp as well * Implement IpAllowlistConfig -> Vec<IpNet> for less duplication * Update docs for `IpAllowlistConfig` --- .github/actions/spelling/allow.txt | 1 + changelog.d/20051_permit_origin_tcp.feature.md | 3 +++ lib/vector-core/src/ipallowlist.rs | 18 +++++++++++++++++- src/sources/dnstap/tcp.rs | 8 ++------ src/sources/fluent/mod.rs | 11 ++++++++++- src/sources/logstash.rs | 9 ++++++++- src/sources/socket/mod.rs | 4 +--- src/sources/socket/tcp.rs | 4 +--- src/sources/statsd/mod.rs | 7 ++++++- src/sources/syslog.rs | 11 ++++++++++- .../components/sources/base/dnstap.cue | 8 ++------ .../components/sources/base/fluent.cue | 5 +++++ .../components/sources/base/logstash.cue | 5 +++++ .../components/sources/base/socket.cue | 8 ++------ .../components/sources/base/statsd.cue | 6 ++++++ .../components/sources/base/syslog.cue | 6 ++++++ 16 files changed, 85 insertions(+), 29 deletions(-) create mode 100644 changelog.d/20051_permit_origin_tcp.feature.md diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index 40cbe949e2a70..cc66dca7762ee 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -444,6 +444,7 @@ solarwinds splunk ssh staticuser +statsd symbian tanushri timeframe diff --git a/changelog.d/20051_permit_origin_tcp.feature.md b/changelog.d/20051_permit_origin_tcp.feature.md new file mode 100644 index 0000000000000..e8e2cecb9f5b3 --- /dev/null +++ b/changelog.d/20051_permit_origin_tcp.feature.md @@ -0,0 +1,3 @@ +Added support for `permit_origin` config option for all sources with TCP mode (`fluent`, `logstash`, `statsd`, `syslog`). + +authors: esensar diff --git a/lib/vector-core/src/ipallowlist.rs b/lib/vector-core/src/ipallowlist.rs index 7475e92fd2e67..e1e59803834fd 100644 --- a/lib/vector-core/src/ipallowlist.rs +++ b/lib/vector-core/src/ipallowlist.rs @@ -6,13 +6,23 @@ use ipnet::IpNet; use vector_config::{configurable_component, Configurable, Metadata, ToValue}; use vector_config_common::schema::{InstanceType, SchemaGenerator, SchemaObject}; -/// IP network allowlist settings for network components +/// List of allowed origin IP networks. IP addresses must be in CIDR notation.
#[configurable_component] #[derive(Clone, Debug, PartialEq, Eq)] #[serde(deny_unknown_fields, transparent)] #[configurable(metadata(docs::human_name = "Allowed IP network origins"))] +#[configurable(metadata(docs::examples = "ip_allow_list_example()"))] pub struct IpAllowlistConfig(pub Vec<IpNetConfig>); +const fn ip_allow_list_example() -> [&'static str; 4] { + [ + "192.168.0.0/16", + "127.0.0.1/32", + "::1/128", + "9876:9ca3:99ab::23/128", + ] +} + /// IP network #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(deny_unknown_fields, transparent)] @@ -38,3 +48,9 @@ impl Configurable for IpNetConfig { Metadata::with_description("IP network") } } + +impl From<IpAllowlistConfig> for Vec<IpNet> { + fn from(value: IpAllowlistConfig) -> Self { + value.0.iter().map(|net| net.0).collect() + } +} diff --git a/src/sources/dnstap/tcp.rs b/src/sources/dnstap/tcp.rs index f45dab66f1749..1f7f8bb467a7f 100644 --- a/src/sources/dnstap/tcp.rs +++ b/src/sources/dnstap/tcp.rs @@ -46,9 +46,7 @@ pub struct TcpConfig { #[serde(default = "default_port_key")] pub port_key: OptionalValuePath, - /// List of allowed origin IP networks - /// - /// By default, all origins are allowed + #[configurable(derived)] permit_origin: Option<IpAllowlistConfig>, #[configurable(derived)] @@ -161,9 +159,7 @@ impl DnstapFrameHandler { receive_buffer_bytes: config.receive_buffer_bytes, max_connection_duration_secs: config.max_connection_duration_secs, max_connections: config.connection_limit, - allowlist: config - .permit_origin - .map(|p| p.0.iter().map(|net| net.0).collect()), + allowlist: config.permit_origin.map(Into::into), log_namespace, } } diff --git a/src/sources/fluent/mod.rs b/src/sources/fluent/mod.rs index 023b7cc17e0a1..c2bc3c9ba1ecd 100644 --- a/src/sources/fluent/mod.rs +++ b/src/sources/fluent/mod.rs @@ -14,6 +14,7 @@ use tokio_util::codec::Decoder; use vector_lib::codecs::{BytesDeserializerConfig, StreamDecodingError}; use vector_lib::config::{LegacyKey, LogNamespace}; use vector_lib::configurable::configurable_component; +use vector_lib::ipallowlist::IpAllowlistConfig; use vector_lib::lookup::lookup_v2::parse_value_path; use vector_lib::lookup::{metadata_path, owned_value_path, path, OwnedValuePath}; use vector_lib::schema::Definition; @@ -50,6 +51,9 @@ pub struct FluentConfig { #[configurable(derived)] keepalive: Option<TcpKeepaliveConfig>, + #[configurable(derived)] + pub permit_origin: Option<IpAllowlistConfig>, + /// The size of the receive buffer used for each connection. /// /// This generally should not need to be changed.
@@ -75,6 +79,7 @@ impl GenerateConfig for FluentConfig { toml::Value::try_from(Self { address: SocketListenAddr::SocketAddr("0.0.0.0:24224".parse().unwrap()), keepalive: None, + permit_origin: None, tls: None, receive_buffer_bytes: None, acknowledgements: Default::default(), @@ -110,7 +115,7 @@ impl SourceConfig for FluentConfig { cx, self.acknowledgements, self.connection_limit, - None, + self.permit_origin.clone().map(Into::into), FluentConfig::NAME, log_namespace, ) @@ -895,6 +900,7 @@ mod tests { address: address.into(), tls: None, keepalive: None, + permit_origin: None, receive_buffer_bytes: None, acknowledgements: true.into(), connection_limit: None, @@ -959,6 +965,7 @@ mod tests { address: SocketListenAddr::SocketAddr("0.0.0.0:24224".parse().unwrap()), tls: None, keepalive: None, + permit_origin: None, receive_buffer_bytes: None, acknowledgements: false.into(), connection_limit: None, @@ -1014,6 +1021,7 @@ mod tests { address: SocketListenAddr::SocketAddr("0.0.0.0:24224".parse().unwrap()), tls: None, keepalive: None, + permit_origin: None, receive_buffer_bytes: None, acknowledgements: false.into(), connection_limit: None, @@ -1235,6 +1243,7 @@ mod integration_tests { address: address.into(), tls: None, keepalive: None, + permit_origin: None, receive_buffer_bytes: None, acknowledgements: false.into(), connection_limit: None, diff --git a/src/sources/logstash.rs b/src/sources/logstash.rs index 37a0bdc9a9849..2b7d79b4469da 100644 --- a/src/sources/logstash.rs +++ b/src/sources/logstash.rs @@ -5,6 +5,7 @@ use std::{ convert::TryFrom, io::{self, Read}, }; +use vector_lib::ipallowlist::IpAllowlistConfig; use bytes::{Buf, Bytes, BytesMut}; use flate2::read::ZlibDecoder; @@ -45,6 +46,9 @@ pub struct LogstashConfig { #[configurable(metadata(docs::advanced))] keepalive: Option<TcpKeepaliveConfig>, + #[configurable(derived)] + pub permit_origin: Option<IpAllowlistConfig>, + #[configurable(derived)] tls: Option<TlsSourceConfig>, @@ -117,6 +121,7 @@ impl Default for LogstashConfig { Self { address: SocketListenAddr::SocketAddr("0.0.0.0:5044".parse().unwrap()), keepalive: None, + permit_origin: None, tls: None, receive_buffer_bytes: None, acknowledgements: Default::default(), @@ -162,7 +167,7 @@ impl SourceConfig for LogstashConfig { cx, self.acknowledgements, self.connection_limit, - None, + self.permit_origin.clone().map(Into::into), LogstashConfig::NAME, log_namespace, ) @@ -717,6 +722,7 @@ mod test { let source = LogstashConfig { address: address.into(), tls: None, + permit_origin: None, keepalive: None, receive_buffer_bytes: None, acknowledgements: true.into(), @@ -960,6 +966,7 @@ mod integration_tests { address: address.into(), tls: Some(tls_config), keepalive: None, + permit_origin: None, receive_buffer_bytes: None, acknowledgements: false.into(), connection_limit: None, diff --git a/src/sources/socket/mod.rs b/src/sources/socket/mod.rs index 03cae75acd390..bcd32859d0c20 100644 --- a/src/sources/socket/mod.rs +++ b/src/sources/socket/mod.rs @@ -145,9 +145,7 @@ impl SourceConfig for SocketConfig { cx, false.into(), config.connection_limit, - config - .permit_origin - .map(|p| p.0.iter().map(|net| net.0).collect()), + config.permit_origin.map(Into::into), SocketConfig::NAME, log_namespace, ) diff --git a/src/sources/socket/tcp.rs b/src/sources/socket/tcp.rs index 1b917df06533a..1f640e8ea835f 100644 --- a/src/sources/socket/tcp.rs +++ b/src/sources/socket/tcp.rs @@ -59,9 +59,7 @@ pub struct TcpConfig { #[serde(default = "default_port_key")] port_key: OptionalValuePath, - /// List of allowed origin IP networks - /// - /// By default, all origins
are allowed + #[configurable(derived)] pub permit_origin: Option<IpAllowlistConfig>, #[configurable(derived)] diff --git a/src/sources/statsd/mod.rs b/src/sources/statsd/mod.rs index e00b944bd4731..dfc32a1851718 100644 --- a/src/sources/statsd/mod.rs +++ b/src/sources/statsd/mod.rs @@ -2,6 +2,7 @@ use std::{ net::{Ipv4Addr, SocketAddr, SocketAddrV4}, time::Duration, }; +use vector_lib::ipallowlist::IpAllowlistConfig; use bytes::Bytes; use futures::{StreamExt, TryFutureExt}; @@ -92,6 +93,9 @@ pub struct TcpConfig { #[configurable(derived)] keepalive: Option<TcpKeepaliveConfig>, + #[configurable(derived)] + pub permit_origin: Option<IpAllowlistConfig>, + #[configurable(derived)] #[serde(default)] tls: Option<TlsSourceConfig>, @@ -117,6 +121,7 @@ impl TcpConfig { Self { address, keepalive: None, + permit_origin: None, tls: None, shutdown_timeout_secs: default_shutdown_timeout_secs(), receive_buffer_bytes: None, @@ -168,7 +173,7 @@ impl SourceConfig for StatsdConfig { cx, false.into(), config.connection_limit, - None, + config.permit_origin.clone().map(Into::into), StatsdConfig::NAME, LogNamespace::Legacy, ) diff --git a/src/sources/syslog.rs b/src/sources/syslog.rs index 0667571d10814..3ac5dfc8ba656 100644 --- a/src/sources/syslog.rs +++ b/src/sources/syslog.rs @@ -1,6 +1,7 @@ #[cfg(unix)] use std::path::PathBuf; use std::{net::SocketAddr, time::Duration}; +use vector_lib::ipallowlist::IpAllowlistConfig; use bytes::Bytes; use chrono::Utc; @@ -70,6 +71,7 @@ pub struct SyslogConfig { #[derive(Clone, Debug)] #[serde(tag = "mode", rename_all = "snake_case")] #[configurable(metadata(docs::enum_tag_description = "The type of socket to use."))] +#[allow(clippy::large_enum_variant)] pub enum Mode { /// Listen on TCP. Tcp { @@ -79,6 +81,9 @@ pub enum Mode { #[configurable(derived)] keepalive: Option<TcpKeepaliveConfig>, + #[configurable(derived)] + permit_origin: Option<IpAllowlistConfig>, + #[configurable(derived)] tls: Option<TlsSourceConfig>, @@ -141,6 +146,7 @@ impl Default for SyslogConfig { mode: Mode::Tcp { address: SocketListenAddr::SocketAddr("0.0.0.0:514".parse().unwrap()), keepalive: None, + permit_origin: None, tls: None, receive_buffer_bytes: None, connection_limit: None, @@ -173,6 +179,7 @@ impl SourceConfig for SyslogConfig { Mode::Tcp { address, keepalive, + permit_origin, tls, receive_buffer_bytes, connection_limit, @@ -200,7 +207,7 @@ impl SourceConfig for SyslogConfig { cx, false.into(), connection_limit, - None, + permit_origin.map(Into::into), SyslogConfig::NAME, log_namespace, ) @@ -1115,6 +1122,7 @@ mod test { // Create and spawn the source. let config = SyslogConfig::from_mode(Mode::Tcp { address: in_addr.into(), + permit_origin: None, keepalive: None, tls: None, receive_buffer_bytes: None, @@ -1258,6 +1266,7 @@ mod test { // Create and spawn the source. let config = SyslogConfig::from_mode(Mode::Tcp { address: in_addr.into(), + permit_origin: None, keepalive: None, tls: None, receive_buffer_bytes: None, diff --git a/website/cue/reference/components/sources/base/dnstap.cue b/website/cue/reference/components/sources/base/dnstap.cue index 3b4fca30d9b16..ea0df54556c2b 100644 --- a/website/cue/reference/components/sources/base/dnstap.cue +++ b/website/cue/reference/components/sources/base/dnstap.cue @@ -87,14 +87,10 @@ base: components: sources: dnstap: configuration: { type: bool: {} } permit_origin: { - description: """ - List of allowed origin IP networks - - By default, all origins are allowed - """ + description: "List of allowed origin IP networks. IP addresses must be in CIDR notation."
relevant_when: "mode = \"tcp\"" required: false - type: array: items: type: string: {} + type: array: items: type: string: examples: ["192.168.0.0/16", "127.0.0.1/32", "::1/128", "9876:9ca3:99ab::23/128"] } port_key: { description: """ diff --git a/website/cue/reference/components/sources/base/fluent.cue b/website/cue/reference/components/sources/base/fluent.cue index 3df2cbcc53b58..b5f07df7da264 100644 --- a/website/cue/reference/components/sources/base/fluent.cue +++ b/website/cue/reference/components/sources/base/fluent.cue @@ -46,6 +46,11 @@ base: components: sources: fluent: configuration: { type: uint: unit: "seconds" } } + permit_origin: { + description: "List of allowed origin IP networks. IP addresses must be in CIDR notation." + required: false + type: array: items: type: string: examples: ["192.168.0.0/16", "127.0.0.1/32", "::1/128", "9876:9ca3:99ab::23/128"] + } receive_buffer_bytes: { description: """ The size of the receive buffer used for each connection. diff --git a/website/cue/reference/components/sources/base/logstash.cue b/website/cue/reference/components/sources/base/logstash.cue index af5389d26818c..de60b7c977e7c 100644 --- a/website/cue/reference/components/sources/base/logstash.cue +++ b/website/cue/reference/components/sources/base/logstash.cue @@ -46,6 +46,11 @@ base: components: sources: logstash: configuration: { type: uint: unit: "seconds" } } + permit_origin: { + description: "List of allowed origin IP networks. IP addresses must be in CIDR notation." + required: false + type: array: items: type: string: examples: ["192.168.0.0/16", "127.0.0.1/32", "::1/128", "9876:9ca3:99ab::23/128"] + } receive_buffer_bytes: { description: "The size of the receive buffer used for each connection." required: false diff --git a/website/cue/reference/components/sources/base/socket.cue b/website/cue/reference/components/sources/base/socket.cue index 68994a0e52a62..1ce0c35487c09 100644 --- a/website/cue/reference/components/sources/base/socket.cue +++ b/website/cue/reference/components/sources/base/socket.cue @@ -394,14 +394,10 @@ base: components: sources: socket: configuration: { type: string: examples: ["/path/to/socket"] } permit_origin: { - description: """ - List of allowed origin IP networks - - By default, all origins are allowed - """ + description: "List of allowed origin IP networks. IP addresses must be in CIDR notation." relevant_when: "mode = \"tcp\"" required: false - type: array: items: type: string: {} + type: array: items: type: string: examples: ["192.168.0.0/16", "127.0.0.1/32", "::1/128", "9876:9ca3:99ab::23/128"] } port_key: { description: """ diff --git a/website/cue/reference/components/sources/base/statsd.cue b/website/cue/reference/components/sources/base/statsd.cue index a94e954201f42..5b1c6ea8bff94 100644 --- a/website/cue/reference/components/sources/base/statsd.cue +++ b/website/cue/reference/components/sources/base/statsd.cue @@ -47,6 +47,12 @@ base: components: sources: statsd: configuration: { required: true type: string: examples: ["/path/to/socket"] } + permit_origin: { + description: "List of allowed origin IP networks. IP addresses must be in CIDR notation." + relevant_when: "mode = \"tcp\"" + required: false + type: array: items: type: string: examples: ["192.168.0.0/16", "127.0.0.1/32", "::1/128", "9876:9ca3:99ab::23/128"] + } receive_buffer_bytes: { description: "The size of the receive buffer used for each connection." 
relevant_when: "mode = \"tcp\" or mode = \"udp\"" diff --git a/website/cue/reference/components/sources/base/syslog.cue b/website/cue/reference/components/sources/base/syslog.cue index 475cac66c4419..77ac76e222954 100644 --- a/website/cue/reference/components/sources/base/syslog.cue +++ b/website/cue/reference/components/sources/base/syslog.cue @@ -77,6 +77,12 @@ base: components: sources: syslog: configuration: { required: true type: string: examples: ["/path/to/socket"] } + permit_origin: { + description: "List of allowed origin IP networks. IP addresses must be in CIDR notation." + relevant_when: "mode = \"tcp\"" + required: false + type: array: items: type: string: examples: ["192.168.0.0/16", "127.0.0.1/32", "::1/128", "9876:9ca3:99ab::23/128"] + } receive_buffer_bytes: { description: """ The size of the receive buffer used for each connection. From 0ec279d2a1b6a113f6e62d1f755a29a371862307 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 06:48:07 -0700 Subject: [PATCH 0138/1491] chore(ci): Bump bufbuild/buf-setup-action from 1.29.0 to 1.30.0 (#20056) Bumps [bufbuild/buf-setup-action](https://github.com/bufbuild/buf-setup-action) from 1.29.0 to 1.30.0. - [Release notes](https://github.com/bufbuild/buf-setup-action/releases) - [Commits](https://github.com/bufbuild/buf-setup-action/compare/v1.29.0...v1.30.0) --- updated-dependencies: - dependency-name: bufbuild/buf-setup-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/protobuf.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/protobuf.yml b/.github/workflows/protobuf.yml index 4bc925d25e003..f9ea54bf8a06c 100644 --- a/.github/workflows/protobuf.yml +++ b/.github/workflows/protobuf.yml @@ -21,7 +21,7 @@ jobs: # Run `git checkout` - uses: actions/checkout@v3 # Install the `buf` CLI - - uses: bufbuild/buf-setup-action@v1.29.0 + - uses: bufbuild/buf-setup-action@v1.30.0 # Perform breaking change detection against the `master` branch - uses: bufbuild/buf-breaking-action@v1.1.3 with: From de4687ff51eda7c67a66ebe86138ab9ad7ceb54c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 13:48:18 +0000 Subject: [PATCH 0139/1491] chore(deps): Bump the aws group with 4 updates (#20079) Bumps the aws group with 4 updates: [aws-smithy-types](https://github.com/smithy-lang/smithy-rs), [aws-smithy-runtime-api](https://github.com/smithy-lang/smithy-rs), [aws-smithy-runtime](https://github.com/smithy-lang/smithy-rs) and [aws-smithy-async](https://github.com/smithy-lang/smithy-rs). 
Updates `aws-smithy-types` from 1.1.7 to 1.1.8 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-runtime-api` from 1.1.7 to 1.2.0 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-runtime` from 1.1.7 to 1.1.8 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-smithy-async` from 1.1.7 to 1.1.8 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-smithy-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-runtime-api dependency-type: direct:production update-type: version-update:semver-minor dependency-group: aws - dependency-name: aws-smithy-runtime dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-smithy-async dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 21 ++++++++++----------- Cargo.toml | 8 ++++---- 2 files changed, 14 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 290785afbb257..fbbd91ae397d5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1046,9 +1046,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fcf7f09a27286d84315dfb9346208abb3b0973a692454ae6d0bc8d803fcce3b4" +checksum = "d26ea8fa03025b2face2b3038a63525a10891e3d8829901d502e5384a0d8cd46" dependencies = [ "futures-util", "pin-project-lite", @@ -1089,9 +1089,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.60.6" +version = "0.60.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6ca214a6a26f1b7ebd63aa8d4f5e2194095643023f9608edf99a58247b9d80d" +checksum = "3f10fa66956f01540051b0aa7ad54574640f748f9839e843442d99b970d3aff9" dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", @@ -1129,9 +1129,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbb5fca54a532a36ff927fbd7407a7c8eb9c3b4faf72792ba2965ea2cad8ed55" +checksum = "ec81002d883e5a7fd2bb063d6fb51c4999eb55d404f4fff3dd878bf4733b9f01" dependencies = [ "aws-smithy-async", "aws-smithy-http", @@ -1154,9 +1154,9 @@ dependencies = [ [[package]] name = "aws-smithy-runtime-api" -version = "1.1.7" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22389cb6f7cac64f266fb9f137745a9349ced7b47e0d2ba503e9e40ede4f7060" +checksum = "9acb931e0adaf5132de878f1398d83f8677f90ba70f01f65ff87f6d7244be1c5" dependencies = [ "aws-smithy-async", "aws-smithy-types", @@ 
-1170,14 +1170,13 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f081da5481210523d44ffd83d9f0740320050054006c719eae0232d411f024d3" +checksum = "abe14dceea1e70101d38fbf2a99e6a34159477c0fb95e68e05c66bd7ae4c3729" dependencies = [ "base64-simd", "bytes 1.5.0", "bytes-utils", - "futures-core", "http 0.2.9", "http-body 0.4.5", "itoa", diff --git a/Cargo.toml b/Cargo.toml index ee2ea1bce7e84..1a4ae39853917 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -193,9 +193,9 @@ aws-sigv4 = { version = "1.1.7", default-features = false, features = ["sign-htt aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest", "credentials-process"], optional = true } aws-credential-types = { version = "1.1.7", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } -aws-smithy-types = { version = "1.1.7", default-features = false, optional = true } -aws-smithy-runtime-api = { version = "1.1.7", default-features = false, optional = true } -aws-smithy-runtime = { version = "1.1.7", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } +aws-smithy-types = { version = "1.1.8", default-features = false, optional = true } +aws-smithy-runtime-api = { version = "1.2.0", default-features = false, optional = true } +aws-smithy-runtime = { version = "1.1.8", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } aws-smithy-async = { version = "1.0.2", default-features = false, features = ["rt-tokio"], optional = true } # Azure @@ -366,7 +366,7 @@ openssl-src = { version = "300", default-features = false, features = ["force-en [dev-dependencies] approx = "0.5.1" assert_cmd = { version = "2.0.14", default-features = false } -aws-smithy-runtime = { version = "1.1.7", default-features = false, features = ["tls-rustls"] } +aws-smithy-runtime = { version = "1.1.8", default-features = false, features = ["tls-rustls"] } azure_core = { version = "0.17", default-features = false, features = ["enable_reqwest", "azurite_workaround"] } azure_identity = { version = "0.17", default-features = false, features = ["enable_reqwest"] } azure_storage_blobs = { version = "0.17", default-features = false, features = ["azurite_workaround"] } From c62ec39ab159b964ec0069db5b528f0954a66c43 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 13:48:28 +0000 Subject: [PATCH 0140/1491] chore(deps): Bump reqwest from 0.11.24 to 0.11.26 (#20080) Bumps [reqwest](https://github.com/seanmonstar/reqwest) from 0.11.24 to 0.11.26. - [Release notes](https://github.com/seanmonstar/reqwest/releases) - [Changelog](https://github.com/seanmonstar/reqwest/blob/master/CHANGELOG.md) - [Commits](https://github.com/seanmonstar/reqwest/compare/v0.11.24...v0.11.26) --- updated-dependencies: - dependency-name: reqwest dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/k8s-e2e-tests/Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index fbbd91ae397d5..526811412a0f1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7493,9 +7493,9 @@ dependencies = [ [[package]] name = "reqwest" -version = "0.11.24" +version = "0.11.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6920094eb85afde5e4a138be3f2de8bbdf28000f0029e72c45025a56b042251" +checksum = "78bf93c4af7a8bb7d879d51cebe797356ff10ae8516ace542b5182d9dcac10b2" dependencies = [ "base64 0.21.7", "bytes 1.5.0", diff --git a/lib/k8s-e2e-tests/Cargo.toml b/lib/k8s-e2e-tests/Cargo.toml index 7a2fa20f37490..7fd257e2170c7 100644 --- a/lib/k8s-e2e-tests/Cargo.toml +++ b/lib/k8s-e2e-tests/Cargo.toml @@ -12,7 +12,7 @@ futures = "0.3" k8s-openapi = { version = "0.16.0", default-features = false, features = ["v1_19"] } k8s-test-framework = { version = "0.1", path = "../k8s-test-framework" } regex = "1" -reqwest = { version = "0.11.24", features = ["json"] } +reqwest = { version = "0.11.26", features = ["json"] } serde_json.workspace = true tokio = { version = "1.36.0", features = ["full"] } indoc = "2.0.4" diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index d6aad353bd815..9231f2970c4e0 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -25,7 +25,7 @@ tokio-stream = { version = "0.1.14", default-features = false, features = ["sync graphql_client = { version = "0.13.0", default-features = false, features = ["graphql_query_derive"] } # HTTP / WebSockets -reqwest = { version = "0.11.24", default-features = false, features = ["json"] } +reqwest = { version = "0.11.26", default-features = false, features = ["json"] } tokio-tungstenite = { version = "0.20.1", default-features = false, features = ["connect", "rustls"] } # External libs From 62de4218e00a9907bc3c79b9e36c01066b772bb5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 13:48:38 +0000 Subject: [PATCH 0141/1491] chore(deps): Bump serde-toml-merge from 0.3.4 to 0.3.5 (#20081) Bumps [serde-toml-merge](https://github.com/jdrouet/serde-toml-merge) from 0.3.4 to 0.3.5. - [Release notes](https://github.com/jdrouet/serde-toml-merge/releases) - [Changelog](https://github.com/jdrouet/serde-toml-merge/blob/main/CHANGELOG.md) - [Commits](https://github.com/jdrouet/serde-toml-merge/compare/v0.3.4...v0.3.5) --- updated-dependencies: - dependency-name: serde-toml-merge dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 526811412a0f1..e81c2cfec184b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8107,9 +8107,9 @@ dependencies = [ [[package]] name = "serde-toml-merge" -version = "0.3.4" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc6244d74ff29bd838ad4cfc9184e3f5d0011500acc8d3fb96708211d4edfb26" +checksum = "317213b881aea9bc19d7590b9cf467c58ad5f536f95b4d42129b5643f351d27f" dependencies = [ "toml", ] diff --git a/Cargo.toml b/Cargo.toml index 1a4ae39853917..c2167ec1f7f1f 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -212,7 +212,7 @@ tower = { version = "0.4.13", default-features = false, features = ["buffer", "l tower-http = { version = "0.4.4", default-features = false, features = ["decompression-gzip", "trace"]} # Serde serde.workspace = true -serde-toml-merge = { version = "0.3.4", default-features = false } +serde-toml-merge = { version = "0.3.5", default-features = false } serde_bytes = { version = "0.11.14", default-features = false, features = ["std"], optional = true } serde_json.workspace = true serde_with = { version = "3.7.0", default-features = false, features = ["macros", "std"] } From d23730e3138c20fac276178357234135f1fc52bd Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 13 Mar 2024 13:48:52 +0000 Subject: [PATCH 0142/1491] chore(deps): Bump os_info from 3.7.0 to 3.8.0 (#20082) Bumps [os_info](https://github.com/stanislav-tkach/os_info) from 3.7.0 to 3.8.0. - [Release notes](https://github.com/stanislav-tkach/os_info/releases) - [Changelog](https://github.com/stanislav-tkach/os_info/blob/master/CHANGELOG.md) - [Commits](https://github.com/stanislav-tkach/os_info/compare/v3.7.0...v3.8.0) --- updated-dependencies: - dependency-name: os_info dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 6 +++--- vdev/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e81c2cfec184b..2a4c7bb7f6887 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6131,12 +6131,12 @@ dependencies = [ [[package]] name = "os_info" -version = "3.7.0" +version = "3.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "006e42d5b888366f1880eda20371fedde764ed2213dc8496f49622fa0c99cd5e" +checksum = "52a07930afc1bd77ac9e1101dc18d3fc4986c6568e939c31d1c26657eb0ccbf5" dependencies = [ "log", - "winapi", + "windows-sys 0.52.0", ] [[package]] diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index b7057f39836d1..5690043c0261f 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -25,7 +25,7 @@ indicatif = { version = "0.17.8", features = ["improved_unicode"] } itertools = "0.12.1" log = "0.4.21" once_cell = "1.19" -os_info = { version = "3.7.0", default-features = false } +os_info = { version = "3.8.0", default-features = false } # watch https://github.com/epage/anstyle for official interop with Clap owo-colors = { version = "4.0.0", features = ["supports-colors"] } paste = "1.0.14" From ebdc64dbfc0ac71a1ff73ab9080849eca718a442 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 13 Mar 2024 10:12:04 -0700 Subject: [PATCH 0143/1491] fix(elasticsearch sink): Re-add error log for elasticsearch sink (#19846) * fix(elasticsearch sink): Re-add error log for elasticsearch sink Users were depending on this log to determine the number of failed events. Ideally these failed events could be routed from the sink and counted that way, but until then re-adding the log unblocks users from upgrading. Fixes: #15886 Signed-off-by: Jesse Szwedko * Add changelog Signed-off-by: Jesse Szwedko * Use different name for event since not part of the component event framework Signed-off-by: Jesse Szwedko * Revert "Use different name for event since not part of the component event framework" This reverts commit d9f41ef21f5d667fdb6e9ddbcd8cf9672470794a. * Reapply "Use different name for event since not part of the component event framework" This reverts commit 9d0b2612a5988e0b97f174fc9e511edab299b82b. * Remove event add direct log Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- changelog.d/elasticsearch_log.fix.md | 3 +++ src/sinks/elasticsearch/service.rs | 16 ++++++++++++++++ 2 files changed, 19 insertions(+) create mode 100644 changelog.d/elasticsearch_log.fix.md diff --git a/changelog.d/elasticsearch_log.fix.md b/changelog.d/elasticsearch_log.fix.md new file mode 100644 index 0000000000000..4867a4954b3cc --- /dev/null +++ b/changelog.d/elasticsearch_log.fix.md @@ -0,0 +1,3 @@ +Re-added an error log to the Elasticsearch sink that logs the response body when errors occur. This +log existed in Vector v0.24.0 but was removed in v0.25.0. Some users were depending on +this log to count the number of errors, so it has been re-added. diff --git a/src/sinks/elasticsearch/service.rs b/src/sinks/elasticsearch/service.rs index 9824c0e38b177..a189e33c2990c 100644 --- a/src/sinks/elasticsearch/service.rs +++ b/src/sinks/elasticsearch/service.rs @@ -206,18 +206,34 @@ impl Service<ElasticsearchRequest> for ElasticsearchService { } } +// This event is not part of the event framework but is kept because some users were depending on it +// to identify the number of errors returned by Elasticsearch.
It can be dropped when we have better +// telemetry. Ref: #15886 +fn emit_bad_response_error(response: &Response) { + let error_code = format!("http_response_{}", response.status().as_u16()); + + error!( + message = "Response contained errors.", + error_code = error_code, + response = ?response, + ); +} + fn get_event_status(response: &Response) -> EventStatus { let status = response.status(); if status.is_success() { let body = String::from_utf8_lossy(response.body()); if body.contains("\"errors\":true") { + emit_bad_response_error(response); EventStatus::Rejected } else { EventStatus::Delivered } } else if status.is_server_error() { + emit_bad_response_error(response); EventStatus::Errored } else { + emit_bad_response_error(response); EventStatus::Rejected } } From f7380e45e4e1af63dd1bb3ecefac50ff45376a3c Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 13 Mar 2024 12:29:15 -0700 Subject: [PATCH 0144/1491] fix(file source): Set ignored_header_bytes default to `0` (#20076) To match the docs. Fixes: #20072 Signed-off-by: Jesse Szwedko --- changelog.d/file_ignored_header_bytes_default.fix.md | 1 + src/sources/file.rs | 5 +++++ 2 files changed, 6 insertions(+) create mode 100644 changelog.d/file_ignored_header_bytes_default.fix.md diff --git a/changelog.d/file_ignored_header_bytes_default.fix.md b/changelog.d/file_ignored_header_bytes_default.fix.md new file mode 100644 index 0000000000000..442d848e2da6a --- /dev/null +++ b/changelog.d/file_ignored_header_bytes_default.fix.md @@ -0,0 +1 @@ +The `fingerprint.ignored_header_bytes` option on the `file` source now has a default of `0`. diff --git a/src/sources/file.rs b/src/sources/file.rs index 6e618d11c4f3a..6c1ee7b073e8c 100644 --- a/src/sources/file.rs +++ b/src/sources/file.rs @@ -321,6 +321,7 @@ pub enum FingerprintConfig { /// The number of bytes to skip ahead (or ignore) when reading the data used for generating the checksum. /// /// This can be helpful if all files share a common header that should be skipped. 
+ #[serde(default = "default_ignored_header_bytes")] #[configurable(metadata(docs::type_unit = "bytes"))] ignored_header_bytes: usize, @@ -351,6 +352,10 @@ impl Default for FingerprintConfig { } } +const fn default_ignored_header_bytes() -> usize { + 0 +} + const fn default_lines() -> usize { 1 } From ccaa7e376d0167d187573c4b9b478f1c2778e359 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Wed, 13 Mar 2024 15:18:39 -0700 Subject: [PATCH 0145/1491] chore(dev): Update CODEOWNERS to reflect consolidation (#20087) * chore(dev): Update CODEOWNERS to reflect consolidation Signed-off-by: Jesse Szwedko * Missed a few Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- .github/CODEOWNERS | 196 ++++++++++++++++++++++----------------------- 1 file changed, 98 insertions(+), 98 deletions(-) diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index c42b902aa2c0a..31043e805b35e 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,100 +1,100 @@ docs/ @vectordotdev/ux-team -lib/dnsmsg-parser/ @vectordotdev/integrations-team -lib/file-source/ @vectordotdev/integrations-team -lib/k8s-e2e-tests/ @vectordotdev/integrations-team -lib/k8s-test-framework/ @vectordotdev/integrations-team -lib/opentelemetry-proto/ @vectordotdev/integrations-team -lib/vector-common/ @vectordotdev/core-team -lib/vector-config/ @vectordotdev/core-team -lib/vector-config-common/ @vectordotdev/core-team -lib/vector-config-macros/ @vectordotdev/core-team -lib/vector-core/ @vectordotdev/core-team -lib/vector-vrl-functions/ @vectordotdev/processing-team -lib/vrl/ @vectordotdev/processing-team -src/config/ @vectordotdev/core-team -src/internal_telemetry/ @vectordotdev/core-team -src/sinks/ @vectordotdev/integrations-team -src/sinks/amqp/ @StephenWakely @vectordotdev/integrations-team -src/sinks/appsignal/ @neuronull @vectordotdev/integrations-team -src/sinks/aws_cloudwatch_logs/ @vectordotdev/integrations-team -src/sinks/aws_cloudwatch_metrics/ @vectordotdev/integrations-team -src/sinks/aws_kinesis/ @vectordotdev/integrations-team # sink_aws_kinesis_firehose,sink_aws_kinesis_stream -src/sinks/aws_s3/ @vectordotdev/integrations-team -src/sinks/aws_sqs/ @vectordotdev/integrations-team -src/sinks/axiom.rs @vectordotdev/integrations-team -src/sinks/azure_blob/ @dsmith3197 @vectordotdev/integrations-team -src/sinks/azure_monitor_logs.rs @dsmith3197 @vectordotdev/integrations-team -src/sinks/blackhole/ @dsmith3197 @vectordotdev/integrations-team -src/sinks/clickhouse/ @dsmith3197 @vectordotdev/integrations-team -src/sinks/console/ @dsmith3197 @vectordotdev/integrations-team -src/sinks/databend/ @vectordotdev/integrations-team -src/sinks/datadog_events/ @neuronull @vectordotdev/integrations-team -src/sinks/datadog_logs/ @neuronull @vectordotdev/integrations-team -src/sinks/datadog_metrics/ @neuronull @vectordotdev/integrations-team -src/sinks/datadog_traces/ @neuronull @vectordotdev/integrations-team -src/sinks/elasticsearch/ @vectordotdev/integrations-team -src/sinks/file/ @vectordotdev/integrations-team -src/sinks/gcp/ @StephenWakely @vectordotdev/integrations-team # sink_gcp_chronicle_unstructured,sink_gcp_cloud_storage,sink_gcp_pubsub,sink_gcp_stackdriver_logs,sink_gcp_stackdriver_metrics -src/sinks/honeycomb.rs @vectordotdev/integrations-team -src/sinks/http.rs @neuronull @vectordotdev/integrations-team -src/sinks/humio/ @StephenWakely @vectordotdev/integrations-team # sink_humio_logs,sink_humio_metrics -src/sinks/influxdb/ @dsmith3197 @vectordotdev/integrations-team # sink_influxdb_logs,sink_influxdb_metrics 
-src/sinks/kafka/ @dsmith3197 @vectordotdev/integrations-team -src/sinks/logdna.rs @neuronull @vectordotdev/integrations-team -src/sinks/loki/ @vectordotdev/integrations-team -src/sinks/nats.rs @StephenWakely @vectordotdev/integrations-team -src/sinks/new_relic/ @dsmith3197 @vectordotdev/integrations-team # sink_newrelix,sink_newrelic_logs -src/sinks/papertrail.rs @StephenWakely @vectordotdev/integrations-team -src/sinks/prometheus/ @StephenWakely @vectordotdev/integrations-team # sink_prometheus_exporter,sink_prometheus_remote_write -src/sinks/pulsar.rs @dsmith3197 @vectordotdev/integrations-team -src/sinks/redis.rs @StephenWakely @vectordotdev/integrations-team -src/sinks/sematext/ @vectordotdev/integrations-team # sink_sematext_logs,sink_sematext_metrics -src/sinks/socket.rs @neuronull @vectordotdev/integrations-team -src/sinks/splunk_hec/ @StephenWakely @vectordotdev/integrations-team # sink_splunk_hec_logs,sink_splunk_hec_metrics -src/sinks/statsd.rs @neuronull @vectordotdev/integrations-team -src/sinks/vector/ @neuronull @vectordotdev/integrations-team -src/sinks/websocket/ @neuronull @vectordotdev/integrations-team -src/source_sender/ @vectordotdev/core-team -src/sources/ @vectordotdev/integrations-team -src/sources/amqp.rs @StephenWakely @vectordotdev/integrations-team -src/sources/apache_metrics/ @dsmith3197 @vectordotdev/integrations-team -src/sources/aws_ecs_metrics/ @vectordotdev/integrations-team -src/sources/aws_kinesis_firehose/ @vectordotdev/integrations-team -src/sources/aws_s3/ @vectordotdev/integrations-team -src/sources/aws_sqs/ @vectordotdev/integrations-team -src/sources/datadog_agent/ @neuronull @vectordotdev/integrations-team -src/sources/demo_logs.rs @StephenWakely @vectordotdev/integrations-team -src/sources/dnstap/ @StephenWakely @vectordotdev/integrations-team -src/sources/docker_logs/ @vectordotdev/integrations-team -src/sources/eventstoredb_metrics/ @dsmith3197 @vectordotdev/integrations-team -src/sources/exec/ @dsmith3197 @vectordotdev/integrations-team -src/sources/file.rs @vectordotdev/integrations-team -src/sources/file_descriptors/ @dsmith3197 @vectordotdev/integrations-team # source_file_descriptor,source_stdin -src/sources/fluent/ @neuronull @vectordotdev/integrations-team -src/sources/gcp_pubsub.rs @StephenWakely @vectordotdev/integrations-team -src/sources/heroku_logs.rs @vectordotdev/integrations-team -src/sources/host_metrics/ @dsmith3197 @vectordotdev/integrations-team -src/sources/http_client/ @neuronull @vectordotdev/integrations-team -src/sources/http_server.rs @neuronull @vectordotdev/integrations-team -src/sources/internal_logs.rs @neuronull @vectordotdev/integrations-team -src/sources/internal_metrics.rs @neuronull @vectordotdev/integrations-team -src/sources/journald.rs @vectordotdev/integrations-team -src/sources/kafka.rs @dsmith3197 @vectordotdev/integrations-team -src/sources/kubernetes_logs/ @vectordotdev/integrations-team -src/sources/logstash.rs @neuronull @vectordotdev/integrations-team -src/sources/mongodb_metrics/ @dsmith3197 @vectordotdev/integrations-team -src/sources/nats.rs @StephenWakely @vectordotdev/integrations-team -src/sources/nginx_metrics/ @dsmith3197 @vectordotdev/integrations-team -src/sources/opentelemetry/ @vectordotdev/integrations-team -src/sources/postgresql_metrics.rs @dsmith3197 @vectordotdev/integrations-team -src/sources/prometheus/ @StephenWakely @vectordotdev/integrations-team # source_prometheus_remote_write,source_prometheus_scrape -src/sources/redis/ @StephenWakely @vectordotdev/integrations-team 
-src/sources/socket/ @neuronull @vectordotdev/integrations-team -src/sources/splunk_hec/ @StephenWakely @vectordotdev/integrations-team -src/sources/statsd/ @neuronull @vectordotdev/integrations-team -src/sources/syslog.rs @StephenWakely @vectordotdev/integrations-team -src/sources/vector/ @neuronull @vectordotdev/integrations-team -src/test_util/ @vectordotdev/core-team -src/topology/ @vectordotdev/core-team -src/transforms/ @vectordotdev/processing-team +lib/dnsmsg-parser/ @vectordotdev/vector +lib/file-source/ @vectordotdev/vector +lib/k8s-e2e-tests/ @vectordotdev/vector +lib/k8s-test-framework/ @vectordotdev/vector +lib/opentelemetry-proto/ @vectordotdev/vector +lib/vector-common/ @vectordotdev/vector +lib/vector-config/ @vectordotdev/vector +lib/vector-config-common/ @vectordotdev/vector +lib/vector-config-macros/ @vectordotdev/vector +lib/vector-core/ @vectordotdev/vector +lib/vector-vrl-functions/ @vectordotdev/vector +lib/vrl/ @vectordotdev/vector +src/config/ @vectordotdev/vector +src/internal_telemetry/ @vectordotdev/vector +src/sinks/ @vectordotdev/vector +src/sinks/amqp/ @vectordotdev/vector +src/sinks/appsignal/ @vectordotdev/vector +src/sinks/aws_cloudwatch_logs/ @vectordotdev/vector +src/sinks/aws_cloudwatch_metrics/ @vectordotdev/vector +src/sinks/aws_kinesis/ @vectordotdev/vector # sink_aws_kinesis_firehose,sink_aws_kinesis_stream +src/sinks/aws_s3/ @vectordotdev/vector +src/sinks/aws_sqs/ @vectordotdev/vector +src/sinks/axiom.rs @vectordotdev/vector +src/sinks/azure_blob/ @vectordotdev/vector +src/sinks/azure_monitor_logs.rs @vectordotdev/vector +src/sinks/blackhole/ @vectordotdev/vector +src/sinks/clickhouse/ @vectordotdev/vector +src/sinks/console/ @vectordotdev/vector +src/sinks/databend/ @vectordotdev/vector +src/sinks/datadog_events/ @vectordotdev/vector +src/sinks/datadog_logs/ @vectordotdev/vector +src/sinks/datadog_metrics/ @vectordotdev/vector +src/sinks/datadog_traces/ @vectordotdev/vector +src/sinks/elasticsearch/ @vectordotdev/vector +src/sinks/file/ @vectordotdev/vector +src/sinks/gcp/ @vectordotdev/vector # sink_gcp_chronicle_unstructured,sink_gcp_cloud_storage,sink_gcp_pubsub,sink_gcp_stackdriver_logs,sink_gcp_stackdriver_metrics +src/sinks/honeycomb.rs @vectordotdev/vector +src/sinks/http.rs @vectordotdev/vector +src/sinks/humio/ @vectordotdev/vector # sink_humio_logs,sink_humio_metrics +src/sinks/influxdb/ @vectordotdev/vector # sink_influxdb_logs,sink_influxdb_metrics +src/sinks/kafka/ @vectordotdev/vector +src/sinks/logdna.rs @vectordotdev/vector +src/sinks/loki/ @vectordotdev/vector +src/sinks/nats.rs @vectordotdev/vector +src/sinks/new_relic/ @vectordotdev/vector # sink_newrelix,sink_newrelic_logs +src/sinks/papertrail.rs @vectordotdev/vector +src/sinks/prometheus/ @vectordotdev/vector # sink_prometheus_exporter,sink_prometheus_remote_write +src/sinks/pulsar.rs @vectordotdev/vector +src/sinks/redis.rs @vectordotdev/vector +src/sinks/sematext/ @vectordotdev/vector # sink_sematext_logs,sink_sematext_metrics +src/sinks/socket.rs @vectordotdev/vector +src/sinks/splunk_hec/ @vectordotdev/vector # sink_splunk_hec_logs,sink_splunk_hec_metrics +src/sinks/statsd.rs @vectordotdev/vector +src/sinks/vector/ @vectordotdev/vector +src/sinks/websocket/ @vectordotdev/vector +src/source_sender/ @vectordotdev/vector +src/sources/ @vectordotdev/vector +src/sources/amqp.rs @vectordotdev/vector +src/sources/apache_metrics/ @vectordotdev/vector +src/sources/aws_ecs_metrics/ @vectordotdev/vector +src/sources/aws_kinesis_firehose/ @vectordotdev/vector +src/sources/aws_s3/ 
@vectordotdev/vector +src/sources/aws_sqs/ @vectordotdev/vector +src/sources/datadog_agent/ @vectordotdev/vector +src/sources/demo_logs.rs @vectordotdev/vector +src/sources/dnstap/ @vectordotdev/vector +src/sources/docker_logs/ @vectordotdev/vector +src/sources/eventstoredb_metrics/ @vectordotdev/vector +src/sources/exec/ @vectordotdev/vector +src/sources/file.rs @vectordotdev/vector +src/sources/file_descriptors/ @vectordotdev/vector # source_file_descriptor,source_stdin +src/sources/fluent/ @vectordotdev/vector +src/sources/gcp_pubsub.rs @vectordotdev/vector +src/sources/heroku_logs.rs @vectordotdev/vector +src/sources/host_metrics/ @vectordotdev/vector +src/sources/http_client/ @vectordotdev/vector +src/sources/http_server.rs @vectordotdev/vector +src/sources/internal_logs.rs @vectordotdev/vector +src/sources/internal_metrics.rs @vectordotdev/vector +src/sources/journald.rs @vectordotdev/vector +src/sources/kafka.rs @vectordotdev/vector +src/sources/kubernetes_logs/ @vectordotdev/vector +src/sources/logstash.rs @vectordotdev/vector +src/sources/mongodb_metrics/ @vectordotdev/vector +src/sources/nats.rs @vectordotdev/vector +src/sources/nginx_metrics/ @vectordotdev/vector +src/sources/opentelemetry/ @vectordotdev/vector +src/sources/postgresql_metrics.rs @vectordotdev/vector +src/sources/prometheus/ @vectordotdev/vector # source_prometheus_remote_write,source_prometheus_scrape +src/sources/redis/ @vectordotdev/vector +src/sources/socket/ @vectordotdev/vector +src/sources/splunk_hec/ @vectordotdev/vector +src/sources/statsd/ @vectordotdev/vector +src/sources/syslog.rs @vectordotdev/vector +src/sources/vector/ @vectordotdev/vector +src/test_util/ @vectordotdev/vector +src/topology/ @vectordotdev/vector +src/transforms/ @vectordotdev/vector website/ @vectordotdev/ux-team @vectordotdev/documentation From d511e893ad0e594231e06f25a9d35ab70248bedc Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Thu, 14 Mar 2024 15:41:54 +0100 Subject: [PATCH 0146/1491] feat(enrichment_tables): add support for custom MMDB types (#20054) * feat(enrichment_tables): add support for custom MMDB types This adds support for custom MMDB types. It will just return whatever is stored in the database, without further modifications. 
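As an illustration only (this sketch is not part of the committed change; the table name and database path are placeholders), a minimal configuration for the new table type could look like:

    [enrichment_tables.asn]
    type = "mmdb"
    path = "/etc/vector/GeoLite2-ASN.mmdb"

The table can then be queried from VRL with the existing enrichment functions, for example `get_enrichment_table_record("asn", {"ip": .ip})`, which returns whatever record the database stores for that IP.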
Test data was generated using the official go example: https://github.com/maxmind/mmdbwriter/blob/main/examples/asn-writer/main.go Fixes: #19995 * Add changelog entry * Change `hostname` in tests to an actual word to avoid spellcheck * Update `enrichment_tables` docs * Update docs Co-authored-by: Ursula Chen <58821586+urseberry@users.noreply.github.com> * Add separate `mmdb` enrichment table type * Update docs * Remove todos * Update comment on geoip `DatabaseKind` * Update changelog entry * Fix mmdb docs * Add benches for mmdb enrichment_tables --------- Co-authored-by: Ursula Chen <58821586+urseberry@users.noreply.github.com> --- Cargo.toml | 5 +- benches/enrichment_tables.rs | 85 +++++- .../20054_custom_mmdb_types.feature.md | 5 + src/enrichment_tables/geoip.rs | 39 ++- src/enrichment_tables/mmdb.rs | 278 ++++++++++++++++++ src/enrichment_tables/mod.rs | 11 + tests/data/custom-type.mmdb | Bin 0 -> 2614 bytes website/cue/reference/configuration.cue | 26 +- website/cue/reference/remap/functions.cue | 2 +- 9 files changed, 436 insertions(+), 15 deletions(-) create mode 100644 changelog.d/20054_custom_mmdb_types.feature.md create mode 100644 src/enrichment_tables/mmdb.rs create mode 100644 tests/data/custom-type.mmdb diff --git a/Cargo.toml b/Cargo.toml index c2167ec1f7f1f..75f10808c0123 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -483,8 +483,9 @@ protobuf-build = ["dep:tonic-build", "dep:prost-build"] gcp = ["dep:base64", "dep:goauth", "dep:smpl_jwt"] # Enrichment Tables -enrichment-tables = ["enrichment-tables-geoip"] +enrichment-tables = ["enrichment-tables-geoip", "enrichment-tables-mmdb"] enrichment-tables-geoip = ["dep:maxminddb"] +enrichment-tables-mmdb = ["dep:maxminddb"] # Codecs codecs-syslog = ["vector-lib/syslog"] @@ -942,7 +943,7 @@ remap-benches = ["transforms-remap"] transform-benches = ["transforms-filter", "transforms-dedupe", "transforms-reduce", "transforms-route"] codecs-benches = [] loki-benches = ["sinks-loki"] -enrichment-tables-benches = ["enrichment-tables-geoip"] +enrichment-tables-benches = ["enrichment-tables-geoip", "enrichment-tables-mmdb"] [[bench]] name = "default" diff --git a/benches/enrichment_tables.rs b/benches/enrichment_tables.rs index 762383095794e..5c9a11f157a62 100644 --- a/benches/enrichment_tables.rs +++ b/benches/enrichment_tables.rs @@ -5,6 +5,7 @@ use criterion::{criterion_group, criterion_main, BatchSize, Criterion}; use vector::enrichment_tables::{ file::File, geoip::{Geoip, GeoipConfig}, + mmdb::{Mmdb, MmdbConfig}, Condition, Table, }; use vector_lib::enrichment::Case; @@ -13,7 +14,7 @@ use vrl::value::{ObjectMap, Value}; criterion_group!( name = benches; config = Criterion::default().noise_threshold(0.02).sample_size(10); - targets = benchmark_enrichment_tables_file, benchmark_enrichment_tables_geoip + targets = benchmark_enrichment_tables_file, benchmark_enrichment_tables_geoip, benchmark_enrichment_tables_mmdb ); criterion_main!(benches); @@ -323,3 +324,85 @@ fn benchmark_enrichment_tables_geoip(c: &mut Criterion) { ); }); } + +fn benchmark_enrichment_tables_mmdb(c: &mut Criterion) { + let mut group = c.benchmark_group("enrichment_tables_mmdb"); + let build = |path: &str| { + Mmdb::new(MmdbConfig { + path: path.to_string(), + }) + .unwrap() + }; + + group.bench_function("enrichment_tables/mmdb_isp", |b| { + let table = build("tests/data/GeoIP2-ISP-Test.mmdb"); + let ip = "208.192.1.2"; + let mut expected = ObjectMap::new(); + expected.insert("autonomous_system_number".into(), 701i64.into()); + expected.insert( + 
"autonomous_system_organization".into(), + "MCI Communications Services, Inc. d/b/a Verizon Business".into(), + ); + expected.insert("isp".into(), "Verizon Business".into()); + expected.insert("organization".into(), "Verizon Business".into()); + + b.iter_batched( + || (&table, ip, &expected), + |(table, ip, expected)| { + assert_eq!( + Ok(expected), + table + .find_table_row( + Case::Insensitive, + &[Condition::Equals { + field: "ip", + value: ip.into(), + }], + None, + None, + ) + .as_ref() + ) + }, + BatchSize::SmallInput, + ); + }); + + group.bench_function("enrichment_tables/mmdb_city", |b| { + let table = build("tests/data/GeoIP2-City-Test.mmdb"); + let ip = "67.43.156.9"; + let mut expected = ObjectMap::new(); + expected.insert( + "location".into(), + ObjectMap::from([ + ("latitude".into(), Value::from(27.5)), + ("longitude".into(), Value::from(90.5)), + ]) + .into(), + ); + + b.iter_batched( + || (&table, ip, &expected), + |(table, ip, expected)| { + assert_eq!( + Ok(expected), + table + .find_table_row( + Case::Insensitive, + &[Condition::Equals { + field: "ip", + value: ip.into(), + }], + Some(&[ + "location.latitude".to_string(), + "location.longitude".to_string(), + ]), + None, + ) + .as_ref() + ) + }, + BatchSize::SmallInput, + ); + }); +} diff --git a/changelog.d/20054_custom_mmdb_types.feature.md b/changelog.d/20054_custom_mmdb_types.feature.md new file mode 100644 index 0000000000000..08e2f7917e4c5 --- /dev/null +++ b/changelog.d/20054_custom_mmdb_types.feature.md @@ -0,0 +1,5 @@ +Added support for custom MMDB enrichment tables. GeoIP enrichment tables will no longer fall back to +City type for unknown types and will instead return an error. New MMDB enrichment table should be +used for such types. + +authors: esensar diff --git a/src/enrichment_tables/geoip.rs b/src/enrichment_tables/geoip.rs index 64c77fb75a159..7d7ad25cc106d 100644 --- a/src/enrichment_tables/geoip.rs +++ b/src/enrichment_tables/geoip.rs @@ -18,8 +18,7 @@ use vrl::value::{ObjectMap, Value}; use crate::config::{EnrichmentTableConfig, GenerateConfig}; // MaxMind GeoIP database files have a type field we can use to recognize specific -// products. If we encounter one of these two types, we look for ASN/ISP information; -// otherwise we expect to be working with a City database. +// products. If it is an unknown type, an error will be returned. #[derive(Copy, Clone, Debug)] #[allow(missing_docs)] pub enum DatabaseKind { @@ -29,13 +28,16 @@ pub enum DatabaseKind { City, } -impl From<&str> for DatabaseKind { - fn from(v: &str) -> Self { - match v { - "GeoLite2-ASN" => Self::Asn, - "GeoIP2-ISP" => Self::Isp, - "GeoIP2-Connection-Type" => Self::ConnectionType, - _ => Self::City, +impl TryFrom<&str> for DatabaseKind { + type Error = (); + + fn try_from(value: &str) -> Result { + match value { + "GeoLite2-ASN" => Ok(Self::Asn), + "GeoIP2-ISP" => Ok(Self::Isp), + "GeoIP2-Connection-Type" => Ok(Self::ConnectionType), + "GeoIP2-City" => Ok(Self::City), + _ => Err(()), } } } @@ -48,6 +50,7 @@ pub struct GeoipConfig { /// (**GeoLite2-City.mmdb**). /// /// Other databases, such as the country database, are not supported. + /// `mmdb` enrichment table can be used for other databases. /// /// [geoip2]: https://dev.maxmind.com/geoip/geoip2/downloadable /// [geolite2]: https://dev.maxmind.com/geoip/geoip2/geolite2/#Download_Access @@ -112,7 +115,13 @@ impl Geoip { /// Creates a new GeoIP struct from the provided config. 
pub fn new(config: GeoipConfig) -> crate::Result<Self> { let dbreader = Arc::new(Reader::open_readfile(config.path.clone())?); - let dbkind = DatabaseKind::from(dbreader.metadata.database_type.as_str()); + let dbkind = + DatabaseKind::try_from(dbreader.metadata.database_type.as_str()).map_err(|_| { + format!( + "Unsupported MMDB database type ({}). Use `mmdb` enrichment table instead.", + dbreader.metadata.database_type + ) + })?; // Check if we can read database with dummy Ip. let ip = IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED); @@ -444,6 +453,16 @@ mod tests { assert!(values.is_none()); } + #[test] + fn custom_mmdb_type_error() { + let result = Geoip::new(GeoipConfig { + path: "tests/data/custom-type.mmdb".to_string(), + locale: default_locale(), + }); + + assert!(result.is_err()); + } + fn find(ip: &str, database: &str) -> Option<ObjectMap> { find_select(ip, database, None) } diff --git a/src/enrichment_tables/mmdb.rs b/src/enrichment_tables/mmdb.rs new file mode 100644 index 0000000000000..7b63ef4650c88 --- /dev/null +++ b/src/enrichment_tables/mmdb.rs @@ -0,0 +1,278 @@ +//! Handles enrichment tables for `type = mmdb`. +//! Enrichment data is loaded from any database in [MaxMind][maxmind] format. +//! +//! [maxmind]: https://maxmind.com +use std::{fs, net::IpAddr, sync::Arc, time::SystemTime}; + +use maxminddb::{MaxMindDBError, Reader}; +use vector_lib::configurable::configurable_component; +use vector_lib::enrichment::{Case, Condition, IndexHandle, Table}; +use vrl::value::{ObjectMap, Value}; + +use crate::config::{EnrichmentTableConfig, GenerateConfig}; + +/// Configuration for the `mmdb` enrichment table. +#[derive(Clone, Debug, Eq, PartialEq)] +#[configurable_component(enrichment_table("mmdb"))] +pub struct MmdbConfig { + /// Path to the [MaxMind][maxmind] database + /// + /// [maxmind]: https://maxmind.com + pub path: String, +} + +impl GenerateConfig for MmdbConfig { + fn generate_config() -> toml::Value { + toml::Value::try_from(Self { + path: "/path/to/GeoLite2-City.mmdb".to_string(), + }) + .unwrap() + } +} + +#[async_trait::async_trait] +impl EnrichmentTableConfig for MmdbConfig { + async fn build( + &self, + _: &crate::config::GlobalOptions, + ) -> crate::Result<Box<dyn Table + Send + Sync>> { + Ok(Box::new(Mmdb::new(self.clone())?)) + } +} + +#[derive(Clone)] +/// A struct that implements [vector_lib::enrichment::Table] to handle loading enrichment data from a MaxMind database. +pub struct Mmdb { + config: MmdbConfig, + dbreader: Arc<Reader<Vec<u8>>>, + last_modified: SystemTime, +} + +impl Mmdb { + /// Creates a new Mmdb struct from the provided config. + pub fn new(config: MmdbConfig) -> crate::Result<Self> { + let dbreader = Arc::new(Reader::open_readfile(config.path.clone())?); + + // Check if we can read database with dummy Ip.
+ let ip = IpAddr::V4(std::net::Ipv4Addr::UNSPECIFIED); + let result = dbreader.lookup::<Value>(ip).map(|_| ()); + + match result { + Ok(_) | Err(MaxMindDBError::AddressNotFoundError(_)) => Ok(Mmdb { + last_modified: fs::metadata(&config.path)?.modified()?, + dbreader, + config, + }), + Err(error) => Err(error.into()), + } + } + + fn lookup(&self, ip: IpAddr, select: Option<&[String]>) -> Option<ObjectMap> { + let data = self.dbreader.lookup::<ObjectMap>(ip).ok()?; + + if let Some(fields) = select { + let mut filtered = Value::from(ObjectMap::new()); + let mut data_value = Value::from(data); + for field in fields { + filtered.insert( + field.as_str(), + data_value + .remove(field.as_str(), false) + .unwrap_or(Value::Null), + ); + } + filtered.into_object() + } else { + Some(data) + } + } +} + +impl Table for Mmdb { + /// Search the enrichment table data with the given condition. + /// All conditions must match (AND). + /// + /// # Errors + /// Errors if no rows, or more than 1 row is found. + fn find_table_row<'a>( + &self, + case: Case, + condition: &'a [Condition<'a>], + select: Option<&[String]>, + index: Option<IndexHandle>, + ) -> Result<ObjectMap, String> { + let mut rows = self.find_table_rows(case, condition, select, index)?; + + match rows.pop() { + Some(row) if rows.is_empty() => Ok(row), + Some(_) => Err("More than 1 row found".to_string()), + None => Err("IP not found".to_string()), + } + } + + /// Search the enrichment table data with the given condition. + /// All conditions must match (AND). + /// Can return multiple matched records + fn find_table_rows<'a>( + &self, + _: Case, + condition: &'a [Condition<'a>], + select: Option<&[String]>, + _: Option<IndexHandle>, + ) -> Result<Vec<ObjectMap>, String> { + match condition.first() { + Some(_) if condition.len() > 1 => Err("Only one condition is allowed".to_string()), + Some(Condition::Equals { value, .. }) => { + let ip = value + .to_string_lossy() + .parse::<IpAddr>() + .map_err(|_| "Invalid IP address".to_string())?; + Ok(self + .lookup(ip, select) + .map(|values| vec![values]) + .unwrap_or_default()) + } + Some(_) => Err("Only equality condition is allowed".to_string()), + None => Err("IP condition must be specified".to_string()), + } + } + + /// Hints to the enrichment table what data is going to be searched to allow it to index the + /// data in advance. + /// + /// # Errors + /// Errors if the fields are not in the table. + fn add_index(&mut self, _: Case, fields: &[&str]) -> Result<IndexHandle, String> { + match fields.len() { + 0 => Err("IP field is required".to_string()), + 1 => Ok(IndexHandle(0)), + _ => Err("Only one field is allowed".to_string()), + } + } + + /// Returns a list of the field names that are in each index + fn index_fields(&self) -> Vec<(Case, Vec<String>)> { + Vec::new() + } + + /// Returns true if the underlying data has changed and the table needs reloading.
+ fn needs_reload(&self) -> bool { + matches!(fs::metadata(&self.config.path) + .and_then(|metadata| metadata.modified()), + Ok(modified) if modified > self.last_modified) + } +} + +impl std::fmt::Debug for Mmdb { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "MaxMind database {}", self.config.path) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use vrl::value::Value; + + #[test] + fn city_partial_lookup() { + let values = find_select( + "2.125.160.216", + "tests/data/GeoIP2-City-Test.mmdb", + Some(&[ + "location.latitude".to_string(), + "location.longitude".to_string(), + ]), + ) + .unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert( + "location".into(), + ObjectMap::from([ + ("latitude".into(), Value::from(51.75)), + ("longitude".into(), Value::from(-1.25)), + ]) + .into(), + ); + + assert_eq!(values, expected); + } + + #[test] + fn isp_lookup() { + let values = find("208.192.1.2", "tests/data/GeoIP2-ISP-Test.mmdb").unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("autonomous_system_number".into(), 701i64.into()); + expected.insert( + "autonomous_system_organization".into(), + "MCI Communications Services, Inc. d/b/a Verizon Business".into(), + ); + expected.insert("isp".into(), "Verizon Business".into()); + expected.insert("organization".into(), "Verizon Business".into()); + + assert_eq!(values, expected); + } + + #[test] + fn connection_type_lookup_success() { + let values = find( + "201.243.200.1", + "tests/data/GeoIP2-Connection-Type-Test.mmdb", + ) + .unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("connection_type".into(), "Corporate".into()); + + assert_eq!(values, expected); + } + + #[test] + fn lookup_missing() { + let values = find("10.1.12.1", "tests/data/custom-type.mmdb"); + + assert!(values.is_none()); + } + + #[test] + fn custom_mmdb_type() { + let values = find("208.192.1.2", "tests/data/custom-type.mmdb").unwrap(); + + let mut expected = ObjectMap::new(); + expected.insert("hostname".into(), "custom".into()); + expected.insert( + "nested".into(), + ObjectMap::from([ + ("hostname".into(), "custom".into()), + ("original_cidr".into(), "208.192.1.2/24".into()), + ]) + .into(), + ); + + assert_eq!(values, expected); + } + + fn find(ip: &str, database: &str) -> Option<ObjectMap> { + find_select(ip, database, None) + } + + fn find_select(ip: &str, database: &str, select: Option<&[String]>) -> Option<ObjectMap> { + Mmdb::new(MmdbConfig { + path: database.to_string(), + }) + .unwrap() + .find_table_rows( + Case::Insensitive, + &[Condition::Equals { + field: "ip", + value: ip.into(), + }], + select, + None, + ) + .unwrap() + .pop() + } +} diff --git a/src/enrichment_tables/mod.rs b/src/enrichment_tables/mod.rs index 15ec912d911be..97a93b0059022 100644 --- a/src/enrichment_tables/mod.rs +++ b/src/enrichment_tables/mod.rs @@ -10,6 +10,9 @@ pub mod file; #[cfg(feature = "enrichment-tables-geoip")] pub mod geoip; +#[cfg(feature = "enrichment-tables-mmdb")] +pub mod mmdb; + /// Configurable enrichment tables. #[configurable_component] #[derive(Clone, Debug)] @@ -25,6 +28,12 @@ pub enum EnrichmentTables { /// [geoip2]: https://www.maxmind.com/en/geoip2-databases #[cfg(feature = "enrichment-tables-geoip")] Geoip(geoip::GeoipConfig), + + /// Exposes data from a [MaxMind][maxmind] database as an enrichment table. + /// + /// [maxmind]: https://www.maxmind.com/ + #[cfg(feature = "enrichment-tables-mmdb")] + Mmdb(mmdb::MmdbConfig), } // TODO: Use `enum_dispatch` here.
@@ -34,6 +43,8 @@ impl NamedComponent for EnrichmentTables { Self::File(config) => config.get_component_name(), #[cfg(feature = "enrichment-tables-geoip")] Self::Geoip(config) => config.get_component_name(), + #[cfg(feature = "enrichment-tables-mmdb")] + Self::Mmdb(config) => config.get_component_name(), #[allow(unreachable_patterns)] _ => unimplemented!(), } diff --git a/tests/data/custom-type.mmdb b/tests/data/custom-type.mmdb new file mode 100644 index 0000000000000000000000000000000000000000..218558d69a8613186962d03805adfdb5ca929656 GIT binary patch literal 2614 zcmZA12Urt#9LDkY5K(dOiQ>jV7%p(*Kmmh$qNX7^LXhMz2?*A$-Fxp=yIZ^W9(E7A z_q24{rQLhneMzh~x##n||M&mnfA>6>2g#6ew3H-chty2Q0x6l?98<6bwv^FJYDG@P z*4PHyVmoY)9k3&I!p_(QyJ9!&jyCb`I28+U8cxRwT7S6^ZEXER? zgKjLvxi}B!%V;evATPv4xEPn>TbbX6+mD<-hVocEF21D`q)TW{luk;NCrhVD=Sim~n$ws$9nZis@hs%QbPoAk zN9W!ugchGko9 zhwZTgcEnED8M|Ot?1tU32ljN7{qLnW%RZQfeX$>=V}Bfg191=z#vwQqGjJFV#}POZ zGtq@vn2kA@i=%Kfj=`}w4)ZV{$KwQ?h?8(KPQj^IfYWd~&cK;ih_i4u7GW`#;2d;g zDbB@tI3MjdWnT+%5iZ6hxD=P+a$JFBxDr?4Y8mSZ9vSuvvd{ElIr`8q!;)c=D=>hS z7{n@EgVh+qu%q+pn;y7E#@a-V5TY_-iKdoNhwEh26B-h`ho7^7awBfS&A0`(%GgG* zZ@N7mbmXUTcBK`Kg}ouOFko4MYO^qGMq{SGG2KW{bz6~OAQ<*mdwfBEq&UyTH!F9v zy<6z3jmE4{dPiq6KRvB9H#^Ujlb`F#apl?*yZ0V&d+Xi7u;1mgLi@|g?PDSho(d}x z^2R(NZ4`Pe zm__b}%z}lbnKP!9_|2#<60C{Yhwdv5)*QM)N>Q~p9H{jM%&3x!!%;{ RBx3rkh~E Date: Thu, 14 Mar 2024 08:28:24 -0700 Subject: [PATCH 0147/1491] fix(docs): Use correct how_it_works section for Vector sink (#20095) It was accidentally pulling the source's. Fixes: #20092 Signed-off-by: Jesse Szwedko --- website/cue/reference/components/sinks/vector.cue | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/cue/reference/components/sinks/vector.cue b/website/cue/reference/components/sinks/vector.cue index 217b44c23222e..3e5c61940c7a4 100644 --- a/website/cue/reference/components/sinks/vector.cue +++ b/website/cue/reference/components/sinks/vector.cue @@ -77,7 +77,7 @@ components: sinks: vector: { configuration: base.components.sinks.vector.configuration - how_it_works: components.sources.vector.how_it_works + how_it_works: components.sinks.vector.how_it_works telemetry: metrics: { protobuf_decode_errors_total: components.sources.internal_metrics.output.metrics.protobuf_decode_errors_total From fafe8c50a4721fa3ddbea34e0641d3c145f14388 Mon Sep 17 00:00:00 2001 From: teslaedison <156734008+teslaedison@users.noreply.github.com> Date: Fri, 15 Mar 2024 02:38:43 +0800 Subject: [PATCH 0148/1491] chore: remove repetitive words (#20091) Signed-off-by: teslaedison --- lib/k8s-e2e-tests/src/lib.rs | 2 +- lib/k8s-e2e-tests/tests/vector-agent.rs | 2 +- lib/vector-buffers/examples/buffer_perf.rs | 2 +- lib/vector-buffers/src/lib.rs | 2 +- rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md | 4 ++-- src/transforms/lua/v2/mod.rs | 2 +- website/cue/reference/components/transforms/base/lua.cue | 2 +- website/cue/reference/releases/0.31.0.cue | 2 +- 8 files changed, 9 insertions(+), 9 deletions(-) diff --git a/lib/k8s-e2e-tests/src/lib.rs b/lib/k8s-e2e-tests/src/lib.rs index 6d5788cfa767c..076d2015f1713 100644 --- a/lib/k8s-e2e-tests/src/lib.rs +++ b/lib/k8s-e2e-tests/src/lib.rs @@ -307,7 +307,7 @@ where Ok(()) } -/// Create a pod for our other pods to have an affinity to to ensure they are all deployed on +/// Create a pod for our other pods to have an affinity to ensure they are all deployed on /// the same node. 
pub async fn create_affinity_pod( framework: &Framework, diff --git a/lib/k8s-e2e-tests/tests/vector-agent.rs b/lib/k8s-e2e-tests/tests/vector-agent.rs index 0c3c2c221445e..e15b690ed74e6 100644 --- a/lib/k8s-e2e-tests/tests/vector-agent.rs +++ b/lib/k8s-e2e-tests/tests/vector-agent.rs @@ -1614,7 +1614,7 @@ async fn multiple_ns() -> Result<(), Box> { expected_namespaces.insert(name); } - // Create a pod for our other pods to have an affinity to to ensure they are all deployed on + // Create a pod for our other pods to have an affinity to ensure they are all deployed on // the same node. let affinity_ns_name = format!("{}-affinity", pod_namespace); let affinity_ns = framework diff --git a/lib/vector-buffers/examples/buffer_perf.rs b/lib/vector-buffers/examples/buffer_perf.rs index c853ae5dcb946..c571c8b74e7f7 100644 --- a/lib/vector-buffers/examples/buffer_perf.rs +++ b/lib/vector-buffers/examples/buffer_perf.rs @@ -283,7 +283,7 @@ where variant .add_to_builder(&mut builder, Some(data_dir), id) - .expect("should not fail to to add variant to builder"); + .expect("should not fail to add variant to builder"); builder .build(String::from("buffer_perf"), Span::none()) diff --git a/lib/vector-buffers/src/lib.rs b/lib/vector-buffers/src/lib.rs index 0e0460e80d351..d24065e0736bd 100644 --- a/lib/vector-buffers/src/lib.rs +++ b/lib/vector-buffers/src/lib.rs @@ -74,7 +74,7 @@ pub enum WhenFull { impl Arbitrary for WhenFull { fn arbitrary(g: &mut Gen) -> Self { // TODO: We explicitly avoid generating "overflow" as a possible value because nothing yet - // supports handling it, and will be defaulted to to using "block" if they encounter + // supports handling it, and will be defaulted to using "block" if they encounter // "overflow". Thus, there's no reason to emit it here... yet. if bool::arbitrary(g) { WhenFull::Block diff --git a/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md b/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md index fbc130122a115..fc2afcc4733a3 100644 --- a/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md +++ b/rfcs/2020-03-06-1999-api-extensions-for-lua-transform.md @@ -503,7 +503,7 @@ Both log and metrics events are encoded using [external tagging](https://serde.r If a log event is created by the user inside the transform is a table, then, if default fields named according to the [global schema](https://vector.dev/docs/reference/global-options/#log_schema) are not present in such a table, then they are automatically added to the event. This rule does not apply to events having `userdata` type. **Example 1** - > The global schema is configured so that `message_key` is `"message"`, `timestamp_key` is `"timestamp"`, and `host_key` is is `"instance_id"`. + > The global schema is configured so that `message_key` is `"message"`, `timestamp_key` is `"timestamp"`, and `host_key` is `"instance_id"`. > > If a new event is created inside the user-defined Lua code as a table > @@ -652,7 +652,7 @@ The mapping between Vector data types and Lua data types is the following: | [`Timestamp`](https://vector.dev/docs/about/data-model/log/#timestamps) | [`userdata`](https://www.lua.org/pil/28.1.html) | There is no dedicated timestamp type in Lua. However, there is a standard library function [`os.date`](https://www.lua.org/manual/5.1/manual.html#pdf-os.date) which returns a table with fields `year`, `month`, `day`, `hour`, `min`, `sec`, and some others. 
Other standard library functions, such as [`os.time`](https://www.lua.org/manual/5.1/manual.html#pdf-os.time), support tables with these fields as arguments. Because of that, Vector timestamps passed to the transform are represented as `userdata` with the same set of accessible fields. In order to have one-to-one correspondence between Vector timestamps and Lua timestamps, `os.date` function from the standard library is patched to return not a table, but `userdata` with the same set of fields as it usually would return instead. This approach makes it possible to have both compatibility with the standard library functions and a dedicated data type for timestamps. | | [`Null`](https://vector.dev/docs/about/data-model/log/#null-values) | empty string | In Lua setting a table field to `nil` means deletion of this field. Furthermore, setting an array element to `nil` leads to deletion of this element. In order to avoid inconsistencies, already present `Null` values are visible represented as empty strings from Lua code, and it is impossible to create a new `Null` value in the user-defined code. | | [`Map`](https://vector.dev/docs/about/data-model/log/#maps) | [`userdata`](https://www.lua.org/pil/28.1.html) or [`table`](https://www.lua.org/pil/2.5.html) | Maps which are parts of events passed to the transform from Vector have `userdata` type. User-created maps have `table` type. Both types are converted to Vector's `Map` type when they are emitted from the transform. | -| [`Array`](https://vector.dev/docs/about/data-model/log/#arrays) | [`sequence`](https://www.lua.org/pil/11.1.html) | Sequences in Lua are a special case of tables. Because of that fact, the indexes can in principle start from any number. However, the convention in Lua is to to start indexes from 1 instead of 0, so Vector should adhere it. | +| [`Array`](https://vector.dev/docs/about/data-model/log/#arrays) | [`sequence`](https://www.lua.org/pil/11.1.html) | Sequences in Lua are a special case of tables. Because of that fact, the indexes can in principle start from any number. However, the convention in Lua is to start indexes from 1 instead of 0, so Vector should adhere it. | ### Configuration diff --git a/src/transforms/lua/v2/mod.rs b/src/transforms/lua/v2/mod.rs index 390d2232224eb..358505ac8c92d 100644 --- a/src/transforms/lua/v2/mod.rs +++ b/src/transforms/lua/v2/mod.rs @@ -54,7 +54,7 @@ pub enum BuildError { pub struct LuaConfig { /// The Lua program to initialize the transform with. /// - /// The program can be used to to import external dependencies, as well as define the functions + /// The program can be used to import external dependencies, as well as define the functions /// used for the various lifecycle hooks. However, it's not strictly required, as the lifecycle /// hooks can be configured directly with inline Lua source for each respective hook. #[configurable(metadata( diff --git a/website/cue/reference/components/transforms/base/lua.cue b/website/cue/reference/components/transforms/base/lua.cue index 3530f67267f4a..46549967e2f08 100644 --- a/website/cue/reference/components/transforms/base/lua.cue +++ b/website/cue/reference/components/transforms/base/lua.cue @@ -102,7 +102,7 @@ base: components: transforms: lua: configuration: { description: """ The Lua program to initialize the transform with. 
- The program can be used to to import external dependencies, as well as define the functions + The program can be used to import external dependencies, as well as define the functions used for the various lifecycle hooks. However, it's not strictly required, as the lifecycle hooks can be configured directly with inline Lua source for each respective hook. """ diff --git a/website/cue/reference/releases/0.31.0.cue b/website/cue/reference/releases/0.31.0.cue index 5b6e208428bf3..da2cae1940d5a 100644 --- a/website/cue/reference/releases/0.31.0.cue +++ b/website/cue/reference/releases/0.31.0.cue @@ -257,7 +257,7 @@ releases: "0.31.0": { type: "fix" scopes: ["http provider"] description: """ - The `Proxy-Authorization` header is now added to to HTTP requests from components + The `Proxy-Authorization` header is now added to HTTP requests from components that support HTTP proxies when authentication is used. """ contributors: ["syedriko"] From 0be97cdae0d97d9ccd9fb2e14501c9dd82fb6e10 Mon Sep 17 00:00:00 2001 From: neuronull Date: Fri, 15 Mar 2024 15:28:19 -0600 Subject: [PATCH 0149/1491] fix(datadog_logs sink): relax required input semantic meanings (#20086) * fix(datadog_logs): relax required input semantic meanings * fix changelog * fix changelog * feedback jesse- add unit tests * feedback bruce- changelog improvements --- changelog.d/dd_logs_semantic_meaning.fix.md | 7 ++ src/sinks/datadog/logs/config.rs | 4 +- src/sinks/datadog/logs/sink.rs | 87 +++++++++++++++++---- 3 files changed, 82 insertions(+), 16 deletions(-) create mode 100644 changelog.d/dd_logs_semantic_meaning.fix.md diff --git a/changelog.d/dd_logs_semantic_meaning.fix.md b/changelog.d/dd_logs_semantic_meaning.fix.md new file mode 100644 index 0000000000000..3700593631c09 --- /dev/null +++ b/changelog.d/dd_logs_semantic_meaning.fix.md @@ -0,0 +1,7 @@ +The `datadog_logs` sink no longer requires a semantic meaning input definition for `message` and `timestamp` fields. + +While the Datadog logs intake does handle these fields if they are present, they aren't required. + +The only impact is that configurations which enable the [Log Namespace](https://vector.dev/blog/log-namespacing/) feature and use a Source input to this sink which does not itself define a semantic meaning for `message` and `timestamp`, no longer need to manually set the semantic meaning for these two fields through a remap transform. + +Existing configurations that utilize the Legacy namespace are unaffected, as are configurations using the Vector namespace where the input source has defined the `message` and `timestamp` semantic meanings. 
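To illustrate what is no longer necessary (a sketch, not part of the committed change; component names are placeholders), such a pipeline previously needed a remap step along these lines between the source and the sink:

    [transforms.set_meanings]
    type = "remap"
    inputs = ["my_source"]
    source = '''
    set_semantic_meaning(.message, "message")
    set_semantic_meaning(.timestamp, "timestamp")
    '''

With this change, events flow to the `datadog_logs` sink without that intermediate transform.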
diff --git a/src/sinks/datadog/logs/config.rs b/src/sinks/datadog/logs/config.rs index 376c24f18cf7d..a0aa0856c5b96 100644 --- a/src/sinks/datadog/logs/config.rs +++ b/src/sinks/datadog/logs/config.rs @@ -164,8 +164,8 @@ impl SinkConfig for DatadogLogsConfig { fn input(&self) -> Input { let requirement = schema::Requirement::empty() - .required_meaning(meaning::MESSAGE, Kind::bytes()) - .required_meaning(meaning::TIMESTAMP, Kind::timestamp()) + .optional_meaning(meaning::MESSAGE, Kind::bytes()) + .optional_meaning(meaning::TIMESTAMP, Kind::timestamp()) .optional_meaning(meaning::HOST, Kind::bytes()) .optional_meaning(meaning::SOURCE, Kind::bytes()) .optional_meaning(meaning::SEVERITY, Kind::bytes()) diff --git a/src/sinks/datadog/logs/sink.rs b/src/sinks/datadog/logs/sink.rs index 155be1e1bc825..653bab3681b46 100644 --- a/src/sinks/datadog/logs/sink.rs +++ b/src/sinks/datadog/logs/sink.rs @@ -88,27 +88,27 @@ pub struct LogSink { protocol: String, } +// The Datadog logs intake does not require the fields that are set in this +// function. But if they are present in the event, we normalize the paths +// (and value in the case of timestamp) to something that intake understands. fn normalize_event(event: &mut Event) { let log = event.as_mut_log(); - let message_path = log - .message_path() - .expect("message is required (make sure the \"message\" semantic meaning is set)") - .clone(); - log.rename_key(&message_path, event_path!("message")); + + if let Some(message_path) = log.message_path().cloned().as_ref() { + log.rename_key(message_path, event_path!("message")); + } if let Some(host_path) = log.host_path().cloned().as_ref() { log.rename_key(host_path, event_path!("hostname")); } - let timestamp_path = log - .timestamp_path() - .expect("timestamp is required (make sure the \"timestamp\" semantic meaning is set)") - .clone(); - if let Some(Value::Timestamp(ts)) = log.remove(×tamp_path) { - log.insert( - event_path!("timestamp"), - Value::Integer(ts.timestamp_millis()), - ); + if let Some(timestamp_path) = log.timestamp_path().cloned().as_ref() { + if let Some(Value::Timestamp(ts)) = log.remove(timestamp_path) { + log.insert( + event_path!("timestamp"), + Value::Integer(ts.timestamp_millis()), + ); + } } } @@ -319,3 +319,62 @@ where self.run_inner(input).await } } + +#[cfg(test)] +mod tests { + + use chrono::Utc; + use vector_lib::{ + config::LegacyKey, + event::{Event, LogEvent}, + }; + use vrl::{event_path, owned_value_path, path}; + + use super::normalize_event; + + #[test] + fn normalize_event_doesnt_require() { + let mut log = LogEvent::default(); + log.insert(event_path!("foo"), "bar"); + + let mut event = Event::Log(log); + normalize_event(&mut event); + + let log = event.as_log(); + + assert!(!log.contains(event_path!("message"))); + assert!(!log.contains(event_path!("timestamp"))); + assert!(!log.contains(event_path!("hostname"))); + } + + #[test] + fn normalize_event_normalizes() { + let mut log = LogEvent::from("hello"); + let namespace = log.namespace(); + + namespace.insert_standard_vector_source_metadata(&mut log, "this_source", Utc::now()); + + let legacy_key = Some(owned_value_path!("host")); + let legacy_key = legacy_key.as_ref().map(LegacyKey::Overwrite); + namespace.insert_source_metadata( + "this_source", + &mut log, + legacy_key, + path!("host"), + "the_host", + ); + + let mut event = Event::Log(log); + normalize_event(&mut event); + + let log = event.as_log(); + + assert!(log.contains(event_path!("message"))); + assert!(log.contains(event_path!("timestamp"))); + 
assert!(log + .get_timestamp() + .expect("should have timestamp") + .is_integer()); + assert!(log.contains(event_path!("hostname"))); + } +} From ad8a8690b7707540dd24a85e8ada8c51bab150fe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 15 Mar 2024 18:02:19 -0600 Subject: [PATCH 0150/1491] chore(deps): Bump tokio-test from 0.4.3 to 0.4.4 (#20101) Bumps [tokio-test](https://github.com/tokio-rs/tokio) from 0.4.3 to 0.4.4. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-test-0.4.3...tokio-test-0.4.4) --- updated-dependencies: - dependency-name: tokio-test dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/vector-buffers/Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2a4c7bb7f6887..f931c5ceddf84 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9254,9 +9254,9 @@ dependencies = [ [[package]] name = "tokio-test" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e89b3cbabd3ae862100094ae433e1def582cf86451b4e9bf83aa7ac1d8a7d719" +checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7" dependencies = [ "async-stream", "bytes 1.5.0", diff --git a/Cargo.toml b/Cargo.toml index 75f10808c0123..6dcfb97aae3f8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -383,7 +383,7 @@ rstest = {version = "0.18.2"} tempfile = "3.10.1" test-generator = "0.3.1" tokio = { version = "1.36.0", features = ["test-util"] } -tokio-test = "0.4.3" +tokio-test = "0.4.4" tower-test = "0.4.0" vector-lib = { path = "lib/vector-lib", default-features = false, features = ["vrl", "test"] } vrl.workspace = true diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index 5b6a70bef6a8e..f217d0b48ff0d 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -46,7 +46,7 @@ quickcheck = "1.0" rand = "0.8.5" serde_yaml = { version = "0.9", default-features = false } temp-dir = "0.1.12" -tokio-test = "0.4.3" +tokio-test = "0.4.4" tracing-fluent-assertions = { version = "0.3" } tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt", "registry", "std", "ansi"] } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 380286f3319b2..baf8f8dc36ff7 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -83,7 +83,7 @@ quickcheck = "1" quickcheck_macros = "1" proptest = "1.4" similar-asserts = "1.5.0" -tokio-test = "0.4.3" +tokio-test = "0.4.4" toml.workspace = true ndarray = "0.15.6" ndarray-stats = "0.5.1" From 88606447dd9f874f27f06dc17c3e2f0b2083e221 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 08:48:54 -0400 Subject: [PATCH 0151/1491] chore(deps): Bump the aws group with 1 update (#20089) Bumps the aws group with 1 update: [aws-credential-types](https://github.com/smithy-lang/smithy-rs). 
Updates `aws-credential-types` from 1.1.7 to 1.1.8 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-credential-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 5 +++-- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f931c5ceddf84..244a05a94ff71 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -762,9 +762,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "273fa47dafc9ef14c2c074ddddbea4561ff01b7f68d5091c0e9737ced605c01d" +checksum = "fa8587ae17c8e967e4b05a62d495be2fb7701bec52a97f7acfe8a29f938384c8" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -1166,6 +1166,7 @@ dependencies = [ "pin-project-lite", "tokio", "tracing 0.1.40", + "zeroize", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index 6dcfb97aae3f8..a629a8e19d19c 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -191,7 +191,7 @@ aws-sdk-sts = { version = "1.3.1", default-features = false, features = ["behavi aws-types = { version = "1.1.7", default-features = false, optional = true } aws-sigv4 = { version = "1.1.7", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest", "credentials-process"], optional = true } -aws-credential-types = { version = "1.1.7", default-features = false, features = ["hardcoded-credentials"], optional = true } +aws-credential-types = { version = "1.1.8", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } aws-smithy-types = { version = "1.1.8", default-features = false, optional = true } aws-smithy-runtime-api = { version = "1.2.0", default-features = false, optional = true } From 494d7e2a7bff5c7bebb90925b5f451a99e3f0d5c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 08:49:07 -0400 Subject: [PATCH 0152/1491] chore(ci): Bump docker/setup-buildx-action from 3.1.0 to 3.2.0 (#20097) Bumps [docker/setup-buildx-action](https://github.com/docker/setup-buildx-action) from 3.1.0 to 3.2.0. - [Release notes](https://github.com/docker/setup-buildx-action/releases) - [Commits](https://github.com/docker/setup-buildx-action/compare/v3.1.0...v3.2.0) --- updated-dependencies: - dependency-name: docker/setup-buildx-action dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/environment.yml | 2 +- .github/workflows/publish.yml | 2 +- .github/workflows/regression.yml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml index 133089d2f41f5..e3d8fe2884409 100644 --- a/.github/workflows/environment.yml +++ b/.github/workflows/environment.yml @@ -43,7 +43,7 @@ jobs: - name: Set up QEMU uses: docker/setup-qemu-action@v3.0.0 - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3.1.0 + uses: docker/setup-buildx-action@v3.2.0 - name: Login to DockerHub uses: docker/login-action@v3 if: github.ref == 'refs/heads/master' diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index f84ff29b56a44..3461d0bdf91a1 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -492,7 +492,7 @@ jobs: platforms: all - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3.1.0 + uses: docker/setup-buildx-action@v3.2.0 with: version: latest install: true diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 35fce84b6a240..719ca5b114bd7 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -303,7 +303,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3.1.0 + uses: docker/setup-buildx-action@v3.2.0 - name: Build 'vector' target image uses: docker/build-push-action@v5.2.0 @@ -341,7 +341,7 @@ jobs: - name: Set up Docker Buildx id: buildx - uses: docker/setup-buildx-action@v3.1.0 + uses: docker/setup-buildx-action@v3.2.0 - name: Build 'vector' target image uses: docker/build-push-action@v5.2.0 From cb4a5e6257508534295dc79c8af2768c7e74284d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:49:18 +0000 Subject: [PATCH 0153/1491] chore(ci): Bump docker/build-push-action from 5.2.0 to 5.3.0 (#20098) Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 5.2.0 to 5.3.0. - [Release notes](https://github.com/docker/build-push-action/releases) - [Commits](https://github.com/docker/build-push-action/compare/v5.2.0...v5.3.0) --- updated-dependencies: - dependency-name: docker/build-push-action dependency-type: direct:production update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/environment.yml | 2 +- .github/workflows/regression.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/environment.yml b/.github/workflows/environment.yml index e3d8fe2884409..af77216ef4d1d 100644 --- a/.github/workflows/environment.yml +++ b/.github/workflows/environment.yml @@ -64,7 +64,7 @@ jobs: org.opencontainers.image.title=Vector development environment org.opencontainers.image.url=https://github.com/vectordotdev/vector - name: Build and push - uses: docker/build-push-action@v5.2.0 + uses: docker/build-push-action@v5.3.0 with: context: . 
file: ./scripts/environment/Dockerfile diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 719ca5b114bd7..798bc549f60bd 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -306,7 +306,7 @@ jobs: uses: docker/setup-buildx-action@v3.2.0 - name: Build 'vector' target image - uses: docker/build-push-action@v5.2.0 + uses: docker/build-push-action@v5.3.0 with: context: baseline-vector/ cache-from: type=gha @@ -344,7 +344,7 @@ jobs: uses: docker/setup-buildx-action@v3.2.0 - name: Build 'vector' target image - uses: docker/build-push-action@v5.2.0 + uses: docker/build-push-action@v5.3.0 with: context: comparison-vector/ cache-from: type=gha From 8737b24807ee6b00a20663f951ec0ce53682530e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:51:47 +0000 Subject: [PATCH 0154/1491] chore(deps): Bump syn from 2.0.52 to 2.0.53 (#20111) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.52 to 2.0.53. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.52...2.0.53) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 244a05a94ff71..a95c45a312437 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "strum 0.25.0", - "syn 2.0.52", + "syn 2.0.53", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -708,7 +708,7 @@ checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -1473,7 +1473,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -1603,7 +1603,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", "syn_derive", ] @@ -2035,7 +2035,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2563,7 +2563,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2635,7 +2635,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2668,7 +2668,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -2773,7 +2773,7 @@ checksum = 
"67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -3054,7 +3054,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -3066,7 +3066,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -3086,7 +3086,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -3488,7 +3488,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5279,7 +5279,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5399,7 +5399,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "regex", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5849,7 +5849,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -5861,7 +5861,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6049,7 +6049,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6334,7 +6334,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6422,7 +6422,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6699,7 +6699,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.79", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -6902,7 +6902,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.52", + "syn 2.0.53", "tempfile", "which 4.4.2", ] @@ -6930,7 +6930,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -7704,7 +7704,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.52", + "syn 2.0.53", "unicode-ident", ] @@ -8153,7 +8153,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -8164,7 +8164,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -8226,7 +8226,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -8299,7 +8299,7 @@ dependencies = [ "darling 0.20.8", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -8578,7 +8578,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 
2.0.53", ] [[package]] @@ -8768,7 +8768,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -8781,7 +8781,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -8824,9 +8824,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.52" +version = "2.0.53" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b699d15b36d1f02c3e7c69f8ffef53de37aefae075d8488d4ba1a7788d574a07" +checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -8842,7 +8842,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9010,7 +9010,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9158,7 +9158,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9405,7 +9405,7 @@ dependencies = [ "proc-macro2 1.0.79", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9508,7 +9508,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9742,7 +9742,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -9772,7 +9772,7 @@ checksum = "ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] @@ -10332,7 +10332,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.52", + "syn 2.0.53", "tracing 0.1.40", ] @@ -10345,7 +10345,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.52", + "syn 2.0.53", "vector-config", "vector-config-common", ] @@ -10760,7 +10760,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", "wasm-bindgen-shared", ] @@ -10794,7 +10794,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11249,7 +11249,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.52", + "syn 2.0.53", ] [[package]] From 7e3e60fa447eab3b73f27e2c98ed1f2c4d19fe94 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:51:57 +0000 Subject: [PATCH 0155/1491] chore(deps): Bump os_info from 3.8.0 to 3.8.1 (#20112) Bumps [os_info](https://github.com/stanislav-tkach/os_info) from 3.8.0 to 3.8.1. 
- [Release notes](https://github.com/stanislav-tkach/os_info/releases) - [Changelog](https://github.com/stanislav-tkach/os_info/blob/master/CHANGELOG.md) - [Commits](https://github.com/stanislav-tkach/os_info/compare/v3.8.0...v3.8.1) --- updated-dependencies: - dependency-name: os_info dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- vdev/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a95c45a312437..b369bc390cfe5 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6132,9 +6132,9 @@ dependencies = [ [[package]] name = "os_info" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52a07930afc1bd77ac9e1101dc18d3fc4986c6568e939c31d1c26657eb0ccbf5" +checksum = "6cbb46d5d01695d7a1fb8be5f0d1968bd2b2b8ba1d1b3e7062ce2a0593e57af1" dependencies = [ "log", "windows-sys 0.52.0", diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index 5690043c0261f..d4d14ebae27bb 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -25,7 +25,7 @@ indicatif = { version = "0.17.8", features = ["improved_unicode"] } itertools = "0.12.1" log = "0.4.21" once_cell = "1.19" -os_info = { version = "3.8.0", default-features = false } +os_info = { version = "3.8.1", default-features = false } # watch https://github.com/epage/anstyle for official interop with Clap owo-colors = { version = "4.0.0", features = ["supports-colors"] } paste = "1.0.14" From 068b19918fd723e26b9fc5c6de289493d9ad55de Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:52:13 +0000 Subject: [PATCH 0156/1491] chore(deps): Bump async-recursion from 1.0.5 to 1.1.0 (#20114) Bumps [async-recursion](https://github.com/dcchut/async-recursion) from 1.0.5 to 1.1.0. - [Release notes](https://github.com/dcchut/async-recursion/releases) - [Commits](https://github.com/dcchut/async-recursion/compare/v1.0.5...v1.1.0) --- updated-dependencies: - dependency-name: async-recursion dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/vector-buffers/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b369bc390cfe5..c6db411b3345a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -645,9 +645,9 @@ dependencies = [ [[package]] name = "async-recursion" -version = "1.0.5" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" +checksum = "30c5ef0ede93efbf733c1a727f3b6b5a1060bbedd5600183e66f6e4be4af0ec5" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index f217d0b48ff0d..cb9f81c4e4d67 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -async-recursion = "1.0.5" +async-recursion = "1.1.0" async-stream = "0.3.5" async-trait = { version = "0.1", default-features = false } bytecheck = { version = "0.6.9", default-features = false, features = ["std"] } From a1902c2897c23e40d18dc96df333461c0f65ef4a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:52:23 +0000 Subject: [PATCH 0157/1491] chore(deps): Bump async-trait from 0.1.77 to 0.1.78 (#20115) Bumps [async-trait](https://github.com/dtolnay/async-trait) from 0.1.77 to 0.1.78. - [Release notes](https://github.com/dtolnay/async-trait/releases) - [Commits](https://github.com/dtolnay/async-trait/compare/0.1.77...0.1.78) --- updated-dependencies: - dependency-name: async-trait dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c6db411b3345a..9dedc0ce9d93a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -702,9 +702,9 @@ checksum = "b4eb2cdb97421e01129ccb49169d8279ed21e829929144f4a22a6e54ac549ca1" [[package]] name = "async-trait" -version = "0.1.77" +version = "0.1.78" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c980ee35e870bd1a4d2c8294d4c04d0499e67bca1e4b5cefcc693c2fa00caea9" +checksum = "461abc97219de0eaaf81fe3ef974a540158f3d079c2ab200f891f1a2ef201e85" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", diff --git a/Cargo.toml b/Cargo.toml index a629a8e19d19c..58ee6671b6074 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -156,7 +156,7 @@ loki-logproto = { path = "lib/loki-logproto", optional = true } # Tokio / Futures async-stream = { version = "0.3.5", default-features = false } -async-trait = { version = "0.1.77", default-features = false } +async-trait = { version = "0.1.78", default-features = false } futures = { version = "0.3.30", default-features = false, features = ["compat", "io-compat"], package = "futures" } tokio = { version = "1.36.0", default-features = false, features = ["full"] } tokio-openssl = { version = "0.6.4", default-features = false } From 3e8c6a48451233fb7b60b4ca0a5139986745f80e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:52:32 +0000 Subject: [PATCH 0158/1491] chore(deps): Bump serde_yaml from 0.9.32 to 0.9.33 (#20116) Bumps [serde_yaml](https://github.com/dtolnay/serde-yaml) from 0.9.32 to 0.9.33. - [Release notes](https://github.com/dtolnay/serde-yaml/releases) - [Commits](https://github.com/dtolnay/serde-yaml/compare/0.9.32...0.9.33) --- updated-dependencies: - dependency-name: serde_yaml dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 18 +++++++++--------- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 9dedc0ce9d93a..b1ad819f92d0e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4833,7 +4833,7 @@ dependencies = [ "secrecy", "serde", "serde_json", - "serde_yaml 0.9.32", + "serde_yaml 0.9.33", "thiserror", "tokio", "tokio-util", @@ -8316,9 +8316,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.32" +version = "0.9.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8fd075d994154d4a774f95b51fb96bdc2832b0ea48425c92546073816cda1f2f" +checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9" dependencies = [ "indexmap 2.2.5", "itoa", @@ -9885,9 +9885,9 @@ dependencies = [ [[package]] name = "unsafe-libyaml" -version = "0.2.10" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ab4c90930b95a82d00dc9e9ac071b4991924390d46cbd0dfe566148667605e4b" +checksum = "673aac59facbab8a9007c7f6108d11f63b603f7cabff99fabf650fea5c32b861" [[package]] name = "untrusted" @@ -9993,7 +9993,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "serde_yaml 0.9.32", + "serde_yaml 0.9.33", "sha2", "tempfile", "toml", @@ -10150,7 +10150,7 @@ dependencies = [ "serde_bytes", "serde_json", "serde_with 3.7.0", - "serde_yaml 0.9.32", + "serde_yaml 0.9.33", "sha2", "similar-asserts", "smallvec", @@ -10247,7 +10247,7 @@ dependencies = [ "rand 0.8.5", "rkyv", "serde", - "serde_yaml 0.9.32", + "serde_yaml 0.9.33", "snafu 0.7.5", "temp-dir", "tokio", @@ -10405,7 +10405,7 @@ dependencies = [ "serde", "serde_json", "serde_with 3.7.0", - "serde_yaml 0.9.32", + "serde_yaml 0.9.33", "similar-asserts", "smallvec", "snafu 0.7.5", diff --git a/Cargo.toml b/Cargo.toml index 58ee6671b6074..ffefc0df7b34a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -216,7 +216,7 @@ serde-toml-merge = { version = "0.3.5", default-features = false } serde_bytes = { version = "0.11.14", default-features = false, features = ["std"], optional = true } serde_json.workspace = true serde_with = { version = "3.7.0", default-features = false, features = ["macros", "std"] } -serde_yaml = { version = "0.9.32", default-features = false } +serde_yaml = { version = "0.9.33", default-features = false } # Messagepack rmp-serde = { version = "1.1.2", default-features = false, optional = true } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index baf8f8dc36ff7..fccd51ff78d10 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -90,7 +90,7 @@ ndarray-stats = "0.5.1" noisy_float = "0.2.0" rand = "0.8.5" rand_distr = "0.4.3" -serde_yaml = { version = "0.9.32", default-features = false } +serde_yaml = { version = "0.9.33", default-features = false } tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt", "ansi", "registry"] } vector-common = { path = "../vector-common", default-features = false, features = ["test"] } diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index d4d14ebae27bb..580673c189c30 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -33,7 +33,7 @@ regex = { version = "1.10.3", default-features = false, features = ["std", "perf reqwest = { version = "0.11", features = ["json", "blocking"] } serde.workspace = true serde_json.workspace = true 
-serde_yaml = "0.9.32" +serde_yaml = "0.9.33" sha2 = "0.10.8" tempfile = "3.10.1" toml.workspace = true From 5c33628279443068365616783b6a2d5466e8a548 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 12:52:46 +0000 Subject: [PATCH 0159/1491] chore(deps): Bump mongodb from 2.8.1 to 2.8.2 (#20117) Bumps [mongodb](https://github.com/mongodb/mongo-rust-driver) from 2.8.1 to 2.8.2. - [Release notes](https://github.com/mongodb/mongo-rust-driver/releases) - [Commits](https://github.com/mongodb/mongo-rust-driver/compare/v2.8.1...v2.8.2) --- updated-dependencies: - dependency-name: mongodb dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b1ad819f92d0e..e44aa6f8553e4 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5410,9 +5410,9 @@ checksum = "9366861eb2a2c436c20b12c8dbec5f798cea6b47ad99216be0282942e2c81ea0" [[package]] name = "mongodb" -version = "2.8.1" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de59562e5c71656c098d8e966641b31da87b89dc3dcb6e761d3b37dcdfa0cb72" +checksum = "ef206acb1b72389b49bc9985efe7eb1f8a9bb18e5680d262fac26c07f44025f1" dependencies = [ "async-trait", "base64 0.13.1", diff --git a/Cargo.toml b/Cargo.toml index ffefc0df7b34a..aef32151c7e49 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -302,7 +302,7 @@ logfmt = { version = "0.0.2", default-features = false, optional = true } lru = { version = "0.12.3", default-features = false, optional = true } maxminddb = { version = "0.24.0", default-features = false, optional = true } md-5 = { version = "0.10", default-features = false, optional = true } -mongodb = { version = "2.8.1", default-features = false, features = ["tokio-runtime"], optional = true } +mongodb = { version = "2.8.2", default-features = false, features = ["tokio-runtime"], optional = true } async-nats = { version = "0.33.0", default-features = false, optional = true } nkeys = { version = "0.4.0", default-features = false, optional = true } nom = { version = "7.1.3", default-features = false, optional = true } From 7c9b4c59c06a49c46e1f0f84faa6114dcce5c642 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 13:26:17 +0000 Subject: [PATCH 0160/1491] chore(deps): Bump the clap group with 1 update (#20108) * chore(deps): Bump the clap group with 1 update Bumps the clap group with 1 update: [clap](https://github.com/clap-rs/clap). Updates `clap` from 4.5.2 to 4.5.3 - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/v4.5.2...v4.5.3) --- updated-dependencies: - dependency-name: clap dependency-type: direct:production update-type: version-update:semver-patch dependency-group: clap ... 
Signed-off-by: dependabot[bot] * regenerate licenses Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 36 +++++++++++++++++++++--------------- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 1 + 3 files changed, 23 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e44aa6f8553e4..23f7c14cc4249 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1986,9 +1986,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.2" +version = "4.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b230ab84b0ffdf890d5a10abdbc8b83ae1c4918275daea1ab8801f71536b2651" +checksum = "949626d00e063efc93b6dca932419ceb5432f99769911c0b995f7e884c778813" dependencies = [ "clap_builder", "clap_derive", @@ -2000,7 +2000,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb9b20c0dd58e4c2e991c8d203bbeb76c11304d1011659686b5b644bc29aa478" dependencies = [ - "clap 4.5.2", + "clap 4.5.3", "log", ] @@ -2023,16 +2023,16 @@ version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "885e4d7d5af40bfb99ae6f9433e292feac98d452dcb3ec3d25dfe7552b77da8c" dependencies = [ - "clap 4.5.2", + "clap 4.5.3", ] [[package]] name = "clap_derive" -version = "4.5.0" +version = "4.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "307bc0538d5f0f83b8248db3087aa92fe504e4691294d0c96c0eabc33f47ba47" +checksum = "90239a040c80f5e14809ca132ddc4176ab33d5e17e49691793296e3fcb34d72f" dependencies = [ - "heck 0.4.1", + "heck 0.5.0", "proc-macro2 1.0.79", "quote 1.0.35", "syn 2.0.53", @@ -2360,7 +2360,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.2", + "clap 4.5.3", "criterion-plot", "futures 0.3.30", "is-terminal", @@ -3870,6 +3870,12 @@ version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" +[[package]] +name = "heck" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea" + [[package]] name = "heim" version = "0.1.0-rc.1" @@ -9973,7 +9979,7 @@ dependencies = [ "anyhow", "cached", "chrono", - "clap 4.5.2", + "clap 4.5.3", "clap-verbosity-flag", "clap_complete", "confy", @@ -10051,7 +10057,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.5.2", + "clap 4.5.3", "colored", "console-subscriber", "criterion", @@ -10202,7 +10208,7 @@ dependencies = [ "anyhow", "async-trait", "chrono", - "clap 4.5.2", + "clap 4.5.3", "futures 0.3.30", "graphql_client", "indoc", @@ -10225,7 +10231,7 @@ dependencies = [ "async-trait", "bytecheck", "bytes 1.5.0", - "clap 4.5.2", + "clap 4.5.3", "crc32fast", "criterion", "crossbeam-queue", @@ -10485,7 +10491,7 @@ dependencies = [ name = "vector-vrl-cli" version = "0.1.0" dependencies = [ - "clap 4.5.2", + "clap 4.5.3", "vector-vrl-functions", "vrl", ] @@ -10504,7 +10510,7 @@ dependencies = [ "ansi_term", "chrono", "chrono-tz", - "clap 4.5.2", + "clap 4.5.3", "enrichment", "glob", "prettydiff", @@ -10564,7 +10570,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.5.2", + "clap 4.5.3", "codespan-reporting", "community-id", "crypto_secretbox", diff --git a/Cargo.toml b/Cargo.toml index aef32151c7e49..e951a6c60f376 100644 --- 
a/Cargo.toml +++ b/Cargo.toml @@ -130,7 +130,7 @@ members = [ [workspace.dependencies] chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } -clap = { version = "4.5.2", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } +clap = { version = "4.5.3", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } indexmap = { version = "2.2.5", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.5", default-features = false } proptest = "1.4" diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 52eaaa6b918b2..50204bc3d070b 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -239,6 +239,7 @@ h2,https://github.com/hyperium/h2,MIT,"Carl Lerche , Sean McA hash_hasher,https://github.com/Fraser999/Hash-Hasher,Apache-2.0 OR MIT,Fraser Hutchison hashbrown,https://github.com/rust-lang/hashbrown,MIT OR Apache-2.0,Amanieu d'Antras headers,https://github.com/hyperium/headers,MIT,Sean McArthur +heck,https://github.com/withoutboats/heck,MIT OR Apache-2.0,The heck Authors heck,https://github.com/withoutboats/heck,MIT OR Apache-2.0,Without Boats heim,https://github.com/heim-rs/heim,Apache-2.0 OR MIT,svartalf hermit-abi,https://github.com/hermitcore/hermit-rs,MIT OR Apache-2.0,Stefan Lankes From 4c7becebe8ec38f2a60d25a97bafa3d6c9a12fd7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 18 Mar 2024 15:16:11 +0000 Subject: [PATCH 0161/1491] chore(deps): Bump tokio-stream from 0.1.14 to 0.1.15 (#20100) Bumps [tokio-stream](https://github.com/tokio-rs/tokio) from 0.1.14 to 0.1.15. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-stream-0.1.14...tokio-stream-0.1.15) --- updated-dependencies: - dependency-name: tokio-stream dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 23f7c14cc4249..70b2b76cf0bbb 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9249,9 +9249,9 @@ dependencies = [ [[package]] name = "tokio-stream" -version = "0.1.14" +version = "0.1.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "397c988d37662c7dda6d2208364a706264bf3d6138b11d436cbac0ad38832842" +checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" dependencies = [ "futures-core", "pin-project-lite", diff --git a/Cargo.toml b/Cargo.toml index e951a6c60f376..08a68b66caaf1 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -160,7 +160,7 @@ async-trait = { version = "0.1.78", default-features = false } futures = { version = "0.3.30", default-features = false, features = ["compat", "io-compat"], package = "futures" } tokio = { version = "1.36.0", default-features = false, features = ["full"] } tokio-openssl = { version = "0.6.4", default-features = false } -tokio-stream = { version = "0.1.14", default-features = false, features = ["net", "sync", "time"] } +tokio-stream = { version = "0.1.15", default-features = false, features = ["net", "sync", "time"] } tokio-util = { version = "0.7", default-features = false, features = ["io", "time"] } console-subscriber = { version = "0.2.0", default-features = false, optional = true } diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 9231f2970c4e0..1b658944f1e3f 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -19,7 +19,7 @@ anyhow = { version = "1.0.81", default-features = false, features = ["std"] } async-trait = { version = "0.1", default-features = false } futures = { version = "0.3", default-features = false, features = ["compat", "io-compat"] } tokio = { version = "1.36.0", default-features = false, features = ["macros", "rt", "sync"] } -tokio-stream = { version = "0.1.14", default-features = false, features = ["sync"] } +tokio-stream = { version = "0.1.15", default-features = false, features = ["sync"] } # GraphQL graphql_client = { version = "0.13.0", default-features = false, features = ["graphql_query_derive"] } From 5e7248cfaa787126cb7654e0523d6ced8c06f245 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 18 Mar 2024 15:24:16 -0400 Subject: [PATCH 0162/1491] enhancement(remap): do not filter out file contents from error logs (#20125) * Revert "fix(remap): filter out file contents from error logs (#19356)" This reverts commit 2ad7097b10112f1bd086d6a58c3bce47eb5652ae. * Add changelog entry Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- .../remove_remap_file_filter.enhancement.md | 2 + src/transforms/remap.rs | 46 ++----------------- 2 files changed, 6 insertions(+), 42 deletions(-) create mode 100644 changelog.d/remove_remap_file_filter.enhancement.md diff --git a/changelog.d/remove_remap_file_filter.enhancement.md b/changelog.d/remove_remap_file_filter.enhancement.md new file mode 100644 index 0000000000000..495920e7c6b0e --- /dev/null +++ b/changelog.d/remove_remap_file_filter.enhancement.md @@ -0,0 +1,2 @@ +The `remap` component no longer filters out the file contents from error messages when the VRL +program is passed in via the `file` option. 
diff --git a/src/transforms/remap.rs b/src/transforms/remap.rs
index d0fe0bbccd49f..487174bf24b55 100644
--- a/src/transforms/remap.rs
+++ b/src/transforms/remap.rs
@@ -18,7 +18,7 @@ use vector_vrl_functions::set_semantic_meaning::MeaningList;
 use vrl::compiler::runtime::{Runtime, Terminate};
 use vrl::compiler::state::ExternalEnv;
 use vrl::compiler::{CompileConfig, ExpressionError, Function, Program, TypeState, VrlRuntime};
-use vrl::diagnostic::{DiagnosticList, DiagnosticMessage, Formatter, Note};
+use vrl::diagnostic::{DiagnosticMessage, Formatter, Note};
 use vrl::path;
 use vrl::path::ValuePath;
 use vrl::value::{Kind, Value};
@@ -135,25 +135,6 @@ pub struct RemapConfig {
     pub runtime: VrlRuntime,
 }
 
-/// The propagated errors should not contain file contents to prevent exposing sensitive data.
-fn redacted_diagnostics(source: &str, diagnostics: DiagnosticList) -> String {
-    let placeholder = '*';
-    // The formatter depends on whitespaces.
-    let redacted_source: String = source
-        .chars()
-        .map(|c| if c.is_whitespace() { c } else { placeholder })
-        .collect();
-    // Remove placeholder chars to hide the content length.
-    format!(
-        "{}{}",
-        "File contents were redacted.",
-        Formatter::new(&redacted_source, diagnostics)
-            .colored()
-            .to_string()
-            .replace(placeholder, " ")
-    )
-}
-
 impl RemapConfig {
     fn compile_vrl_program(
         &self,
@@ -192,12 +173,11 @@ impl RemapConfig {
         config.set_custom(MeaningList::default());
 
         compile_vrl(&source, &functions, &state, config)
-            .map_err(|diagnostics| match self.file {
-                None => Formatter::new(&source, diagnostics)
+            .map_err(|diagnostics| {
+                Formatter::new(&source, diagnostics)
                     .colored()
                     .to_string()
-                    .into(),
-                Some(_) => redacted_diagnostics(&source, diagnostics).into(),
+                    .into()
             })
             .map(|result| {
                 (
@@ -625,7 +605,6 @@ pub enum BuildError {
 #[cfg(test)]
 mod tests {
     use std::collections::{HashMap, HashSet};
-    use std::io::Write;
     use std::sync::Arc;
 
     use indoc::{formatdoc, indoc};
@@ -649,7 +628,6 @@ mod tests {
         transforms::OutputBuffer,
     };
     use chrono::DateTime;
-    use tempfile::NamedTempFile;
    use tokio::sync::mpsc;
     use tokio_stream::wrappers::ReceiverStream;
     use vector_lib::enrichment::TableRegistry;
@@ -2021,20 +1999,4 @@
     fn do_not_emit_metrics_when_errored() {
         assert_no_metrics("parse_key_value!(.message)".to_string());
     }
-
-    #[test]
-    fn redact_file_contents_from_diagnostics() {
-        let mut tmp_file = NamedTempFile::new().expect("Failed to create temporary file");
-        tmp_file
-            .write_all(b"password: top secret")
-            .expect("Failed to write to temporary file");
-
-        let config = RemapConfig {
-            file: Some(tmp_file.path().to_path_buf()),
-            ..Default::default()
-        };
-        let config_error = remap(config).unwrap_err().to_string();
-        assert!(config_error.contains("File contents were redacted."));
-        assert!(!config_error.contains("top secret"));
-    }
 }

From 12c1866214e55869275afa5fc0741f2af8baa0fd Mon Sep 17 00:00:00 2001
From: neuronull
Date: Mon, 18 Mar 2024 15:01:37 -0600
Subject: [PATCH 0163/1491] chore(testing): further adjustments to component
 validation framework (#20043)

* chore(testing): further adjustments to component validation framework

* feedback bruce- re-use detect json

* feedback bruce- simplify
---
 src/components/validation/resources/event.rs |  3 +-
 src/components/validation/resources/http.rs  | 35 ++++++++++++++++++--
 src/components/validation/runner/mod.rs      | 23 ++++++++++++-
 3 files changed, 56 insertions(+), 5 deletions(-)

diff --git a/src/components/validation/resources/event.rs b/src/components/validation/resources/event.rs
index 4c03e3b4eed56..6d4ce818225ab 100644
--- a/src/components/validation/resources/event.rs
+++ b/src/components/validation/resources/event.rs
@@ -2,6 +2,7 @@ use std::collections::HashMap;
 
 use bytes::BytesMut;
 use serde::Deserialize;
+use serde_json::Value;
 use snafu::Snafu;
 use tokio_util::codec::Encoder as _;
 
@@ -43,7 +44,7 @@ pub enum EventData {
     /// A simple log event.
     Log(String),
     /// A log event built from key-value pairs
-    LogBuilder(HashMap<String, String>),
+    LogBuilder(HashMap<String, Value>),
 }
 
 impl EventData {
diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs
index 4932a2ff456c9..36f3a67e36619 100644
--- a/src/components/validation/resources/http.rs
+++ b/src/components/validation/resources/http.rs
@@ -11,7 +11,7 @@ use axum::{
     routing::{MethodFilter, MethodRouter},
     Router,
 };
-use bytes::BytesMut;
+use bytes::{BufMut as _, BytesMut};
 use http::{Method, Request, StatusCode, Uri};
 use hyper::{Body, Client, Server};
 use tokio::{
@@ -24,7 +24,10 @@ use crate::components::validation::{
     sync::{Configuring, TaskCoordinator},
     RunnerMetrics,
 };
-use vector_lib::{event::Event, EstimatedJsonEncodedSizeOf};
+use vector_lib::{
+    codecs::encoding::Framer, codecs::encoding::Serializer::Json,
+    codecs::CharacterDelimitedEncoder, event::Event, EstimatedJsonEncodedSizeOf,
+};
 
 use super::{encode_test_event, ResourceCodec, ResourceDirection, TestEvent};
 
@@ -65,7 +68,7 @@ impl HttpResourceConfig {
             }
             // We'll push data to the source.
             ResourceDirection::Push => {
-                spawn_input_http_client(self, codec, input_rx, task_coordinator)
+                spawn_input_http_client(self, codec, input_rx, task_coordinator, runner_metrics)
             }
         }
     }
@@ -213,12 +216,14 @@ fn spawn_input_http_client(
     codec: ResourceCodec,
     mut input_rx: mpsc::Receiver<TestEvent>,
     task_coordinator: &TaskCoordinator<Configuring>,
+    runner_metrics: &Arc<Mutex<RunnerMetrics>>,
 ) {
     // Spin up an HTTP client that will push the input data to the source on a
     // request-per-input-item basis. This runs serially and has no parallelism.
     let started = task_coordinator.track_started();
     let completed = task_coordinator.track_completed();
     let mut encoder = codec.into_encoder();
+    let runner_metrics = Arc::clone(runner_metrics);
 
     tokio::spawn(async move {
         // Mark ourselves as started. We don't actually do anything until we get our first input
@@ -235,8 +240,32 @@ fn spawn_input_http_client(
             debug!("Got event to send from runner.");
 
             let mut buffer = BytesMut::new();
+
+            let is_json = matches!(encoder.serializer(), Json(_))
+                && matches!(
+                    encoder.framer(),
+                    Framer::CharacterDelimited(CharacterDelimitedEncoder { delimiter: b',' })
+                );
+
+            if is_json {
+                buffer.put_u8(b'[');
+            }
+            encode_test_event(&mut encoder, &mut buffer, event);
+
+            if is_json {
+                if !buffer.is_empty() {
+                    // remove trailing comma from last record
+                    buffer.truncate(buffer.len() - 1);
+                }
+                buffer.put_u8(b']');
+
+                // in this edge case we have removed the trailing comma (one byte) and added
+                // opening and closing brackets (2 bytes) for a net add of one byte.
+                let mut runner_metrics = runner_metrics.lock().await;
+                runner_metrics.sent_bytes_total += 1;
+            }
 
             let mut request_builder = Request::builder()
                 .uri(request_uri.clone())
                 .method(request_method.clone());
 
diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs
index 704444ded4c21..5df689267a8a7 100644
--- a/src/components/validation/runner/mod.rs
+++ b/src/components/validation/runner/mod.rs
@@ -593,7 +593,7 @@ fn spawn_input_driver(
         }
     }
 
-    let (failure_case, event) = input_event.clone().get();
+    let (failure_case, mut event) = input_event.clone().get();
 
     if let Some(encoder) = maybe_encoder.as_mut() {
         let mut buffer = BytesMut::new();
@@ -614,6 +614,27 @@ fn spawn_input_driver(
         if !failure_case || component_type == ComponentType::Sink {
             input_runner_metrics.sent_events_total += 1;
 
+            // Convert a unix timestamp in input events to a datetime string.
+            // This is necessary when a source expects the incoming event to have a
+            // unix timestamp but we convert it into a datetime string in the source.
+            // For example, the `datadog_agent` source. This only takes effect when
+            // the test case YAML file defining the event constructs it with the log
+            // builder variant, and specifies an integer in milliseconds for the timestamp.
+            if component_type == ComponentType::Source {
+                if let Event::Log(ref mut log) = event {
+                    if let Some(ts) = log.remove_timestamp() {
+                        let ts = match ts.as_integer() {
+                            Some(ts) => chrono::DateTime::from_timestamp_millis(ts)
+                                .expect(&format!("invalid timestamp in input test event {ts}"))
+                                .into(),
+                            None => ts,
+                        };
+                        log.parse_path_and_insert("timestamp", ts)
+                            .expect("failed to insert timestamp");
+                    }
+                }
+            }
+
             // This particular metric is tricky because a component can run the
             // EstimatedJsonSizeOf calculation on a single event or an array of
             // events. If it's an array of events, the size calculation includes

From 80f63bb6b52561ae4a9f98783ae98472c0798845 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 18 Mar 2024 21:24:11 +0000
Subject: [PATCH 0164/1491] chore(deps): Bump the graphql group with 2 updates
 (#20107)

* chore(deps): Bump the graphql group with 2 updates

Bumps the graphql group with 2 updates: [async-graphql](https://github.com/async-graphql/async-graphql) and [async-graphql-warp](https://github.com/async-graphql/async-graphql).

Updates `async-graphql` from 7.0.1 to 7.0.3
- [Release notes](https://github.com/async-graphql/async-graphql/releases)
- [Changelog](https://github.com/async-graphql/async-graphql/blob/master/CHANGELOG.md)
- [Commits](https://github.com/async-graphql/async-graphql/commits)

Updates `async-graphql-warp` from 7.0.1 to 7.0.3
- [Release notes](https://github.com/async-graphql/async-graphql/releases)
- [Changelog](https://github.com/async-graphql/async-graphql/blob/master/CHANGELOG.md)
- [Commits](https://github.com/async-graphql/async-graphql/commits)

---
updated-dependencies:
- dependency-name: async-graphql
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: graphql
- dependency-name: async-graphql-warp
  dependency-type: direct:production
  update-type: version-update:semver-patch
  dependency-group: graphql
...
Signed-off-by: dependabot[bot] * Bump msrv Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 27 ++++++++++++--------------- Cargo.toml | 6 +++--- lib/vector-core/Cargo.toml | 2 +- 3 files changed, 16 insertions(+), 19 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 70b2b76cf0bbb..33a195ef6d25d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -426,16 +426,16 @@ dependencies = [ [[package]] name = "async-graphql" -version = "7.0.1" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b16926f97f683ff3b47b035cc79622f3d6a374730b07a5d9051e81e88b5f1904" +checksum = "261fa27d5bff5afdf7beff291b3bc73f99d1529804c70e51b0fbc51e70b1c6a9" dependencies = [ "async-graphql-derive", "async-graphql-parser", "async-graphql-value", "async-stream", "async-trait", - "base64 0.13.1", + "base64 0.21.7", "bytes 1.5.0", "chrono", "fnv", @@ -457,9 +457,9 @@ dependencies = [ [[package]] name = "async-graphql-derive" -version = "7.0.1" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6a7349168b79030e3172a620f4f0e0062268a954604e41475eff082380fe505" +checksum = "3188809947798ea6db736715a60cf645ba3b87ea031c710130e1476b48e45967" dependencies = [ "Inflector", "async-graphql-parser", @@ -467,16 +467,16 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.79", "quote 1.0.35", - "strum 0.25.0", + "strum 0.26.1", "syn 2.0.53", "thiserror", ] [[package]] name = "async-graphql-parser" -version = "7.0.1" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "58fdc0adf9f53c2b65bb0ff5170cba1912299f248d0e48266f444b6f005deb1d" +checksum = "d4e65a0b83027f35b2a5d9728a098bc66ac394caa8191d2c65ed9eb2985cf3d8" dependencies = [ "async-graphql-value", "pest", @@ -486,9 +486,9 @@ dependencies = [ [[package]] name = "async-graphql-value" -version = "7.0.1" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7cf4d4e86208f4f9b81a503943c07e6e7f29ad3505e6c9ce6431fe64dc241681" +checksum = "68e40849c29a39012d38bff87bfed431f1ed6c53fbec493294c1045d61a7ae75" dependencies = [ "bytes 1.5.0", "indexmap 2.2.5", @@ -498,9 +498,9 @@ dependencies = [ [[package]] name = "async-graphql-warp" -version = "7.0.1" +version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d906b817c1499c0a814ea62b2a9cc03726e50d694d7e8cad3fcc1b24e8b62883" +checksum = "e901ea60bac5613a1c824da04c8e72906cf79efde5c56f657e3a4ac89624b0a5" dependencies = [ "async-graphql", "futures-util", @@ -8751,9 +8751,6 @@ name = "strum" version = "0.25.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "290d54ea6f91c969195bdbcd7442c8c2a2ba87da8bf60a7ee86a235d4bc1e125" -dependencies = [ - "strum_macros 0.25.3", -] [[package]] name = "strum" diff --git a/Cargo.toml b/Cargo.toml index 08a68b66caaf1..5a5799148a5a5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ default-run = "vector" autobenches = false # our benchmarks are not runnable on their own either way # Minimum supported rust version # See docs/DEVELOPING.md for policy -rust-version = "1.74" +rust-version = "1.75" [[bin]] name = "vector" @@ -235,8 +235,8 @@ smpl_jwt = { version = "0.8.0", default-features = false, optional = true } lapin = { version = "2.3.1", default-features = false, features = 
["native-tls"], optional = true } # API -async-graphql = { version = "7.0.1", default-features = false, optional = true, features = ["chrono", "playground"] } -async-graphql-warp = { version = "7.0.1", default-features = false, optional = true } +async-graphql = { version = "7.0.3", default-features = false, optional = true, features = ["chrono", "playground"] } +async-graphql-warp = { version = "7.0.3", default-features = false, optional = true } # API client crossterm = { version = "0.27.0", default-features = false, features = ["event-stream", "windows"], optional = true } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index fccd51ff78d10..758ee576b3128 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -async-graphql = { version = "7.0.1", default-features = false, features = ["playground" ], optional = true } +async-graphql = { version = "7.0.3", default-features = false, features = ["playground" ], optional = true } async-trait = { version = "0.1", default-features = false } bitmask-enum = { version = "2.2.3", default-features = false } bytes = { version = "1.5.0", default-features = false, features = ["serde"] } From ad6a48efc0f79b2c18a5c1394e5d8603fdfd1bab Mon Sep 17 00:00:00 2001 From: neuronull Date: Mon, 18 Mar 2024 17:04:35 -0600 Subject: [PATCH 0165/1491] fix(datadog_agent source): bugs in internal component metric reporting (#20044) * chore(testing): further adjustments to component validation framework * fix(datadog_agent source): bugs in internal component metric reporting * TODO for other endpoints * changelog * spell checker * feedback bruce- re-use detect json --- Cargo.toml | 1 + ...tadog_agent_source_internal_metrics.fix.md | 3 + src/internal_events/datadog_agent.rs | 26 ++++++++ src/internal_events/mod.rs | 4 ++ src/sources/datadog_agent/logs.rs | 16 +++-- src/sources/datadog_agent/tests.rs | 64 ++++++++++++++++++- .../components/sources/datadog_agent.yaml | 47 ++++++++++++++ 7 files changed, 155 insertions(+), 6 deletions(-) create mode 100644 changelog.d/datadog_agent_source_internal_metrics.fix.md create mode 100644 src/internal_events/datadog_agent.rs create mode 100644 tests/validation/components/sources/datadog_agent.yaml diff --git a/Cargo.toml b/Cargo.toml index 5a5799148a5a5..f83c6d3fdaffc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -921,6 +921,7 @@ component-validation-tests = [ "sinks-splunk_hec", "sources-splunk_hec", "sinks-datadog_logs", + "sources-datadog_agent", ] # Grouping together features for benchmarks. We exclude the API client due to it causing the build process to run out diff --git a/changelog.d/datadog_agent_source_internal_metrics.fix.md b/changelog.d/datadog_agent_source_internal_metrics.fix.md new file mode 100644 index 0000000000000..eb15082275713 --- /dev/null +++ b/changelog.d/datadog_agent_source_internal_metrics.fix.md @@ -0,0 +1,3 @@ +The `datadog_agent` source now correctly calculates the value for the metric `component_received_event_bytes_total` before enriching the event with Vector metadata. + +The source also now adheres to the Component Specification by incrementing `component_errors_total` when a request succeeded in decompression but JSON parsing failed. 
diff --git a/src/internal_events/datadog_agent.rs b/src/internal_events/datadog_agent.rs
new file mode 100644
index 0000000000000..a0846809c9afe
--- /dev/null
+++ b/src/internal_events/datadog_agent.rs
@@ -0,0 +1,26 @@
+use metrics::counter;
+
+use vector_lib::internal_event::InternalEvent;
+use vector_lib::internal_event::{error_stage, error_type};
+
+#[derive(Debug)]
+pub struct DatadogAgentJsonParseError<'a> {
+    pub error: &'a serde_json::Error,
+}
+
+impl InternalEvent for DatadogAgentJsonParseError<'_> {
+    fn emit(self) {
+        error!(
+            message = "Failed to parse JSON body.",
+            error = ?self.error,
+            error_type = error_type::PARSER_FAILED,
+            stage = error_stage::PROCESSING,
+            internal_log_rate_limit = true,
+        );
+        counter!(
+            "component_errors_total", 1,
+            "error_type" => error_type::PARSER_FAILED,
+            "stage" => error_stage::PROCESSING,
+        );
+    }
+}
diff --git a/src/internal_events/mod.rs b/src/internal_events/mod.rs
index 0da2383b7eaee..8d69de5c7e095 100644
--- a/src/internal_events/mod.rs
+++ b/src/internal_events/mod.rs
@@ -30,6 +30,8 @@ mod batch;
 mod codecs;
 mod common;
 mod conditions;
+#[cfg(feature = "sources-datadog_agent")]
+mod datadog_agent;
 #[cfg(feature = "sinks-datadog_metrics")]
 mod datadog_metrics;
 #[cfg(feature = "sinks-datadog_traces")]
@@ -165,6 +167,8 @@ pub(crate) use self::aws_kinesis_firehose::*;
 #[cfg(any(feature = "sources-aws_s3", feature = "sources-aws_sqs",))]
 pub(crate) use self::aws_sqs::*;
 pub(crate) use self::codecs::*;
+#[cfg(feature = "sources-datadog_agent")]
+pub(crate) use self::datadog_agent::*;
 #[cfg(feature = "sinks-datadog_metrics")]
 pub(crate) use self::datadog_metrics::*;
 #[cfg(feature = "sinks-datadog_traces")]
diff --git a/src/sources/datadog_agent/logs.rs b/src/sources/datadog_agent/logs.rs
index 7202376d2eac2..634293d4a9a79 100644
--- a/src/sources/datadog_agent/logs.rs
+++ b/src/sources/datadog_agent/logs.rs
@@ -6,6 +6,7 @@ use http::StatusCode;
 use tokio_util::codec::Decoder;
 use vector_lib::codecs::StreamDecodingError;
 use vector_lib::internal_event::{CountByteSize, InternalEventHandle as _};
+use vector_lib::json_size::JsonSize;
 use vector_lib::lookup::path;
 use vector_lib::{config::LegacyKey, EstimatedJsonEncodedSizeOf};
 use vrl::core::Value;
@@ -14,6 +15,7 @@ use warp::{filters::BoxedFilter, path as warp_path, path::FullPath, reply::Response};
 
 use crate::{
     event::Event,
+    internal_events::DatadogAgentJsonParseError,
     sources::{
         datadog_agent::{
            handle_request, ApiKeyQueryParams, DatadogAgentConfig, DatadogAgentSource, LogMsg,
@@ -80,6 +82,8 @@ pub(crate) fn decode_log_body(
     }
 
     let messages: Vec<LogMsg> = serde_json::from_slice(&body).map_err(|error| {
+        emit!(DatadogAgentJsonParseError { error: &error });
+
         ErrorMessage::new(
             StatusCode::BAD_REQUEST,
             format!("Error parsing JSON: {:?}", error),
@@ -88,6 +92,7 @@ pub(crate) fn decode_log_body(
 
     let now = Utc::now();
     let mut decoded = Vec::new();
+    let mut event_bytes_received = JsonSize::zero();
 
     for LogMsg {
         message,
@@ -102,6 +107,7 @@ pub(crate) fn decode_log_body(
         let mut decoder = source.decoder.clone();
         let mut buffer = BytesMut::new();
         buffer.put(message);
+
         loop {
             match decoder.decode_eof(&mut buffer) {
                 Ok(Some((events, _byte_size))) => {
@@ -160,6 +166,9 @@ pub(crate) fn decode_log_body(
                         ddtags,
                     );
 
+                    // compute EstimatedJsonSizeOf before enrichment
+                    event_bytes_received += log.estimated_json_encoded_size_of();
+
                     namespace.insert_standard_vector_source_metadata(
                         log,
                         DatadogAgentConfig::NAME,
@@ -194,10 +203,9 @@ pub(crate) fn decode_log_body(
         }
     }
 
-    source.events_received.emit(CountByteSize(
decoded.len(), - decoded.estimated_json_encoded_size_of(), - )); + source + .events_received + .emit(CountByteSize(decoded.len(), event_bytes_received)); Ok(decoded) } diff --git a/src/sources/datadog_agent/tests.rs b/src/sources/datadog_agent/tests.rs index fabf11947d2d9..cee767ec9d418 100644 --- a/src/sources/datadog_agent/tests.rs +++ b/src/sources/datadog_agent/tests.rs @@ -14,10 +14,13 @@ use ordered_float::NotNan; use prost::Message; use quickcheck::{Arbitrary, Gen, QuickCheck, TestResult}; use similar_asserts::assert_eq; -use vector_lib::lookup::{owned_value_path, OwnedTargetPath}; +use vector_lib::{ + codecs::{decoding::CharacterDelimitedDecoderOptions, CharacterDelimitedDecoderConfig}, + lookup::{owned_value_path, OwnedTargetPath}, +}; use vector_lib::{ codecs::{ - decoding::{Deserializer, DeserializerConfig, Framer}, + decoding::{BytesDeserializerConfig, Deserializer, DeserializerConfig, Framer}, BytesDecoder, BytesDeserializer, }, config::DataType, @@ -34,6 +37,7 @@ use vrl::value::{Kind, ObjectMap}; use crate::schema::Definition; use crate::{ common::datadog::{DatadogMetricType, DatadogPoint, DatadogSeriesMetric}, + components::validation::prelude::*, config::{SourceConfig, SourceContext}, event::{ into_event_stream, @@ -2501,3 +2505,59 @@ fn test_output_schema_definition_bytes_legacy_namespace() { fn assert_tags(metric: &Metric, tags: MetricTags) { assert_eq!(metric.tags().expect("Missing tags"), &tags); } + +impl ValidatableComponent for DatadogAgentConfig { + fn validation_configuration() -> ValidationConfiguration { + use crate::codecs::DecodingConfig; + + let config = DatadogAgentConfig { + address: "0.0.0.0:9007".parse().unwrap(), + tls: None, + store_api_key: false, + framing: CharacterDelimitedDecoderConfig { + character_delimited: CharacterDelimitedDecoderOptions { + delimiter: b',', + max_length: Some(usize::MAX), + }, + } + .into(), + decoding: BytesDeserializerConfig::new().into(), + acknowledgements: Default::default(), + multiple_outputs: false, + disable_logs: false, + disable_metrics: false, + disable_traces: false, + parse_ddtags: false, + log_namespace: Some(false), + keepalive: Default::default(), + }; + + // TODO set up separate test cases for metrics and traces endpoints + + let logs_addr = format!("http://{}/api/v2/logs", config.address); + let uri = http::Uri::try_from(&logs_addr).expect("should not fail to parse URI"); + + let decoder = DecodingConfig::new( + config.framing.clone(), + DeserializerConfig::Json(Default::default()), + false.into(), + ); + + let external_resource = ExternalResource::new( + ResourceDirection::Push, + HttpResourceConfig::from_parts(uri, None), + decoder, + ); + + ValidationConfiguration::from_source( + Self::NAME, + vec![ComponentTestCaseConfig::from_source( + config, + None, + Some(external_resource), + )], + ) + } +} + +register_validatable_component!(DatadogAgentConfig); diff --git a/tests/validation/components/sources/datadog_agent.yaml b/tests/validation/components/sources/datadog_agent.yaml new file mode 100644 index 0000000000000..a19032384e886 --- /dev/null +++ b/tests/validation/components/sources/datadog_agent.yaml @@ -0,0 +1,47 @@ +- name: happy path + expectation: success + events: + - log_builder: + message: simple message 1 + status: great + timestamp: 1709923304470 + hostname: the_best_host + service: could_be_better_tho + ddsource: happy_source + ddtags: tag1:value,bare,tag2:value + - log_builder: + message: simple message 2 + status: ok + timestamp: 1709923304472 + hostname: the_best_host + service: 
could_be_better_tho + ddsource: standard_source + ddtags: tag1:value,bare,tag2:value +- name: sad path + expectation: partial_success + events: + - log_builder: + message: simple message 1 + status: great + timestamp: 1709923304470 + hostname: the_best_host + service: could_be_better_tho + ddsource: happy_source + ddtags: tag1:value,bare,tag2:value + - log_builder: + message: simple message 2 + status: ok + timestamp: 1709923304472 + hostname: the_best_host + service: could_be_better_tho + ddsource: standard_source + ddtags: tag1:value,bare,tag2:value + - fail_encoding_of: + log_builder: + message: simple message 3 + status: sad + timestamp: 1709923304474 + hostname: the_best_host + service: could_be_better_tho + ddsource: sad_source + ddtags: tag1:value,bare,tag2:value From 62297dcb8caba651ed60f154c36b5a4e1a63046b Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 19 Mar 2024 10:28:08 -0400 Subject: [PATCH 0166/1491] chore(deps): Bump VRL to v0.13.0 (#20126) * chore(deps): Bump VRL to v0.13.0 Signed-off-by: Jesse Szwedko * Regenerate Cargo.lock and license file Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- Cargo.lock | 13 +++++++++++-- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 1 + 3 files changed, 13 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 33a195ef6d25d..a5fa2fa9ab365 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -711,6 +711,12 @@ dependencies = [ "syn 2.0.53", ] +[[package]] +name = "atomic" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c59bdb34bc650a32731b31bd8f0829cc15d24a708ee31559e0bb34f2bc320cba" + [[package]] name = "atomic-waker" version = "1.1.2" @@ -9951,6 +9957,7 @@ version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" dependencies = [ + "atomic", "getrandom 0.2.12", "rand 0.8.5", "serde", @@ -10549,9 +10556,9 @@ checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" [[package]] name = "vrl" -version = "0.12.0" +version = "0.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8bfe806485288b36a6024f820b9874176130497673ab5a1935cb09ea0af88c6" +checksum = "81f1e48235e8db47d5010723fc32c38b09820a1a2a57eaea77b089493a375f52" dependencies = [ "aes", "ansi_term", @@ -10604,6 +10611,8 @@ dependencies = [ "pest_derive", "prettydiff", "prettytable-rs", + "prost 0.12.3", + "prost-reflect", "psl", "quickcheck", "quoted_printable", diff --git a/Cargo.toml b/Cargo.toml index f83c6d3fdaffc..e7d1030a7b3c5 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -138,7 +138,7 @@ proptest-derive = "0.4.0" serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } toml = { version = "0.8.11", default-features = false, features = ["display", "parse"] } -vrl = { version = "0.12.0", features = ["arbitrary", "cli", "test", "test_framework"] } +vrl = { version = "0.13.0", features = ["arbitrary", "cli", "test", "test_framework"] } [dependencies] pin-project.workspace = true diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 50204bc3d070b..3387e033dc954 100644 --- a/LICENSE-3rdparty.csv +++ b/LICENSE-3rdparty.csv @@ -42,6 +42,7 @@ async-signal,https://github.com/smol-rs/async-signal,Apache-2.0 OR MIT,John Nunl async-stream,https://github.com/tokio-rs/async-stream,MIT,Carl Lerche 
async-task,https://github.com/smol-rs/async-task,Apache-2.0 OR MIT,Stjepan Glavina async-trait,https://github.com/dtolnay/async-trait,MIT OR Apache-2.0,David Tolnay +atomic,https://github.com/Amanieu/atomic-rs,Apache-2.0 OR MIT,Amanieu d'Antras atomic-waker,https://github.com/smol-rs/atomic-waker,Apache-2.0 OR MIT,"Stjepan Glavina , Contributors to futures-rs" atty,https://github.com/softprops/atty,MIT,softprops aws-config,https://github.com/smithy-lang/smithy-rs,Apache-2.0,"AWS Rust SDK Team , Russell Cohen " From 58a4a2ef52e606c0f9b9fa975cf114b661300584 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 19 Mar 2024 10:52:12 -0400 Subject: [PATCH 0167/1491] chore(api): Move host_metrics feature gate (#20134) Move host_metrics feature gate Signed-off-by: Jesse Szwedko --- src/api/schema/metrics/mod.rs | 6 +++--- src/api/schema/mod.rs | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/src/api/schema/metrics/mod.rs b/src/api/schema/metrics/mod.rs index 1a273dc5e9bee..08baa7cc12fd7 100644 --- a/src/api/schema/metrics/mod.rs +++ b/src/api/schema/metrics/mod.rs @@ -15,7 +15,7 @@ mod uptime; mod host; pub use allocated_bytes::{AllocatedBytes, ComponentAllocatedBytes}; -use async_graphql::{Interface, Object, Subscription}; +use async_graphql::{Interface, Subscription}; use chrono::{DateTime, Utc}; pub use errors::{ComponentErrorsTotal, ErrorsTotal}; pub use filter::*; @@ -45,9 +45,9 @@ pub enum MetricType { #[derive(Default)] pub struct MetricsQuery; -#[Object] +#[cfg(feature = "sources-host_metrics")] +#[async_graphql::Object] impl MetricsQuery { - #[cfg(feature = "sources-host_metrics")] /// Vector host metrics async fn host_metrics(&self) -> host::HostMetrics { host::HostMetrics::new() diff --git a/src/api/schema/mod.rs b/src/api/schema/mod.rs index d3e664f000d94..58c22a03b687f 100644 --- a/src/api/schema/mod.rs +++ b/src/api/schema/mod.rs @@ -13,7 +13,7 @@ use async_graphql::{EmptyMutation, MergedObject, MergedSubscription, Schema, Sch pub struct Query( health::HealthQuery, components::ComponentsQuery, - metrics::MetricsQuery, + #[cfg(feature = "sources-host_metrics")] metrics::MetricsQuery, meta::MetaQuery, ); From b184196d9760539db31a5238ee7b7254329b7c8d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Mar 2024 12:44:33 +0000 Subject: [PATCH 0168/1491] chore(deps): Bump uuid from 1.7.0 to 1.8.0 (#20131) Bumps [uuid](https://github.com/uuid-rs/uuid) from 1.7.0 to 1.8.0. - [Release notes](https://github.com/uuid-rs/uuid/releases) - [Commits](https://github.com/uuid-rs/uuid/compare/1.7.0...1.8.0) --- updated-dependencies: - dependency-name: uuid dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a5fa2fa9ab365..7a1c8f941f229 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9953,9 +9953,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f00cc9702ca12d3c81455259621e676d0f7251cec66a21e98fe2e9a37db93b2a" +checksum = "a183cf7feeba97b4dd1c0d46788634f6221d87fa961b305bed08c851829efcc0" dependencies = [ "atomic", "getrandom 0.2.12", From 2a88fc06b7c958f9787a3e050c677cbe5860d62d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 20 Mar 2024 12:44:43 +0000 Subject: [PATCH 0169/1491] chore(deps): Bump the aws group with 2 updates (#20129) Bumps the aws group with 2 updates: [aws-types](https://github.com/smithy-lang/smithy-rs) and [aws-sigv4](https://github.com/smithy-lang/smithy-rs). Updates `aws-types` from 1.1.7 to 1.1.8 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) Updates `aws-sigv4` from 1.1.7 to 1.2.0 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-types dependency-type: direct:production update-type: version-update:semver-patch dependency-group: aws - dependency-name: aws-sigv4 dependency-type: direct:production update-type: version-update:semver-minor dependency-group: aws ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 8 ++++---- Cargo.toml | 4 ++-- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 7a1c8f941f229..61a749954a83c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1028,9 +1028,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.1.7" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ada00a4645d7d89f296fe0ddbc3fe3554f03035937c849a05d37ddffc1f29a1" +checksum = "11d6f29688a4be9895c0ba8bef861ad0c0dac5c15e9618b9b7a6c233990fc263" dependencies = [ "aws-credential-types", "aws-smithy-eventstream", @@ -1206,9 +1206,9 @@ dependencies = [ [[package]] name = "aws-types" -version = "1.1.7" +version = "1.1.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d07c63521aa1ea9a9f92a701f1a08ce3fd20b46c6efc0d5c8947c1fd879e3df1" +checksum = "0dbf2f3da841a8930f159163175cf6a3d16ddde517c1b0fba7aa776822800f40" dependencies = [ "aws-credential-types", "aws-smithy-async", diff --git a/Cargo.toml b/Cargo.toml index e7d1030a7b3c5..78cd48bb74f4a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -188,8 +188,8 @@ aws-sdk-kinesis = { version = "1.3.0", default-features = false, features = ["be # `behavior-version-latest` feature. Without this we get a runtime panic when `auth.assume_role` authentication # is configured. 
aws-sdk-sts = { version = "1.3.1", default-features = false, features = ["behavior-version-latest"], optional = true } -aws-types = { version = "1.1.7", default-features = false, optional = true } -aws-sigv4 = { version = "1.1.7", default-features = false, features = ["sign-http"], optional = true } +aws-types = { version = "1.1.8", default-features = false, optional = true } +aws-sigv4 = { version = "1.2.0", default-features = false, features = ["sign-http"], optional = true } aws-config = { version = "1.0.1", default-features = false, features = ["behavior-version-latest", "credentials-process"], optional = true } aws-credential-types = { version = "1.1.8", default-features = false, features = ["hardcoded-credentials"], optional = true } aws-smithy-http = { version = "0.60", default-features = false, features = ["event-stream"], optional = true } From 04bff918cfcba087c18766ef81a8e2316b8790f4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 15:30:21 -0400 Subject: [PATCH 0170/1491] chore(deps): Bump smallvec from 1.13.1 to 1.13.2 (#20145) Bumps [smallvec](https://github.com/servo/rust-smallvec) from 1.13.1 to 1.13.2. - [Release notes](https://github.com/servo/rust-smallvec/releases) - [Commits](https://github.com/servo/rust-smallvec/compare/v1.13.1...v1.13.2) --- updated-dependencies: - dependency-name: smallvec dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 61a749954a83c..503bfd2ac7641 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8508,9 +8508,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "1.13.1" +version = "1.13.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6ecd384b10a64542d77071bd64bd7b231f4ed5940fba55e98c3de13824cf3d7" +checksum = "3c5e1a9a646d36c3599cd173a41282daf47c44583ad367b8e6837255952e5c67" dependencies = [ "serde", ] From db9c681fd99234f6cd4799185bace2f351e0712d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 19:31:23 +0000 Subject: [PATCH 0171/1491] chore(ci): Bump actions/add-to-project from 0.6.0 to 0.6.1 (#20137) Bumps [actions/add-to-project](https://github.com/actions/add-to-project) from 0.6.0 to 0.6.1. - [Release notes](https://github.com/actions/add-to-project/releases) - [Commits](https://github.com/actions/add-to-project/compare/v0.6.0...v0.6.1) --- updated-dependencies: - dependency-name: actions/add-to-project dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/gardener_open_issue.yml | 2 +- .github/workflows/gardener_open_pr.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/gardener_open_issue.yml b/.github/workflows/gardener_open_issue.yml index 56da3309db9f1..ae9ba7f10635f 100644 --- a/.github/workflows/gardener_open_issue.yml +++ b/.github/workflows/gardener_open_issue.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 steps: - - uses: actions/add-to-project@v0.6.0 + - uses: actions/add-to-project@v0.6.1 with: project-url: https://github.com/orgs/vectordotdev/projects/49 github-token: ${{ secrets.GH_PROJECT_PAT }} diff --git a/.github/workflows/gardener_open_pr.yml b/.github/workflows/gardener_open_pr.yml index a89c5d0542bd7..6079d1e5770f9 100644 --- a/.github/workflows/gardener_open_pr.yml +++ b/.github/workflows/gardener_open_pr.yml @@ -26,7 +26,7 @@ jobs: username: ${{ github.actor }} team: vector GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - - uses: actions/add-to-project@v0.6.0 + - uses: actions/add-to-project@v0.6.1 if: ${{ steps.checkVectorMember.outputs.isTeamMember == 'false' }} with: project-url: https://github.com/orgs/vectordotdev/projects/49 @@ -37,7 +37,7 @@ jobs: timeout-minutes: 5 if: ${{ github.actor == 'dependabot[bot]' }} steps: - - uses: actions/add-to-project@v0.6.0 + - uses: actions/add-to-project@v0.6.1 with: project-url: https://github.com/orgs/vectordotdev/projects/49 github-token: ${{ secrets.GH_PROJECT_PAT }} From e012a80bb5d8e4f318fb4408d9e2ab6242a8883b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 19:31:26 +0000 Subject: [PATCH 0172/1491] chore(deps): Bump serde-toml-merge from 0.3.5 to 0.3.6 (#20132) Bumps [serde-toml-merge](https://github.com/jdrouet/serde-toml-merge) from 0.3.5 to 0.3.6. - [Release notes](https://github.com/jdrouet/serde-toml-merge/releases) - [Changelog](https://github.com/jdrouet/serde-toml-merge/blob/main/CHANGELOG.md) - [Commits](https://github.com/jdrouet/serde-toml-merge/compare/v0.3.5...v0.3.6) --- updated-dependencies: - dependency-name: serde-toml-merge dependency-type: direct:production update-type: version-update:semver-patch ... 
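For context, this crate backs Vector's merging of multiple TOML configuration fragments. A minimal usage sketch follows; it assumes the crate's top-level `merge(toml::Value, toml::Value)` entry point returning a `Result`, and is illustrative rather than code from this repository:

```rust
// Sketch only; assumes serde-toml-merge's `merge` takes and returns `toml::Value`.
use serde_toml_merge::merge;
use toml::Value;

fn main() {
    let base: Value = r#"
        [api]
        enabled = true
        address = "127.0.0.1:8686"
    "#
    .parse()
    .expect("base fragment should parse");

    let overlay: Value = r#"
        [api]
        address = "0.0.0.0:8686"
    "#
    .parse()
    .expect("overlay fragment should parse");

    // Keys from `overlay` win; unrelated keys from `base` are preserved.
    let merged = merge(base, overlay).expect("fragments should merge");
    println!("{merged}");
}
```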
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 503bfd2ac7641..ba054163c378e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8120,9 +8120,9 @@ dependencies = [ [[package]] name = "serde-toml-merge" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "317213b881aea9bc19d7590b9cf467c58ad5f536f95b4d42129b5643f351d27f" +checksum = "88075e75b01384301454b1c188243552c674263c0c0c3c7ed5dd82291b20798f" dependencies = [ "toml", ] diff --git a/Cargo.toml b/Cargo.toml index 78cd48bb74f4a..477c48ee736a2 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -212,7 +212,7 @@ tower = { version = "0.4.13", default-features = false, features = ["buffer", "l tower-http = { version = "0.4.4", default-features = false, features = ["decompression-gzip", "trace"]} # Serde serde.workspace = true -serde-toml-merge = { version = "0.3.5", default-features = false } +serde-toml-merge = { version = "0.3.6", default-features = false } serde_bytes = { version = "0.11.14", default-features = false, features = ["std"], optional = true } serde_json.workspace = true serde_with = { version = "3.7.0", default-features = false, features = ["macros", "std"] } From 20e56d3080ec3cb04c750966c2722799ed920225 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 19:31:37 +0000 Subject: [PATCH 0173/1491] chore(deps): Bump toml from 0.8.11 to 0.8.12 (#20130) Bumps [toml](https://github.com/toml-rs/toml) from 0.8.11 to 0.8.12. - [Commits](https://github.com/toml-rs/toml/compare/toml-v0.8.11...toml-v0.8.12) --- updated-dependencies: - dependency-name: toml dependency-type: direct:production update-type: version-update:semver-patch ... 
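A quick round-trip sketch of the `toml` surface Vector enables (the `parse` and `display` features visible in the Cargo.toml diff below); the struct here is illustrative, not a Vector type:

```rust
// Sketch only; exercises `toml::from_str` (feature "parse") and
// `toml::to_string` (feature "display").
use serde::{Deserialize, Serialize};

#[derive(Debug, Serialize, Deserialize)]
struct Api {
    enabled: bool,
    address: String,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let input = r#"
        enabled = true
        address = "127.0.0.1:8686"
    "#;

    // Deserialize into a typed struct...
    let api: Api = toml::from_str(input)?;
    assert_eq!(api.address, "127.0.0.1:8686");

    // ...and serialize back out.
    let rendered = toml::to_string(&api)?;
    println!("{rendered}");
    Ok(())
}
```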
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 10 +++++----- Cargo.toml | 2 +- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ba054163c378e..33d033aa42754 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9305,14 +9305,14 @@ dependencies = [ [[package]] name = "toml" -version = "0.8.11" +version = "0.8.12" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af06656561d28735e9c1cd63dfd57132c8155426aa6af24f36a00a351f88c48e" +checksum = "e9dd1545e8208b4a5af1aa9bbd0b4cf7e9ea08fabc5d0a5c67fcaafa17433aa3" dependencies = [ "serde", "serde_spanned", "toml_datetime", - "toml_edit 0.22.7", + "toml_edit 0.22.8", ] [[package]] @@ -9348,9 +9348,9 @@ dependencies = [ [[package]] name = "toml_edit" -version = "0.22.7" +version = "0.22.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "18769cd1cec395d70860ceb4d932812a0b4d06b1a4bb336745a4d21b9496e992" +checksum = "c12219811e0c1ba077867254e5ad62ee2c9c190b0d957110750ac0cda1ae96cd" dependencies = [ "indexmap 2.2.5", "serde", diff --git a/Cargo.toml b/Cargo.toml index 477c48ee736a2..8b727738ccf75 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -137,7 +137,7 @@ proptest = "1.4" proptest-derive = "0.4.0" serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } -toml = { version = "0.8.11", default-features = false, features = ["display", "parse"] } +toml = { version = "0.8.12", default-features = false, features = ["display", "parse"] } vrl = { version = "0.13.0", features = ["arbitrary", "cli", "test", "test_framework"] } [dependencies] From 3f83ea32e06c8e3575e6b82bdf8e25a7eb97dcc0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 21 Mar 2024 19:31:48 +0000 Subject: [PATCH 0174/1491] chore(deps): Bump h2 from 0.4.2 to 0.4.3 (#20110) Bumps [h2](https://github.com/hyperium/h2) from 0.4.2 to 0.4.3. - [Release notes](https://github.com/hyperium/h2/releases) - [Changelog](https://github.com/hyperium/h2/blob/master/CHANGELOG.md) - [Commits](https://github.com/hyperium/h2/compare/v0.4.2...v0.4.3) --- updated-dependencies: - dependency-name: h2 dependency-type: direct:production update-type: version-update:semver-patch ... 
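For readers unfamiliar with the crate: `h2` is the low-level HTTP/2 implementation behind Vector's gRPC-style transports. A minimal client handshake sketch, following the crate's documented flow, is below; the plaintext endpoint and tokio runtime setup are illustrative assumptions, not Vector code:

```rust
// Sketch only; assumes a plaintext (prior-knowledge) HTTP/2 server is listening.
use h2::client;
use http::Request;
use tokio::net::TcpStream;

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    let tcp = TcpStream::connect("127.0.0.1:8080").await?;

    // The handshake yields a request handle plus a connection driver
    // that must be polled for the session to make progress.
    let (send_request, connection) = client::handshake(tcp).await?;
    tokio::spawn(async move {
        if let Err(error) = connection.await {
            eprintln!("connection error: {error}");
        }
    });

    // Wait until the connection is ready to accept a new stream.
    let mut send_request = send_request.ready().await?;
    let request = Request::builder().uri("http://127.0.0.1:8080/").body(())?;
    let (response, _request_body) = send_request.send_request(request, true)?;

    println!("response status: {}", response.await?.status());
    Ok(())
}
```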
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 6 +++--- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 33d033aa42754..155a138284865 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3766,9 +3766,9 @@ dependencies = [ [[package]] name = "h2" -version = "0.4.2" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "31d030e59af851932b72ceebadf4a2b5986dba4c3b99dd2493f8273a0f151943" +checksum = "51ee2dd2e4f378392eeff5d51618cd9a63166a2513846bbc55f21cfacd9199d4" dependencies = [ "bytes 1.5.0", "fnv", @@ -10083,7 +10083,7 @@ dependencies = [ "governor", "greptimedb-client", "grok", - "h2 0.4.2", + "h2 0.4.3", "hash_hasher", "hashbrown 0.14.3", "headers", diff --git a/Cargo.toml b/Cargo.toml index 8b727738ccf75..eeb76455b6a6d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -278,7 +278,7 @@ futures-util = { version = "0.3.29", default-features = false } glob = { version = "0.3.1", default-features = false } governor = { version = "0.6.0", default-features = false, features = ["dashmap", "jitter", "std"], optional = true } grok = { version = "2.0.0", default-features = false, optional = true } -h2 = { version = "0.4.1", default-features = false, optional = true } +h2 = { version = "0.4.3", default-features = false, optional = true } hash_hasher = { version = "2.0.0", default-features = false } hashbrown = { version = "0.14.3", default-features = false, optional = true, features = ["ahash"] } headers = { version = "0.3.9", default-features = false } From 4c68f9699749d17fa926983e2a90bdeec92b112a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?= Date: Thu, 21 Mar 2024 21:05:32 +0100 Subject: [PATCH 0175/1491] docs(vrl): add documentation for `sieve` function (#20000) * docs(vrl): add documentation for `sieve` function Related: https://github.com/vectordotdev/vrl/pull/724 * Fix typo in sieve docs Co-authored-by: jhgilbert * Update function docs after removing string pattern option * cue fmt Signed-off-by: Jesse Szwedko * Fix example Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko Co-authored-by: jhgilbert Co-authored-by: Jesse Szwedko --- .../cue/reference/remap/functions/sieve.cue | 65 +++++++++++++++++++ 1 file changed, 65 insertions(+) create mode 100644 website/cue/reference/remap/functions/sieve.cue diff --git a/website/cue/reference/remap/functions/sieve.cue b/website/cue/reference/remap/functions/sieve.cue new file mode 100644 index 0000000000000..0388bb7f72c60 --- /dev/null +++ b/website/cue/reference/remap/functions/sieve.cue @@ -0,0 +1,65 @@ +package metadata + +remap: functions: sieve: { + category: "String" + description: """ + Keeps only matches of `pattern` in `value`. + + This can be used to define patterns that are allowed in the string and + remove everything else. + """ + + arguments: [ + { + name: "value" + description: "The original string." + required: true + type: ["string"] + }, + { + name: "pattern" + description: """ + Keep all matches of this pattern. + """ + required: true + type: ["regex"] + }, + { + name: "replace_single" + description: """ + The string to use to replace single rejected characters. + """ + required: false + default: "" + type: ["string"] + }, + { + name: "replace_repeated" + description: """ + The string to use to replace multiple sequential instances of rejected characters. 
+ """ + required: false + default: "" + type: ["string"] + }, + ] + internal_failure_reasons: [] + return: types: ["string"] + + examples: [ + { + title: "Sieve with regex" + source: #""" + sieve("test123%456.فوائد.net.", r'[a-z0-9.]') + """# + return: "test123456..net." + }, + { + title: "Custom replacements" + source: #""" + sieve("test123%456.فوائد.net.", r'[a-z.0-9]', replace_single: "X", replace_repeated: "") + """# + return: "test123X456..net." + }, + ] +} From abd776d7c74ae48968fa34829d3683f68115a9e0 Mon Sep 17 00:00:00 2001 From: Bruce Guenter Date: Fri, 22 Mar 2024 08:48:19 -0600 Subject: [PATCH 0176/1491] chore(deps): Bump Rust to 1.77.0 (#20149) * chore(deps): Bump Rust to 1.77.0 * More clippy fixes * Fix journald checkpointer --- .../src/topology/channel/receiver.rs | 6 ++--- lib/vector-buffers/src/topology/test_util.rs | 1 + lib/vector-stream/src/driver.rs | 9 ++++--- rust-toolchain.toml | 2 +- src/sinks/aws_s3/integration_tests.rs | 6 ++++- src/sinks/azure_blob/integration_tests.rs | 7 +++--- src/sinks/datadog/metrics/request_builder.rs | 6 ++--- src/sinks/file/mod.rs | 2 +- src/sinks/util/service/health.rs | 12 ++++++---- src/sources/file_descriptors/mod.rs | 2 +- src/sources/http_client/client.rs | 2 +- src/sources/journald.rs | 1 + src/sources/kafka.rs | 24 ++++++------------- vdev/src/testing/integration.rs | 2 +- 14 files changed, 40 insertions(+), 42 deletions(-) diff --git a/lib/vector-buffers/src/topology/channel/receiver.rs b/lib/vector-buffers/src/topology/channel/receiver.rs index f6b7120d23323..d21aa1ed67e4f 100644 --- a/lib/vector-buffers/src/topology/channel/receiver.rs +++ b/lib/vector-buffers/src/topology/channel/receiver.rs @@ -160,7 +160,7 @@ impl BufferReceiver { enum StreamState { Idle(BufferReceiver), Polling, - Closed(BufferReceiver), + Closed, } pub struct BufferReceiverStream { @@ -183,7 +183,7 @@ impl Stream for BufferReceiverStream { fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll> { loop { match mem::replace(&mut self.state, StreamState::Polling) { - s @ StreamState::Closed(_) => { + s @ StreamState::Closed => { self.state = s; return Poll::Ready(None); } @@ -193,7 +193,7 @@ impl Stream for BufferReceiverStream { StreamState::Polling => { let (result, receiver) = ready!(self.recv_fut.poll(cx)); self.state = if result.is_none() { - StreamState::Closed(receiver) + StreamState::Closed } else { StreamState::Idle(receiver) }; diff --git a/lib/vector-buffers/src/topology/test_util.rs b/lib/vector-buffers/src/topology/test_util.rs index 10d5aa2fb7a0e..ec0acac7416d2 100644 --- a/lib/vector-buffers/src/topology/test_util.rs +++ b/lib/vector-buffers/src/topology/test_util.rs @@ -72,6 +72,7 @@ impl EventCount for Sample { } #[derive(Debug)] +#[allow(dead_code)] // The inner _is_ read by the `Debug` impl, but that's ignored pub struct BasicError(pub(crate) String); impl fmt::Display for BasicError { diff --git a/lib/vector-stream/src/driver.rs b/lib/vector-stream/src/driver.rs index 7a7b8386f8250..89d87101eae39 100644 --- a/lib/vector-stream/src/driver.rs +++ b/lib/vector-stream/src/driver.rs @@ -272,7 +272,7 @@ mod tests { type Counter = Arc; #[derive(Debug)] - struct DelayRequest(usize, EventFinalizers, RequestMetadata); + struct DelayRequest(EventFinalizers, RequestMetadata); impl DelayRequest { fn new(value: usize, counter: &Counter) -> Self { @@ -283,7 +283,6 @@ mod tests { counter.fetch_add(value, Ordering::Relaxed); }); Self( - value, EventFinalizers::new(EventFinalizer::new(batch)), RequestMetadata::default(), ) @@ -292,17 +291,17 
@@ mod tests { impl Finalizable for DelayRequest { fn take_finalizers(&mut self) -> vector_core::event::EventFinalizers { - std::mem::take(&mut self.1) + std::mem::take(&mut self.0) } } impl MetaDescriptive for DelayRequest { fn get_metadata(&self) -> &RequestMetadata { - &self.2 + &self.1 } fn metadata_mut(&mut self) -> &mut RequestMetadata { - &mut self.2 + &mut self.1 } } diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 1c8cfba9f594b..2fe891cfea632 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,3 +1,3 @@ [toolchain] -channel = "1.75.0" +channel = "1.77.0" profile = "default" diff --git a/src/sinks/aws_s3/integration_tests.rs b/src/sinks/aws_s3/integration_tests.rs index 8d723f09b9587..d6255933847a4 100644 --- a/src/sinks/aws_s3/integration_tests.rs +++ b/src/sinks/aws_s3/integration_tests.rs @@ -388,7 +388,11 @@ async fn s3_healthchecks() { .create_service(&ProxyConfig::from_env()) .await .unwrap(); - config.build_healthcheck(service.client()).unwrap(); + config + .build_healthcheck(service.client()) + .unwrap() + .await + .unwrap(); } #[tokio::test] diff --git a/src/sinks/azure_blob/integration_tests.rs b/src/sinks/azure_blob/integration_tests.rs index 754425806d16d..9e36ed1a6acd7 100644 --- a/src/sinks/azure_blob/integration_tests.rs +++ b/src/sinks/azure_blob/integration_tests.rs @@ -40,9 +40,10 @@ async fn azure_blob_healthcheck_passed() { ) .expect("Failed to create client"); - let response = azure_common::config::build_healthcheck(config.container_name, client); - - response.expect("Failed to pass healthcheck"); + azure_common::config::build_healthcheck(config.container_name, client) + .expect("Failed to build healthcheck") + .await + .expect("Failed to pass healthcheck"); } #[tokio::test] diff --git a/src/sinks/datadog/metrics/request_builder.rs b/src/sinks/datadog/metrics/request_builder.rs index e9b2fcb1514c6..794d8b5a58a2e 100644 --- a/src/sinks/datadog/metrics/request_builder.rs +++ b/src/sinks/datadog/metrics/request_builder.rs @@ -159,7 +159,7 @@ impl IncrementalRequestBuilder<((Option>, DatadogMetricsEndpoint), Vec< Ok((encode_result, mut metrics)) => { let finalizers = metrics.take_finalizers(); let metadata = DDMetricsMetadata { - api_key: api_key.as_ref().map(Arc::clone), + api_key: api_key.clone(), endpoint, finalizers, }; @@ -203,7 +203,7 @@ impl IncrementalRequestBuilder<((Option>, DatadogMetricsEndpoint), Vec< let chunk = metrics.split_off(split_idx); results.push(encode_now_or_never( encoder, - api_key.as_ref().map(Arc::clone), + api_key.clone(), endpoint, chunk, )); @@ -211,7 +211,7 @@ impl IncrementalRequestBuilder<((Option>, DatadogMetricsEndpoint), Vec< } results.push(encode_now_or_never( encoder, - api_key.as_ref().map(Arc::clone), + api_key.clone(), endpoint, metrics, )); diff --git a/src/sinks/file/mod.rs b/src/sinks/file/mod.rs index 8603a57741e8b..64110daff576f 100644 --- a/src/sinks/file/mod.rs +++ b/src/sinks/file/mod.rs @@ -581,7 +581,7 @@ mod tests { run_assert_sink(config, input.clone().into_iter()).await; - let output = vec![ + let output = [ lines_from_file(directory.join("warnings-2019-26-07.log")), lines_from_file(directory.join("errors-2019-26-07.log")), lines_from_file(directory.join("warnings-2019-27-07.log")), diff --git a/src/sinks/util/service/health.rs b/src/sinks/util/service/health.rs index c2f97c0cd296b..bd04b6ba660da 100644 --- a/src/sinks/util/service/health.rs +++ b/src/sinks/util/service/health.rs @@ -18,7 +18,7 @@ use tower::Service; use vector_lib::{configurable::configurable_component, emit}; use 
crate::{ - internal_events::{EndpointsActive, OpenGauge, OpenToken}, + internal_events::{EndpointsActive, OpenGauge}, sinks::util::retries::ExponentialBackoff, }; @@ -65,13 +65,14 @@ impl HealthConfig { let counters = Arc::new(HealthCounters::new()); let snapshot = counters.snapshot(); + open.clone().open(emit_active_endpoints); HealthService { inner, logic, counters, snapshot, endpoint, - state: CircuitState::Closed(open.clone().open(emit_active_endpoints)), + state: CircuitState::Closed, open, // An exponential backoff starting from retry_initial_backoff_sec and doubling every time // up to retry_max_duration_secs. @@ -103,7 +104,7 @@ enum CircuitState { }, /// Service is healthy and passing requests downstream. - Closed(OpenToken), + Closed, } /// A service which monitors the health of a service. @@ -163,7 +164,8 @@ where info!(message = "Endpoint is healthy.", endpoint = %&self.endpoint); self.backoff.reset(); - CircuitState::Closed(self.open.clone().open(emit_active_endpoints)) + self.open.clone().open(emit_active_endpoints); + CircuitState::Closed } else { debug!(message = "Endpoint failed probation.", endpoint = %&self.endpoint); @@ -172,7 +174,7 @@ where ) } } - CircuitState::Closed(_) => { + CircuitState::Closed => { // Check for errors match self.counters.healthy(self.snapshot) { Ok(snapshot) => { diff --git a/src/sources/file_descriptors/mod.rs b/src/sources/file_descriptors/mod.rs index 69b0987e2af94..18e6bde6a73d8 100644 --- a/src/sources/file_descriptors/mod.rs +++ b/src/sources/file_descriptors/mod.rs @@ -97,7 +97,7 @@ where { loop { let (buffer, len) = match reader.fill_buf() { - Ok(buffer) if buffer.is_empty() => break, // EOF. + Ok([]) => break, // EOF. Ok(buffer) => (Ok(Bytes::copy_from_slice(buffer)), buffer.len()), Err(error) if error.kind() == io::ErrorKind::Interrupted => continue, Err(error) => (Err(error), 0), diff --git a/src/sources/http_client/client.rs b/src/sources/http_client/client.rs index 67fa609980f93..b036b98500219 100644 --- a/src/sources/http_client/client.rs +++ b/src/sources/http_client/client.rs @@ -175,7 +175,7 @@ impl_generate_config_from_default!(HttpClientConfig); impl SourceConfig for HttpClientConfig { async fn build(&self, cx: SourceContext) -> Result { // build the url - let endpoints = vec![self.endpoint.clone()]; + let endpoints = [self.endpoint.clone()]; let urls = endpoints .iter() .map(|s| s.parse::().context(sources::UriParseSnafu)) diff --git a/src/sources/journald.rs b/src/sources/journald.rs index 2f2cb9b7141e8..40f4e29822b58 100644 --- a/src/sources/journald.rs +++ b/src/sources/journald.rs @@ -992,6 +992,7 @@ impl Checkpointer { .read(true) .write(true) .create(true) + .truncate(false) .open(&filename) .await?; Ok(Checkpointer { file, filename }) diff --git a/src/sources/kafka.rs b/src/sources/kafka.rs index 08313dde581d2..823406b1a9b82 100644 --- a/src/sources/kafka.rs +++ b/src/sources/kafka.rs @@ -503,7 +503,6 @@ struct ConsumerStateInner { consumer_state: S, } struct Consuming; -struct Complete; struct Draining { /// The rendezvous channel sender from the revoke or shutdown callback. 
Sending on this channel /// indicates to the kafka client task that one or more partitions have been drained, while @@ -527,7 +526,7 @@ type OptionDeadline = OptionFuture>>; enum ConsumerState { Consuming(ConsumerStateInner), Draining(ConsumerStateInner), - Complete(ConsumerStateInner), + Complete, } impl Draining { fn new(signal: SyncSender<()>, shutdown: bool) -> Self { @@ -545,16 +544,7 @@ impl Draining { impl ConsumerStateInner { fn complete(self, _deadline: OptionDeadline) -> (OptionDeadline, ConsumerState) { - ( - None.into(), - ConsumerState::Complete(ConsumerStateInner { - config: self.config, - decoder: self.decoder, - out: self.out, - log_namespace: self.log_namespace, - consumer_state: Complete, - }), - ) + (None.into(), ConsumerState::Complete) } } @@ -765,7 +755,7 @@ async fn coordinate_kafka_callbacks( abort_handles.remove(&finished_partition); (drain_deadline, consumer_state) = match consumer_state { - ConsumerState::Complete(_) => unreachable!("Partition consumer finished after completion."), + ConsumerState::Complete => unreachable!("Partition consumer finished after completion."), ConsumerState::Draining(mut state) => { state.partition_drained(finished_partition); @@ -798,7 +788,7 @@ async fn coordinate_kafka_callbacks( }, Some(callback) = callbacks.recv() => match callback { KafkaCallback::PartitionsAssigned(mut assigned_partitions, done) => match consumer_state { - ConsumerState::Complete(_) => unreachable!("Partition assignment received after completion."), + ConsumerState::Complete => unreachable!("Partition assignment received after completion."), ConsumerState::Draining(_) => error!("Partition assignment received while draining revoked partitions, maybe an invalid assignment."), ConsumerState::Consuming(ref consumer_state) => { let acks = consumer.context().acknowledgements; @@ -819,7 +809,7 @@ async fn coordinate_kafka_callbacks( } }, KafkaCallback::PartitionsRevoked(mut revoked_partitions, drain) => (drain_deadline, consumer_state) = match consumer_state { - ConsumerState::Complete(_) => unreachable!("Partitions revoked after completion."), + ConsumerState::Complete => unreachable!("Partitions revoked after completion."), ConsumerState::Draining(d) => { // NB: This would only happen if the task driving the kafka client (i.e. 
rebalance handlers) // is not handling shutdown signals, and a revoke happens during a shutdown drain; otherwise @@ -843,7 +833,7 @@ async fn coordinate_kafka_callbacks( } }, KafkaCallback::ShuttingDown(drain) => (drain_deadline, consumer_state) = match consumer_state { - ConsumerState::Complete(_) => unreachable!("Shutdown received after completion."), + ConsumerState::Complete => unreachable!("Shutdown received after completion."), // Shutting down is just like a full assignment revoke, but we also close the // callback channels, since we don't expect additional assignments or rebalances ConsumerState::Draining(state) => { @@ -882,7 +872,7 @@ async fn coordinate_kafka_callbacks( }, Some(_) = &mut drain_deadline => (drain_deadline, consumer_state) = match consumer_state { - ConsumerState::Complete(_) => unreachable!("Drain deadline received after completion."), + ConsumerState::Complete => unreachable!("Drain deadline received after completion."), ConsumerState::Consuming(state) => { warn!("A drain deadline fired outside of draining mode."); state.keep_consuming(None.into()) diff --git a/vdev/src/testing/integration.rs b/vdev/src/testing/integration.rs index be5c714b7739a..c490336aa32b7 100644 --- a/vdev/src/testing/integration.rs +++ b/vdev/src/testing/integration.rs @@ -45,7 +45,7 @@ pub(crate) trait ComposeTestT { let environment = environment.into(); let (test_dir, config) = ComposeTestConfig::load(Self::DIRECTORY, &test_name)?; let envs_dir = EnvsDir::new(&test_name); - let Some(mut env_config) = config.environments().get(&environment).map(Clone::clone) else { + let Some(mut env_config) = config.environments().get(&environment).cloned() else { bail!("Could not find environment named {environment:?}"); }; From 314ea367302fb95a3ec0c2fcdfbe19df6a0e7603 Mon Sep 17 00:00:00 2001 From: Philipp Paulweber Date: Fri, 22 Mar 2024 20:54:00 +0100 Subject: [PATCH 0177/1491] feat(vrl): add `uuid_v7` function (#20048) feat(vrl): add uuid_v7 function * add remap behavioral test for VRL `uuid_v7()` function * provided cue website documentation for `uuid_v7()` function * defined `uuidv7` URL to point to the RFC section --- lib/vector-vrl/tests/src/docs.rs | 1 + tests/behavior/transforms/remap.toml | 26 +++++++++++ .../cue/reference/remap/functions/uuid_v7.cue | 44 +++++++++++++++++++ website/cue/reference/urls.cue | 1 + 4 files changed, 72 insertions(+) create mode 100644 website/cue/reference/remap/functions/uuid_v7.cue diff --git a/lib/vector-vrl/tests/src/docs.rs b/lib/vector-vrl/tests/src/docs.rs index 8e08ef39b17cf..ec694ce316f6e 100644 --- a/lib/vector-vrl/tests/src/docs.rs +++ b/lib/vector-vrl/tests/src/docs.rs @@ -16,6 +16,7 @@ const SKIP_FUNCTION_EXAMPLES: &[&str] = &[ "type_def", // Not supported on VM runtime "random_bytes", "uuid_v4", + "uuid_v7", "strip_ansi_escape_codes", "get_hostname", "now", diff --git a/tests/behavior/transforms/remap.toml b/tests/behavior/transforms/remap.toml index fdecabdf7d30a..c4401c9494141 100644 --- a/tests/behavior/transforms/remap.toml +++ b/tests/behavior/transforms/remap.toml @@ -529,6 +529,32 @@ .b == "bar" ''' +[transforms.remap_function_uuid_v7] + inputs = [] + type = "remap" + source = """ + .a = uuid_v7() + + if uuid_v7() != "" { + .b = "bar" + } + """ +[[tests]] + name = "remap_function_uuid_v7" + [tests.input] + insert_at = "remap_function_uuid_v7" + type = "log" + [tests.input.log_fields] + b = "foo" + [[tests.outputs]] + extract_from = "remap_function_uuid_v7" + [[tests.outputs.conditions]] + type = "vrl" + source = ''' + match(string!(.a), 
r'(?i)^[0-9a-f]{8}-[0-9a-f]{4}-7[0-9a-f]{3}-[0-9a-f]{4}-[0-9a-f]{12}$') && + .b == "bar" + ''' + [transforms.remap_function_sha1] inputs = [] type = "remap" diff --git a/website/cue/reference/remap/functions/uuid_v7.cue b/website/cue/reference/remap/functions/uuid_v7.cue new file mode 100644 index 0000000000000..8b6be2040e871 --- /dev/null +++ b/website/cue/reference/remap/functions/uuid_v7.cue @@ -0,0 +1,44 @@ +package metadata + +remap: functions: uuid_v7: { + category: "Random" + description: """ + Generates a random [UUIDv7](\(urls.uuidv7)) string. + """ + + arguments: [ + { + name: "timestamp" + description: "The timestamp used to generate the UUIDv7." + required: false + type: ["timestamp"] + default: "`now()`" + }, + ] + internal_failure_reasons: [] + return: types: ["string"] + + examples: [ + { + title: "Create a UUIDv7 with implicit `now()`" + source: #""" + uuid_v7() + """# + return: "06338364-8305-7b74-8000-de4963503139" + }, + { + title: "Create a UUIDv7 with explicit `now()`" + source: #""" + uuid_v7(now()) + """# + return: "018e29b3-0bea-7f78-8af3-d32ccb1b93c1" + }, + { + title: "Create a UUIDv7 with custom timestamp" + source: #""" + uuid_v7(t'2020-12-30T22:20:53.824727Z') + """# + return: "0176b5bd-5d19-7394-bb60-c21028c6152b" + }, + ] +} diff --git a/website/cue/reference/urls.cue b/website/cue/reference/urls.cue index f7c81a62462a1..9bb04804afedb 100644 --- a/website/cue/reference/urls.cue +++ b/website/cue/reference/urls.cue @@ -527,6 +527,7 @@ urls: { unix_timestamp: "\(wikipedia)/wiki/Unix_time" utf8: "\(wikipedia)/wiki/UTF-8" uuidv4: "\(wikipedia)/wiki/Universally_unique_identifier#Version_4_(random)" + uuidv7: "https://datatracker.ietf.org/doc/html/draft-peabody-dispatch-new-uuid-format-04#name-uuid-version-7" url: "\(wikipedia)/wiki/URL" us_social_security_number: "https://www.ssa.gov/history/ssn/geocard.html" user_agent: "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent" From 4d23e66dc22c499ad8263b937c21800d1b68d1c7 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 22 Mar 2024 16:45:38 -0400 Subject: [PATCH 0178/1491] docs(security): Update TLS docs for `verify_certificate` (#20153) Closes: #20152 Signed-off-by: Jesse Szwedko --- lib/vector-core/src/tls/settings.rs | 6 +++--- website/cue/reference/components/sinks/base/amqp.cue | 6 +++--- .../reference/components/sinks/base/appsignal.cue | 6 +++--- .../components/sinks/base/aws_cloudwatch_logs.cue | 6 +++--- .../components/sinks/base/aws_cloudwatch_metrics.cue | 6 +++--- .../components/sinks/base/aws_kinesis_firehose.cue | 6 +++--- .../components/sinks/base/aws_kinesis_streams.cue | 6 +++--- .../cue/reference/components/sinks/base/aws_s3.cue | 6 +++--- .../cue/reference/components/sinks/base/aws_sns.cue | 6 +++--- .../cue/reference/components/sinks/base/aws_sqs.cue | 6 +++--- .../cue/reference/components/sinks/base/axiom.cue | 6 +++--- .../components/sinks/base/azure_monitor_logs.cue | 6 +++--- .../reference/components/sinks/base/clickhouse.cue | 6 +++--- .../cue/reference/components/sinks/base/databend.cue | 6 +++--- .../components/sinks/base/datadog_events.cue | 6 +++--- .../reference/components/sinks/base/datadog_logs.cue | 6 +++--- .../components/sinks/base/datadog_metrics.cue | 6 +++--- .../components/sinks/base/datadog_traces.cue | 6 +++--- .../components/sinks/base/elasticsearch.cue | 6 +++--- .../sinks/base/gcp_chronicle_unstructured.cue | 6 +++--- .../components/sinks/base/gcp_cloud_storage.cue | 6 +++--- .../reference/components/sinks/base/gcp_pubsub.cue | 6 +++--- 
.../components/sinks/base/gcp_stackdriver_logs.cue | 6 +++--- .../sinks/base/gcp_stackdriver_metrics.cue | 6 +++--- .../reference/components/sinks/base/greptimedb.cue | 6 +++--- website/cue/reference/components/sinks/base/http.cue | 6 +++--- .../reference/components/sinks/base/humio_logs.cue | 6 +++--- .../components/sinks/base/humio_metrics.cue | 6 +++--- .../components/sinks/base/influxdb_logs.cue | 6 +++--- .../components/sinks/base/influxdb_metrics.cue | 6 +++--- .../cue/reference/components/sinks/base/kafka.cue | 6 +++--- website/cue/reference/components/sinks/base/loki.cue | 6 +++--- website/cue/reference/components/sinks/base/mqtt.cue | 6 +++--- website/cue/reference/components/sinks/base/nats.cue | 6 +++--- .../reference/components/sinks/base/papertrail.cue | 6 +++--- .../components/sinks/base/prometheus_exporter.cue | 6 +++--- .../sinks/base/prometheus_remote_write.cue | 6 +++--- .../cue/reference/components/sinks/base/socket.cue | 6 +++--- .../components/sinks/base/splunk_hec_logs.cue | 6 +++--- .../components/sinks/base/splunk_hec_metrics.cue | 6 +++--- .../cue/reference/components/sinks/base/statsd.cue | 6 +++--- .../cue/reference/components/sinks/base/vector.cue | 6 +++--- .../reference/components/sinks/base/websocket.cue | 6 +++--- .../cue/reference/components/sources/base/amqp.cue | 6 +++--- .../components/sources/base/aws_kinesis_firehose.cue | 6 +++--- .../cue/reference/components/sources/base/aws_s3.cue | 12 ++++++------ .../reference/components/sources/base/aws_sqs.cue | 6 +++--- .../components/sources/base/datadog_agent.cue | 6 +++--- .../cue/reference/components/sources/base/dnstap.cue | 6 +++--- .../cue/reference/components/sources/base/fluent.cue | 6 +++--- .../reference/components/sources/base/gcp_pubsub.cue | 6 +++--- .../components/sources/base/heroku_logs.cue | 6 +++--- .../cue/reference/components/sources/base/http.cue | 6 +++--- .../components/sources/base/http_client.cue | 6 +++--- .../components/sources/base/http_server.cue | 6 +++--- .../cue/reference/components/sources/base/kafka.cue | 6 +++--- .../reference/components/sources/base/logstash.cue | 6 +++--- .../cue/reference/components/sources/base/nats.cue | 6 +++--- .../components/sources/base/nginx_metrics.cue | 6 +++--- .../components/sources/base/opentelemetry.cue | 12 ++++++------ .../sources/base/prometheus_pushgateway.cue | 6 +++--- .../sources/base/prometheus_remote_write.cue | 6 +++--- .../components/sources/base/prometheus_scrape.cue | 6 +++--- .../cue/reference/components/sources/base/socket.cue | 6 +++--- .../reference/components/sources/base/splunk_hec.cue | 6 +++--- .../cue/reference/components/sources/base/statsd.cue | 6 +++--- .../cue/reference/components/sources/base/syslog.cue | 6 +++--- .../cue/reference/components/sources/base/vector.cue | 6 +++--- 68 files changed, 210 insertions(+), 210 deletions(-) diff --git a/lib/vector-core/src/tls/settings.rs b/lib/vector-core/src/tls/settings.rs index 5d96c51abbaa6..23725e47496dd 100644 --- a/lib/vector-core/src/tls/settings.rs +++ b/lib/vector-core/src/tls/settings.rs @@ -85,15 +85,15 @@ pub struct TlsSourceConfig { #[derive(Clone, Debug, Default)] #[serde(deny_unknown_fields)] pub struct TlsConfig { - /// Enables certificate verification. + /// Enables certificate verification. For components that create a server, this requires that the + /// client connections have a valid client certificate. For components that initiate requests, + /// this validates that the upstream has a valid certificate. 
/// /// If enabled, certificates must not be expired and must be issued by a trusted /// issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the /// certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and /// so on until the verification process reaches a root certificate. /// - /// Relevant for both incoming and outgoing connections. - /// /// Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. pub verify_certificate: Option, diff --git a/website/cue/reference/components/sinks/base/amqp.cue b/website/cue/reference/components/sinks/base/amqp.cue index 0e7f44a433bdf..333293b890827 100644 --- a/website/cue/reference/components/sinks/base/amqp.cue +++ b/website/cue/reference/components/sinks/base/amqp.cue @@ -374,15 +374,15 @@ base: components: sinks: amqp: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/appsignal.cue b/website/cue/reference/components/sinks/base/appsignal.cue index 0046a653a3188..38530c1dc534b 100644 --- a/website/cue/reference/components/sinks/base/appsignal.cue +++ b/website/cue/reference/components/sinks/base/appsignal.cue @@ -388,15 +388,15 @@ base: components: sinks: appsignal: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
""" required: false diff --git a/website/cue/reference/components/sinks/base/aws_cloudwatch_logs.cue b/website/cue/reference/components/sinks/base/aws_cloudwatch_logs.cue index e1b600c1e89c8..089e8b08cdbc8 100644 --- a/website/cue/reference/components/sinks/base/aws_cloudwatch_logs.cue +++ b/website/cue/reference/components/sinks/base/aws_cloudwatch_logs.cue @@ -773,15 +773,15 @@ base: components: sinks: aws_cloudwatch_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/aws_cloudwatch_metrics.cue b/website/cue/reference/components/sinks/base/aws_cloudwatch_metrics.cue index b487d069d375a..799922d02d5e3 100644 --- a/website/cue/reference/components/sinks/base/aws_cloudwatch_metrics.cue +++ b/website/cue/reference/components/sinks/base/aws_cloudwatch_metrics.cue @@ -462,15 +462,15 @@ base: components: sinks: aws_cloudwatch_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/aws_kinesis_firehose.cue b/website/cue/reference/components/sinks/base/aws_kinesis_firehose.cue index 54226a2e0a0bf..10fbde2a50fba 100644 --- a/website/cue/reference/components/sinks/base/aws_kinesis_firehose.cue +++ b/website/cue/reference/components/sinks/base/aws_kinesis_firehose.cue @@ -722,15 +722,15 @@ base: components: sinks: aws_kinesis_firehose: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/aws_kinesis_streams.cue b/website/cue/reference/components/sinks/base/aws_kinesis_streams.cue index 98a78c69aa3c2..b725956698c9d 100644 --- a/website/cue/reference/components/sinks/base/aws_kinesis_streams.cue +++ b/website/cue/reference/components/sinks/base/aws_kinesis_streams.cue @@ -722,15 +722,15 @@ base: components: sinks: aws_kinesis_streams: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/aws_s3.cue b/website/cue/reference/components/sinks/base/aws_s3.cue index f8b7e29a7a15d..b8cb9be0db9fe 100644 --- a/website/cue/reference/components/sinks/base/aws_s3.cue +++ b/website/cue/reference/components/sinks/base/aws_s3.cue @@ -1032,15 +1032,15 @@ base: components: sinks: aws_s3: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/aws_sns.cue b/website/cue/reference/components/sinks/base/aws_sns.cue index f4a6bf8c98ac8..80fdfd174727d 100644 --- a/website/cue/reference/components/sinks/base/aws_sns.cue +++ b/website/cue/reference/components/sinks/base/aws_sns.cue @@ -651,15 +651,15 @@ base: components: sinks: aws_sns: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. 
For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/aws_sqs.cue b/website/cue/reference/components/sinks/base/aws_sqs.cue index 06267be9baf98..aa19d564cfc27 100644 --- a/website/cue/reference/components/sinks/base/aws_sqs.cue +++ b/website/cue/reference/components/sinks/base/aws_sqs.cue @@ -656,15 +656,15 @@ base: components: sinks: aws_sqs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/axiom.cue b/website/cue/reference/components/sinks/base/axiom.cue index 910ef98a29a39..80e0c4fc0ac19 100644 --- a/website/cue/reference/components/sinks/base/axiom.cue +++ b/website/cue/reference/components/sinks/base/axiom.cue @@ -325,15 +325,15 @@ base: components: sinks: axiom: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
""" required: false diff --git a/website/cue/reference/components/sinks/base/azure_monitor_logs.cue b/website/cue/reference/components/sinks/base/azure_monitor_logs.cue index 66564517290cd..7cc05d03d2ae5 100644 --- a/website/cue/reference/components/sinks/base/azure_monitor_logs.cue +++ b/website/cue/reference/components/sinks/base/azure_monitor_logs.cue @@ -392,15 +392,15 @@ base: components: sinks: azure_monitor_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/clickhouse.cue b/website/cue/reference/components/sinks/base/clickhouse.cue index 631f2c98b47b4..5aea1187eb38d 100644 --- a/website/cue/reference/components/sinks/base/clickhouse.cue +++ b/website/cue/reference/components/sinks/base/clickhouse.cue @@ -456,15 +456,15 @@ base: components: sinks: clickhouse: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/databend.cue b/website/cue/reference/components/sinks/base/databend.cue index 8302802b59f18..f8e7632abe9d3 100644 --- a/website/cue/reference/components/sinks/base/databend.cue +++ b/website/cue/reference/components/sinks/base/databend.cue @@ -530,15 +530,15 @@ base: components: sinks: databend: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/datadog_events.cue b/website/cue/reference/components/sinks/base/datadog_events.cue index 9e09180bc7305..df1b306a7f304 100644 --- a/website/cue/reference/components/sinks/base/datadog_events.cue +++ b/website/cue/reference/components/sinks/base/datadog_events.cue @@ -322,15 +322,15 @@ base: components: sinks: datadog_events: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/datadog_logs.cue b/website/cue/reference/components/sinks/base/datadog_logs.cue index 0b14236535db2..d5c16d40cbb14 100644 --- a/website/cue/reference/components/sinks/base/datadog_logs.cue +++ b/website/cue/reference/components/sinks/base/datadog_logs.cue @@ -425,15 +425,15 @@ base: components: sinks: datadog_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/datadog_metrics.cue b/website/cue/reference/components/sinks/base/datadog_metrics.cue index a0b927d5c3d7c..61057ff040564 100644 --- a/website/cue/reference/components/sinks/base/datadog_metrics.cue +++ b/website/cue/reference/components/sinks/base/datadog_metrics.cue @@ -364,15 +364,15 @@ base: components: sinks: datadog_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. 
For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/datadog_traces.cue b/website/cue/reference/components/sinks/base/datadog_traces.cue index c1b4278e24bfc..e83805625ec21 100644 --- a/website/cue/reference/components/sinks/base/datadog_traces.cue +++ b/website/cue/reference/components/sinks/base/datadog_traces.cue @@ -388,15 +388,15 @@ base: components: sinks: datadog_traces: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/elasticsearch.cue b/website/cue/reference/components/sinks/base/elasticsearch.cue index 2ef047689c975..18386dcf74687 100644 --- a/website/cue/reference/components/sinks/base/elasticsearch.cue +++ b/website/cue/reference/components/sinks/base/elasticsearch.cue @@ -814,15 +814,15 @@ base: components: sinks: elasticsearch: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
""" required: false diff --git a/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue b/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue index c5c8ee0034565..3bddfbd844955 100644 --- a/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue +++ b/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue @@ -612,15 +612,15 @@ base: components: sinks: gcp_chronicle_unstructured: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue b/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue index 956e244d0218c..3965aaae26333 100644 --- a/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue +++ b/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue @@ -807,15 +807,15 @@ base: components: sinks: gcp_cloud_storage: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/gcp_pubsub.cue b/website/cue/reference/components/sinks/base/gcp_pubsub.cue index a1d9a916a1d22..1361b65080694 100644 --- a/website/cue/reference/components/sinks/base/gcp_pubsub.cue +++ b/website/cue/reference/components/sinks/base/gcp_pubsub.cue @@ -603,15 +603,15 @@ base: components: sinks: gcp_pubsub: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/gcp_stackdriver_logs.cue b/website/cue/reference/components/sinks/base/gcp_stackdriver_logs.cue index ba6457243ab07..f028d562c3e15 100644 --- a/website/cue/reference/components/sinks/base/gcp_stackdriver_logs.cue +++ b/website/cue/reference/components/sinks/base/gcp_stackdriver_logs.cue @@ -468,15 +468,15 @@ base: components: sinks: gcp_stackdriver_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/gcp_stackdriver_metrics.cue b/website/cue/reference/components/sinks/base/gcp_stackdriver_metrics.cue index 4d37b29ad7514..6187df388da02 100644 --- a/website/cue/reference/components/sinks/base/gcp_stackdriver_metrics.cue +++ b/website/cue/reference/components/sinks/base/gcp_stackdriver_metrics.cue @@ -389,15 +389,15 @@ base: components: sinks: gcp_stackdriver_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/greptimedb.cue b/website/cue/reference/components/sinks/base/greptimedb.cue index 9d4ce20f8a530..d227be3aa29ff 100644 --- a/website/cue/reference/components/sinks/base/greptimedb.cue +++ b/website/cue/reference/components/sinks/base/greptimedb.cue @@ -341,15 +341,15 @@ base: components: sinks: greptimedb: configuration: { } verify_certificate: { description: """ - Enables certificate verification. 
+ Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/http.cue b/website/cue/reference/components/sinks/base/http.cue index 9bbd2457c0936..9155d24f8e380 100644 --- a/website/cue/reference/components/sinks/base/http.cue +++ b/website/cue/reference/components/sinks/base/http.cue @@ -728,15 +728,15 @@ base: components: sinks: http: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/humio_logs.cue b/website/cue/reference/components/sinks/base/humio_logs.cue index 4a9d6d2ed7c64..c2d13e5c445d9 100644 --- a/website/cue/reference/components/sinks/base/humio_logs.cue +++ b/website/cue/reference/components/sinks/base/humio_logs.cue @@ -677,15 +677,15 @@ base: components: sinks: humio_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
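For a sink such as `http` above, i.e. a component that initiates requests, the option amounts to standard client-side TLS verification. A minimal sketch of how it is set in a Vector TOML config follows; the sink name, URI, and certificate path are illustrative, and `verify_certificate` defaults to `true`:

    [sinks.to_upstream]
    type = "http"
    inputs = ["my_source"]
    uri = "https://collector.example.com:9000/ingest"
    encoding.codec = "json"

    [sinks.to_upstream.tls]
    verify_certificate = true                      # default; reject expired or untrusted certificates
    verify_hostname = true                         # also check the server name against the certificate
    ca_file = "/etc/vector/certs/private-ca.pem"   # trust anchor when the upstream uses a private CA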
""" required: false diff --git a/website/cue/reference/components/sinks/base/humio_metrics.cue b/website/cue/reference/components/sinks/base/humio_metrics.cue index f41836314e6b3..e2e5afc024c5c 100644 --- a/website/cue/reference/components/sinks/base/humio_metrics.cue +++ b/website/cue/reference/components/sinks/base/humio_metrics.cue @@ -464,15 +464,15 @@ base: components: sinks: humio_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/influxdb_logs.cue b/website/cue/reference/components/sinks/base/influxdb_logs.cue index 7b817c559466c..908a6699eb3b1 100644 --- a/website/cue/reference/components/sinks/base/influxdb_logs.cue +++ b/website/cue/reference/components/sinks/base/influxdb_logs.cue @@ -451,15 +451,15 @@ base: components: sinks: influxdb_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/influxdb_metrics.cue b/website/cue/reference/components/sinks/base/influxdb_metrics.cue index 72f09ac1366d3..72106cbf0563e 100644 --- a/website/cue/reference/components/sinks/base/influxdb_metrics.cue +++ b/website/cue/reference/components/sinks/base/influxdb_metrics.cue @@ -395,15 +395,15 @@ base: components: sinks: influxdb_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/kafka.cue b/website/cue/reference/components/sinks/base/kafka.cue index 0de81c9e00bee..e6870a8515562 100644 --- a/website/cue/reference/components/sinks/base/kafka.cue +++ b/website/cue/reference/components/sinks/base/kafka.cue @@ -493,15 +493,15 @@ base: components: sinks: kafka: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/loki.cue b/website/cue/reference/components/sinks/base/loki.cue index 6f151b9d2f3dd..0594110e84048 100644 --- a/website/cue/reference/components/sinks/base/loki.cue +++ b/website/cue/reference/components/sinks/base/loki.cue @@ -730,15 +730,15 @@ base: components: sinks: loki: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/mqtt.cue b/website/cue/reference/components/sinks/base/mqtt.cue index 2a747cd13c3e9..a93de9aa10452 100644 --- a/website/cue/reference/components/sinks/base/mqtt.cue +++ b/website/cue/reference/components/sinks/base/mqtt.cue @@ -380,15 +380,15 @@ base: components: sinks: mqtt: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. 
For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/nats.cue b/website/cue/reference/components/sinks/base/nats.cue index 574768b06ba8e..afd87e7d0f247 100644 --- a/website/cue/reference/components/sinks/base/nats.cue +++ b/website/cue/reference/components/sinks/base/nats.cue @@ -636,15 +636,15 @@ base: components: sinks: nats: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/papertrail.cue b/website/cue/reference/components/sinks/base/papertrail.cue index 428b4eda1cead..bff6fef8a2516 100644 --- a/website/cue/reference/components/sinks/base/papertrail.cue +++ b/website/cue/reference/components/sinks/base/papertrail.cue @@ -366,15 +366,15 @@ base: components: sinks: papertrail: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
""" required: false diff --git a/website/cue/reference/components/sinks/base/prometheus_exporter.cue b/website/cue/reference/components/sinks/base/prometheus_exporter.cue index 3dcb37506116c..6c93e374cdc48 100644 --- a/website/cue/reference/components/sinks/base/prometheus_exporter.cue +++ b/website/cue/reference/components/sinks/base/prometheus_exporter.cue @@ -230,15 +230,15 @@ base: components: sinks: prometheus_exporter: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/prometheus_remote_write.cue b/website/cue/reference/components/sinks/base/prometheus_remote_write.cue index 2ecd4d3899878..7d4d8a0541246 100644 --- a/website/cue/reference/components/sinks/base/prometheus_remote_write.cue +++ b/website/cue/reference/components/sinks/base/prometheus_remote_write.cue @@ -562,15 +562,15 @@ base: components: sinks: prometheus_remote_write: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/socket.cue b/website/cue/reference/components/sinks/base/socket.cue index 23eecf31a3e1c..9fd330b312d93 100644 --- a/website/cue/reference/components/sinks/base/socket.cue +++ b/website/cue/reference/components/sinks/base/socket.cue @@ -426,15 +426,15 @@ base: components: sinks: socket: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/splunk_hec_logs.cue b/website/cue/reference/components/sinks/base/splunk_hec_logs.cue index f7be2f936e603..82df9bb2f658e 100644 --- a/website/cue/reference/components/sinks/base/splunk_hec_logs.cue +++ b/website/cue/reference/components/sinks/base/splunk_hec_logs.cue @@ -745,15 +745,15 @@ base: components: sinks: splunk_hec_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/splunk_hec_metrics.cue b/website/cue/reference/components/sinks/base/splunk_hec_metrics.cue index 111c9ba088159..918401d1ba5f1 100644 --- a/website/cue/reference/components/sinks/base/splunk_hec_metrics.cue +++ b/website/cue/reference/components/sinks/base/splunk_hec_metrics.cue @@ -441,15 +441,15 @@ base: components: sinks: splunk_hec_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/statsd.cue b/website/cue/reference/components/sinks/base/statsd.cue index 3a9273c8b5132..facbefa9f1a6d 100644 --- a/website/cue/reference/components/sinks/base/statsd.cue +++ b/website/cue/reference/components/sinks/base/statsd.cue @@ -193,15 +193,15 @@ base: components: sinks: statsd: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. 
For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/vector.cue b/website/cue/reference/components/sinks/base/vector.cue index 49e4f9dcf8934..7b7f43f478794 100644 --- a/website/cue/reference/components/sinks/base/vector.cue +++ b/website/cue/reference/components/sinks/base/vector.cue @@ -332,15 +332,15 @@ base: components: sinks: vector: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sinks/base/websocket.cue b/website/cue/reference/components/sinks/base/websocket.cue index 3922b1c11a4fe..aa4df23e6e8d3 100644 --- a/website/cue/reference/components/sinks/base/websocket.cue +++ b/website/cue/reference/components/sinks/base/websocket.cue @@ -410,15 +410,15 @@ base: components: sinks: websocket: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
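The repeated "Do NOT set this to `false`" warning exists because disabling verification silently accepts any certificate, including one presented by a man-in-the-middle. When the real obstacle is a self-signed or internal CA, the safer fix is to keep verification on and extend the trust store instead. A hedged sketch of the two options, with illustrative component names and paths:

    # Discouraged: accepts any certificate, valid or not.
    [sinks.dev_only.tls]
    verify_certificate = false

    # Preferred: keep verification on and trust the internal CA explicitly.
    [sinks.production.tls]
    verify_certificate = true
    ca_file = "/etc/vector/certs/internal-ca.pem"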
""" required: false diff --git a/website/cue/reference/components/sources/base/amqp.cue b/website/cue/reference/components/sources/base/amqp.cue index 4660464d47b13..2ad195cb12ecb 100644 --- a/website/cue/reference/components/sources/base/amqp.cue +++ b/website/cue/reference/components/sources/base/amqp.cue @@ -431,15 +431,15 @@ base: components: sources: amqp: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue b/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue index fc119058adfce..2084545bee574 100644 --- a/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue +++ b/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue @@ -503,15 +503,15 @@ base: components: sources: aws_kinesis_firehose: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/aws_s3.cue b/website/cue/reference/components/sources/base/aws_s3.cue index d72f8745ace47..a35d7318669cc 100644 --- a/website/cue/reference/components/sources/base/aws_s3.cue +++ b/website/cue/reference/components/sources/base/aws_s3.cue @@ -651,15 +651,15 @@ base: components: sources: aws_s3: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false @@ -753,15 +753,15 @@ base: components: sources: aws_s3: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/aws_sqs.cue b/website/cue/reference/components/sources/base/aws_sqs.cue index 6cfbc8efea494..6e18067869298 100644 --- a/website/cue/reference/components/sources/base/aws_sqs.cue +++ b/website/cue/reference/components/sources/base/aws_sqs.cue @@ -545,15 +545,15 @@ base: components: sources: aws_sqs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/datadog_agent.cue b/website/cue/reference/components/sources/base/datadog_agent.cue index 48bdbd6d61494..b87253a37fa7a 100644 --- a/website/cue/reference/components/sources/base/datadog_agent.cue +++ b/website/cue/reference/components/sources/base/datadog_agent.cue @@ -482,15 +482,15 @@ base: components: sources: datadog_agent: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/dnstap.cue b/website/cue/reference/components/sources/base/dnstap.cue index ea0df54556c2b..24a20f6ecc7f1 100644 --- a/website/cue/reference/components/sources/base/dnstap.cue +++ b/website/cue/reference/components/sources/base/dnstap.cue @@ -244,15 +244,15 @@ base: components: sources: dnstap: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/fluent.cue b/website/cue/reference/components/sources/base/fluent.cue index b5f07df7da264..d5574a65ca790 100644 --- a/website/cue/reference/components/sources/base/fluent.cue +++ b/website/cue/reference/components/sources/base/fluent.cue @@ -135,15 +135,15 @@ base: components: sources: fluent: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/gcp_pubsub.cue b/website/cue/reference/components/sources/base/gcp_pubsub.cue index 6afa5c9ebd2a9..7e841f24fc57a 100644 --- a/website/cue/reference/components/sources/base/gcp_pubsub.cue +++ b/website/cue/reference/components/sources/base/gcp_pubsub.cue @@ -515,15 +515,15 @@ base: components: sources: gcp_pubsub: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. 
For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/heroku_logs.cue b/website/cue/reference/components/sources/base/heroku_logs.cue index 0318c732d1151..57e4d46d323c5 100644 --- a/website/cue/reference/components/sources/base/heroku_logs.cue +++ b/website/cue/reference/components/sources/base/heroku_logs.cue @@ -464,15 +464,15 @@ base: components: sources: heroku_logs: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/http.cue b/website/cue/reference/components/sources/base/http.cue index 4a6a6deb90fed..0aa83c2e4b6ef 100644 --- a/website/cue/reference/components/sources/base/http.cue +++ b/website/cue/reference/components/sources/base/http.cue @@ -546,15 +546,15 @@ base: components: sources: http: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
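For a source that creates a server, such as `http` above, the same option flips direction: it makes the listener demand a valid client certificate, i.e. mutual TLS. A minimal sketch against an `http_server` source; the address and certificate paths are illustrative:

    [sources.ingest]
    type = "http_server"
    address = "0.0.0.0:8443"

    [sources.ingest.tls]
    enabled = true
    crt_file = "/etc/vector/certs/server.crt"      # certificate presented to connecting clients
    key_file = "/etc/vector/certs/server.key"
    ca_file  = "/etc/vector/certs/client-ca.pem"   # CA used to validate client certificates
    verify_certificate = true                      # reject clients without a valid certificate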
""" required: false diff --git a/website/cue/reference/components/sources/base/http_client.cue b/website/cue/reference/components/sources/base/http_client.cue index 49217412dafa8..6209840f6f728 100644 --- a/website/cue/reference/components/sources/base/http_client.cue +++ b/website/cue/reference/components/sources/base/http_client.cue @@ -487,15 +487,15 @@ base: components: sources: http_client: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/http_server.cue b/website/cue/reference/components/sources/base/http_server.cue index 4c01e96250517..7af3f843fba7e 100644 --- a/website/cue/reference/components/sources/base/http_server.cue +++ b/website/cue/reference/components/sources/base/http_server.cue @@ -546,15 +546,15 @@ base: components: sources: http_server: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/kafka.cue b/website/cue/reference/components/sources/base/kafka.cue index 41ba03077c970..d0cf6c8e8f076 100644 --- a/website/cue/reference/components/sources/base/kafka.cue +++ b/website/cue/reference/components/sources/base/kafka.cue @@ -602,15 +602,15 @@ base: components: sources: kafka: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/logstash.cue b/website/cue/reference/components/sources/base/logstash.cue index de60b7c977e7c..267c4fe2b8df4 100644 --- a/website/cue/reference/components/sources/base/logstash.cue +++ b/website/cue/reference/components/sources/base/logstash.cue @@ -131,15 +131,15 @@ base: components: sources: logstash: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/nats.cue b/website/cue/reference/components/sources/base/nats.cue index fe43ef138a191..dca98ef007cd8 100644 --- a/website/cue/reference/components/sources/base/nats.cue +++ b/website/cue/reference/components/sources/base/nats.cue @@ -506,15 +506,15 @@ base: components: sources: nats: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/nginx_metrics.cue b/website/cue/reference/components/sources/base/nginx_metrics.cue index b63cfe1b60012..9021091a1ffa7 100644 --- a/website/cue/reference/components/sources/base/nginx_metrics.cue +++ b/website/cue/reference/components/sources/base/nginx_metrics.cue @@ -132,15 +132,15 @@ base: components: sources: nginx_metrics: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. 
For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/opentelemetry.cue b/website/cue/reference/components/sources/base/opentelemetry.cue index d56cf10c463be..8b0f42c145f4d 100644 --- a/website/cue/reference/components/sources/base/opentelemetry.cue +++ b/website/cue/reference/components/sources/base/opentelemetry.cue @@ -104,15 +104,15 @@ base: components: sources: opentelemetry: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false @@ -257,15 +257,15 @@ base: components: sources: opentelemetry: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/prometheus_pushgateway.cue b/website/cue/reference/components/sources/base/prometheus_pushgateway.cue index 8f2c3cd400823..5c3f093a1d9b4 100644 --- a/website/cue/reference/components/sources/base/prometheus_pushgateway.cue +++ b/website/cue/reference/components/sources/base/prometheus_pushgateway.cue @@ -156,15 +156,15 @@ base: components: sources: prometheus_pushgateway: configuration: { } verify_certificate: { description: """ - Enables certificate verification. 
+ Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/prometheus_remote_write.cue b/website/cue/reference/components/sources/base/prometheus_remote_write.cue index 10d495c6504b6..45a7d8038dab9 100644 --- a/website/cue/reference/components/sources/base/prometheus_remote_write.cue +++ b/website/cue/reference/components/sources/base/prometheus_remote_write.cue @@ -146,15 +146,15 @@ base: components: sources: prometheus_remote_write: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/prometheus_scrape.cue b/website/cue/reference/components/sources/base/prometheus_scrape.cue index 9e2aa806ce989..13d5d934e4467 100644 --- a/website/cue/reference/components/sources/base/prometheus_scrape.cue +++ b/website/cue/reference/components/sources/base/prometheus_scrape.cue @@ -178,15 +178,15 @@ base: components: sources: prometheus_scrape: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
""" required: false diff --git a/website/cue/reference/components/sources/base/socket.cue b/website/cue/reference/components/sources/base/socket.cue index 1ce0c35487c09..70c21935094d0 100644 --- a/website/cue/reference/components/sources/base/socket.cue +++ b/website/cue/reference/components/sources/base/socket.cue @@ -510,15 +510,15 @@ base: components: sources: socket: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/splunk_hec.cue b/website/cue/reference/components/sources/base/splunk_hec.cue index ddb34bd96e779..44a7604d0064a 100644 --- a/website/cue/reference/components/sources/base/splunk_hec.cue +++ b/website/cue/reference/components/sources/base/splunk_hec.cue @@ -181,15 +181,15 @@ base: components: sources: splunk_hec: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/statsd.cue b/website/cue/reference/components/sources/base/statsd.cue index 5b1c6ea8bff94..74c96b73d032a 100644 --- a/website/cue/reference/components/sources/base/statsd.cue +++ b/website/cue/reference/components/sources/base/statsd.cue @@ -139,15 +139,15 @@ base: components: sources: statsd: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. 
This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/syslog.cue b/website/cue/reference/components/sources/base/syslog.cue index 77ac76e222954..f067f60129510 100644 --- a/website/cue/reference/components/sources/base/syslog.cue +++ b/website/cue/reference/components/sources/base/syslog.cue @@ -175,15 +175,15 @@ base: components: sources: syslog: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. """ required: false diff --git a/website/cue/reference/components/sources/base/vector.cue b/website/cue/reference/components/sources/base/vector.cue index f50d65070a229..affa6c0fd01c8 100644 --- a/website/cue/reference/components/sources/base/vector.cue +++ b/website/cue/reference/components/sources/base/vector.cue @@ -96,15 +96,15 @@ base: components: sources: vector: configuration: { } verify_certificate: { description: """ - Enables certificate verification. + Enables certificate verification. For components that create a server, this requires that the + client connections have a valid client certificate. For components that initiate requests, + this validates that the upstream has a valid certificate. If enabled, certificates must not be expired and must be issued by a trusted issuer. This verification operates in a hierarchical manner, checking that the leaf certificate (the certificate presented by the client/server) is not only valid, but that the issuer of that certificate is also valid, and so on until the verification process reaches a root certificate. - Relevant for both incoming and outgoing connections. - Do NOT set this to `false` unless you understand the risks of not verifying the validity of certificates. 
""" required: false From 4279bf0018055de68f59dffe9532fab96c80d3ac Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fl=C3=A1vio=20Cruz?= Date: Mon, 25 Mar 2024 16:35:53 -0400 Subject: [PATCH 0179/1491] docs(vrl): Add documentation for parse_proto and encode_proto (#20139) * docs(vrl): Add documentation for parse_proto and encode_proto (https://github.com/vectordotdev/vrl/pull/739) * Add spellcheck exception Signed-off-by: Jesse Szwedko * Add ignore pattern for base64 Signed-off-by: Jesse Szwedko * Update pattern Signed-off-by: Jesse Szwedko * Fix pattern Signed-off-by: Jesse Szwedko * another try Signed-off-by: Jesse Szwedko * cue formatting Signed-off-by: Jesse Szwedko * update tests Signed-off-by: Jesse Szwedko * Ignore desc files in syntax check Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko Co-authored-by: Jesse Szwedko --- .github/actions/spelling/patterns.txt | 3 + .../resources/protobuf_descriptor_set.desc | Bin 0 -> 1183 bytes scripts/check-style.sh | 1 + .../remap/functions/encode_proto.cue | 52 ++++++++++++++ .../reference/remap/functions/parse_proto.cue | 65 ++++++++++++++++++ 5 files changed, 121 insertions(+) create mode 100644 lib/vector-vrl/tests/resources/protobuf_descriptor_set.desc create mode 100644 website/cue/reference/remap/functions/encode_proto.cue create mode 100644 website/cue/reference/remap/functions/parse_proto.cue diff --git a/.github/actions/spelling/patterns.txt b/.github/actions/spelling/patterns.txt index 8dd94f985e88c..28109b552e719 100644 --- a/.github/actions/spelling/patterns.txt +++ b/.github/actions/spelling/patterns.txt @@ -225,6 +225,9 @@ user:P@ssw0rd # Ignore base64 encoded values in Prometheus Pushgateway URL paths /.+@base64/.+ +# Ignore base64 encoded values in VRL examples (requires padding to avoid false positives) +"[A-Za-z0-9]*==" + # Ignore punycode \bxn--[-0-9a-z]+ diff --git a/lib/vector-vrl/tests/resources/protobuf_descriptor_set.desc b/lib/vector-vrl/tests/resources/protobuf_descriptor_set.desc new file mode 100644 index 0000000000000000000000000000000000000000..43e7acf6cf7716af7c2a751c2ece9283a5033f47 GIT binary patch literal 1183 zcmaJ<+int36rFQn2KF#890r&|FO;Vyji`Oo_+pbL-fA0S;-j`f^dJ9EkiFDzt}ZWTHf3^34O@W; zL*!mwoxlBX5jv($Ru&45K3~n=hjQ|Y{MYmOI@xT_)AWaVg29q>b791-(-2%bH?GJJjwCl^kPady;(7g9hjb382RHnqshAdE;;h0y{9tx^u!*Fi}O zi|ZAJ-1nQM9I&|~=Qf+Vl1t_xw~Z<^+U9b0or?dYO})%Y*+xTslvQGgZG^g6VeBIe zx)nxx7!50oorDL6r5)&weqPf2L!BKv;Q|N!if8>`RAJb44y$%V9eDh#;yvKgr};lZ z9LY&Gm{cegmH((vDDsONQYaG*4hkjbjgRfQ3Zn;n&U~)C^+COB%?1BpEVC2=Y5{{rq%g!BLa literal 0 HcmV?d00001 diff --git a/scripts/check-style.sh b/scripts/check-style.sh index bf4f18fdf65ad..f905b6f22d34f 100755 --- a/scripts/check-style.sh +++ b/scripts/check-style.sh @@ -33,6 +33,7 @@ for FILE in $(git ls-files); do *ico) continue;; *sig) continue;; *html) continue;; + *desc) continue;; tests/data*) continue;; lib/codecs/tests/data*) continue;; lib/vector-core/tests/data*) continue;; diff --git a/website/cue/reference/remap/functions/encode_proto.cue b/website/cue/reference/remap/functions/encode_proto.cue new file mode 100644 index 0000000000000..253bcf01e9241 --- /dev/null +++ b/website/cue/reference/remap/functions/encode_proto.cue @@ -0,0 +1,52 @@ +package metadata + +remap: functions: encode_proto: { + category: "Codec" + description: """ + Encodes the `value` into a protocol buffer payload. + """ + + arguments: [ + { + name: "value" + description: "The object to convert to a protocol buffer payload." 
+			required:    true
+			type: ["object"]
+		},
+		{
+			name: "desc_file"
+			description: """
+				The path to the protobuf descriptor set file. Must be a literal string.
+
+				This file is the output of protoc -o ...
+				"""
+			required: true
+			type: ["string"]
+		},
+		{
+			name: "message_type"
+			description: """
+				The name of the message type to use for serializing.
+
+				Must be a literal string.
+				"""
+			required: true
+			type: ["string"]
+		},
+	]
+	internal_failure_reasons: [
+		"`desc_file` file does not exist.",
+		"`message_type` message type does not exist in the descriptor file.",
+	]
+	return: types: ["string"]
+
+	examples: [
+		{
+			title: "Encode to proto"
+			source: #"""
+				.payload = encode_base64(encode_proto!({"name": "someone", "phones": [{"number": "123456"}]}, "resources/protobuf_descriptor_set.desc", "test_protobuf.Person"))
+				"""#
+			return: #"Cgdzb21lb25lIggKBjEyMzQ1Ng=="#
+		},
+	]
+}
diff --git a/website/cue/reference/remap/functions/parse_proto.cue b/website/cue/reference/remap/functions/parse_proto.cue
new file mode 100644
index 0000000000000..29557895e0df3
--- /dev/null
+++ b/website/cue/reference/remap/functions/parse_proto.cue
@@ -0,0 +1,65 @@
+package metadata
+
+remap: functions: parse_proto: {
+	category:    "Parse"
+	description: """
+		Parses the `value` as a protocol buffer payload.
+		"""
+	notices: [
+		"""
+			Only proto messages are parsed and returned.
+			""",
+	]
+
+	arguments: [
+		{
+			name:        "value"
+			description: "The protocol buffer payload to parse."
+			required:    true
+			type: ["string"]
+		},
+		{
+			name: "desc_file"
+			description: """
+				The path to the protobuf descriptor set file. Must be a literal string.
+
+				This file is the output of protoc -o ...
+				"""
+			required: true
+			type: ["string"]
+		},
+		{
+			name: "message_type"
+			description: """
+				The name of the message type to use for deserializing.
+
+				Must be a literal string.
+				"""
+			required: true
+			type: ["string"]
+		},
+	]
+	internal_failure_reasons: [
+		"`value` is not a valid proto payload.",
+		"`desc_file` file does not exist.",
+		"`message_type` message type does not exist in the descriptor file.",
+	]
+	return: types: ["object"]
+
+	examples: [
+		{
+			title: "Parse proto"
+			source: #"""
+				parse_proto!(decode_base64!("Cgdzb21lb25lIggKBjEyMzQ1Ng=="), "resources/protobuf_descriptor_set.desc", "test_protobuf.Person")
+				"""#
+			return: {
+				name: "someone"
+				phones: [
+					{
+						number: "123456"
+					},
+				]
+			}
+		},
+	]
+}

From c7dde0312a6d04201eb9641fe8b8cc8967ffd3fe Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ensar=20Saraj=C4=8Di=C4=87?=
Date: Tue, 26 Mar 2024 15:17:16 +0100
Subject: [PATCH 0180/1491] feat(codecs): add options to `length_delimited`
 framing (#20154)

* feat(codecs): add options to `length_delimited` framing

Adds options for `length_delimited` framing to enable configuring the
length of the length field, as well as some other options supported by
`LengthDelimitedCodec`. This adds that support for both encoding and
decoding.
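The options this commit introduces are thin wrappers over the builder of `tokio_util`'s `LengthDelimitedCodec`, which the new `build_codec` helper in the diff below calls. As a minimal, standalone sketch of what the defaults mean (the frame bytes are illustrative and mirror the test vectors added later in this patch):

```rust
use bytes::BytesMut;
use tokio_util::codec::{Decoder, LengthDelimitedCodec};

fn main() {
    // Defaults exposed by this patch: a 4-byte, big-endian length field at
    // offset 0, with whole frames capped at 8 MiB.
    let mut codec = LengthDelimitedCodec::builder()
        .length_field_offset(0)
        .length_field_length(4)
        .max_frame_length(8 * 1024 * 1024)
        .big_endian()
        .new_codec();

    // The header "\x00\x00\x00\x03" announces a 3-byte payload, so the
    // decoder yields exactly one frame, "foo".
    let mut input = BytesMut::from("\x00\x00\x00\x03foo");
    assert_eq!(codec.decode(&mut input).unwrap().unwrap(), "foo");
}
```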
Fixes: #19323 * Add changelog entry --- ...ength_delimited_framing_options.feature.md | 3 + lib/codecs/src/common/length_delimited.rs | 66 +++++++++++ lib/codecs/src/common/mod.rs | 1 + .../src/decoding/framing/length_delimited.rs | 105 +++++++++++------- lib/codecs/src/decoding/mod.rs | 12 +- .../src/encoding/framing/length_delimited.rs | 67 ++++++----- lib/codecs/src/encoding/mod.rs | 14 +-- src/codecs/encoding/config.rs | 4 +- src/components/validation/resources/event.rs | 2 +- src/components/validation/resources/mod.rs | 12 +- .../components/sinks/base/aws_s3.cue | 27 +++++ .../components/sinks/base/azure_blob.cue | 27 +++++ .../components/sinks/base/console.cue | 27 +++++ .../reference/components/sinks/base/file.cue | 27 +++++ .../sinks/base/gcp_cloud_storage.cue | 27 +++++ .../reference/components/sinks/base/http.cue | 27 +++++ .../components/sinks/base/socket.cue | 27 +++++ .../components/sinks/base/webhdfs.cue | 27 +++++ .../components/sources/base/amqp.cue | 27 +++++ .../sources/base/aws_kinesis_firehose.cue | 27 +++++ .../components/sources/base/aws_s3.cue | 27 +++++ .../components/sources/base/aws_sqs.cue | 27 +++++ .../components/sources/base/datadog_agent.cue | 27 +++++ .../components/sources/base/demo_logs.cue | 27 +++++ .../components/sources/base/exec.cue | 27 +++++ .../sources/base/file_descriptor.cue | 27 +++++ .../components/sources/base/gcp_pubsub.cue | 27 +++++ .../components/sources/base/heroku_logs.cue | 27 +++++ .../components/sources/base/http.cue | 27 +++++ .../components/sources/base/http_client.cue | 27 +++++ .../components/sources/base/http_server.cue | 27 +++++ .../components/sources/base/kafka.cue | 27 +++++ .../components/sources/base/nats.cue | 27 +++++ .../components/sources/base/pulsar.cue | 27 +++++ .../components/sources/base/redis.cue | 27 +++++ .../components/sources/base/socket.cue | 27 +++++ .../components/sources/base/stdin.cue | 27 +++++ 37 files changed, 920 insertions(+), 95 deletions(-) create mode 100644 changelog.d/20154_length_delimited_framing_options.feature.md create mode 100644 lib/codecs/src/common/length_delimited.rs diff --git a/changelog.d/20154_length_delimited_framing_options.feature.md b/changelog.d/20154_length_delimited_framing_options.feature.md new file mode 100644 index 0000000000000..7774075bf3aba --- /dev/null +++ b/changelog.d/20154_length_delimited_framing_options.feature.md @@ -0,0 +1,3 @@ +Added support for additional config options for `length_delimited` framing. + +authors: esensar diff --git a/lib/codecs/src/common/length_delimited.rs b/lib/codecs/src/common/length_delimited.rs new file mode 100644 index 0000000000000..ae211beb88d43 --- /dev/null +++ b/lib/codecs/src/common/length_delimited.rs @@ -0,0 +1,66 @@ +use tokio_util::codec::LengthDelimitedCodec; +use vector_config::configurable_component; + +/// Options for building a `LengthDelimitedDecoder` or `LengthDelimitedEncoder`. 
+#[configurable_component] +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct LengthDelimitedCoderOptions { + /// Maximum frame length + #[serde(default = "default_max_frame_length")] + pub max_frame_length: usize, + + /// Number of bytes representing the field length + #[serde(default = "default_length_field_length")] + pub length_field_length: usize, + + /// Number of bytes in the header before the length field + #[serde(default = "default_length_field_offset")] + pub length_field_offset: usize, + + /// Length field byte order (little or big endian) + #[serde(default = "default_length_field_is_big_endian")] + pub length_field_is_big_endian: bool, +} + +const fn default_max_frame_length() -> usize { + 8 * 1_024 * 1_024 +} + +const fn default_length_field_length() -> usize { + 4 +} + +const fn default_length_field_offset() -> usize { + 0 +} + +const fn default_length_field_is_big_endian() -> bool { + true +} + +impl Default for LengthDelimitedCoderOptions { + fn default() -> Self { + Self { + max_frame_length: default_max_frame_length(), + length_field_length: default_length_field_length(), + length_field_offset: default_length_field_offset(), + length_field_is_big_endian: default_length_field_is_big_endian(), + } + } +} + +impl LengthDelimitedCoderOptions { + pub fn build_codec(&self) -> LengthDelimitedCodec { + let mut builder = tokio_util::codec::LengthDelimitedCodec::builder() + .length_field_length(self.length_field_length) + .length_field_offset(self.length_field_offset) + .max_frame_length(self.max_frame_length) + .to_owned(); + if self.length_field_is_big_endian { + builder.big_endian(); + } else { + builder.little_endian(); + }; + builder.new_codec() + } +} diff --git a/lib/codecs/src/common/mod.rs b/lib/codecs/src/common/mod.rs index 230f3b31d2f97..64493b9fe0d1b 100644 --- a/lib/codecs/src/common/mod.rs +++ b/lib/codecs/src/common/mod.rs @@ -1,3 +1,4 @@ //! A collection of common utility features used by both encoding and decoding logic. +pub mod length_delimited; pub mod protobuf; diff --git a/lib/codecs/src/decoding/framing/length_delimited.rs b/lib/codecs/src/decoding/framing/length_delimited.rs index 8a98d9778fd8f..b72442e1ce4db 100644 --- a/lib/codecs/src/decoding/framing/length_delimited.rs +++ b/lib/codecs/src/decoding/framing/length_delimited.rs @@ -1,57 +1,43 @@ use bytes::{Bytes, BytesMut}; -use serde::{Deserialize, Serialize}; +use derivative::Derivative; use tokio_util::codec::Decoder; +use vector_config::configurable_component; + +use crate::common::length_delimited::LengthDelimitedCoderOptions; use super::BoxedFramingError; /// Config used to build a `LengthDelimitedDecoder`. -#[derive(Debug, Clone, Default, Deserialize, Serialize)] -pub struct LengthDelimitedDecoderConfig; +#[configurable_component] +#[derive(Debug, Clone, Derivative)] +#[derivative(Default)] +pub struct LengthDelimitedDecoderConfig { + /// Options for the length delimited decoder. + #[serde(skip_serializing_if = "vector_core::serde::is_default")] + pub length_delimited: LengthDelimitedCoderOptions, +} impl LengthDelimitedDecoderConfig { /// Build the `LengthDelimitedDecoder` from this configuration. pub fn build(&self) -> LengthDelimitedDecoder { - LengthDelimitedDecoder::new() + LengthDelimitedDecoder::new(&self.length_delimited) } } /// A codec for handling bytes sequences whose length is encoded in a frame head. -/// -/// Currently, this expects a length header in 32-bit MSB by default; options to -/// control the format of the header can be added in the future. 
-#[derive(Debug)] +#[derive(Debug, Clone)] pub struct LengthDelimitedDecoder(tokio_util::codec::LengthDelimitedCodec); impl LengthDelimitedDecoder { /// Creates a new `LengthDelimitedDecoder`. - pub fn new() -> Self { - Self(tokio_util::codec::LengthDelimitedCodec::new()) + pub fn new(config: &LengthDelimitedCoderOptions) -> Self { + Self(config.build_codec()) } } impl Default for LengthDelimitedDecoder { fn default() -> Self { - Self::new() - } -} - -impl Clone for LengthDelimitedDecoder { - fn clone(&self) -> Self { - // This has been fixed with https://github.com/tokio-rs/tokio/pull/4089, - // however we are blocked on upgrading to a new release of `tokio-util` - // that includes the `Clone` implementation: - // https://github.com/vectordotdev/vector/issues/11257. - // - // This is an awful implementation for `Clone` since it resets the - // internal state. However, it works for our use case because we - // generally only clone a codec that has not been mutated yet. - // - // Ideally, `tokio_util::codec::LengthDelimitedCodec` should implement - // `Clone` and it doesn't look like it was a deliberate decision to - // leave out the implementation. All of its internal fields implement - // `Clone`, so adding an implementation for `Clone` could be contributed - // to the upstream repo easily by adding it to the `derive` macro. - Self::new() + Self(tokio_util::codec::LengthDelimitedCodec::new()) } } @@ -81,7 +67,44 @@ mod tests { #[test] fn decode_frame() { let mut input = BytesMut::from("\x00\x00\x00\x03foo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); + + assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "foo"); + assert_eq!(decoder.decode(&mut input).unwrap(), None); + } + + #[test] + fn decode_frame_2byte_length() { + let mut input = BytesMut::from("\x00\x03foo"); + let mut decoder = LengthDelimitedDecoder::new(&LengthDelimitedCoderOptions { + length_field_length: 2, + ..Default::default() + }); + + assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "foo"); + assert_eq!(decoder.decode(&mut input).unwrap(), None); + } + + #[test] + fn decode_frame_little_endian() { + let mut input = BytesMut::from("\x03\x00\x00\x00foo"); + let mut decoder = LengthDelimitedDecoder::new(&LengthDelimitedCoderOptions { + length_field_is_big_endian: false, + ..Default::default() + }); + + assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "foo"); + assert_eq!(decoder.decode(&mut input).unwrap(), None); + } + + #[test] + fn decode_frame_2byte_length_with_offset() { + let mut input = BytesMut::from("\x00\x00\x00\x03foo"); + let mut decoder = LengthDelimitedDecoder::new(&LengthDelimitedCoderOptions { + length_field_length: 2, + length_field_offset: 2, + ..Default::default() + }); assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "foo"); assert_eq!(decoder.decode(&mut input).unwrap(), None); @@ -90,7 +113,7 @@ mod tests { #[test] fn decode_frame_ignore_unexpected_eof() { let mut input = BytesMut::from("\x00\x00\x00\x03fo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode(&mut input).unwrap(), None); } @@ -98,7 +121,7 @@ mod tests { #[test] fn decode_frame_ignore_exceeding_bytes_without_header() { let mut input = BytesMut::from("\x00\x00\x00\x03fooo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "foo"); 
assert_eq!(decoder.decode(&mut input).unwrap(), None); @@ -107,7 +130,7 @@ mod tests { #[test] fn decode_frame_ignore_missing_header() { let mut input = BytesMut::from("foo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode(&mut input).unwrap(), None); } @@ -115,7 +138,7 @@ mod tests { #[test] fn decode_frames() { let mut input = BytesMut::from("\x00\x00\x00\x03foo\x00\x00\x00\x03bar"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "foo"); assert_eq!(decoder.decode(&mut input).unwrap().unwrap(), "bar"); @@ -125,7 +148,7 @@ mod tests { #[test] fn decode_eof_frame() { let mut input = BytesMut::from("\x00\x00\x00\x03foo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode_eof(&mut input).unwrap().unwrap(), "foo"); assert_eq!(decoder.decode_eof(&mut input).unwrap(), None); @@ -134,7 +157,7 @@ mod tests { #[test] fn decode_eof_frame_unexpected_eof() { let mut input = BytesMut::from("\x00\x00\x00\x03fo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert!(decoder.decode_eof(&mut input).is_err()); } @@ -142,7 +165,7 @@ mod tests { #[test] fn decode_eof_frame_exceeding_bytes_without_header() { let mut input = BytesMut::from("\x00\x00\x00\x03fooo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode_eof(&mut input).unwrap().unwrap(), "foo"); assert!(decoder.decode_eof(&mut input).is_err()); @@ -151,7 +174,7 @@ mod tests { #[test] fn decode_eof_frame_missing_header() { let mut input = BytesMut::from("foo"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert!(decoder.decode_eof(&mut input).is_err()); } @@ -159,7 +182,7 @@ mod tests { #[test] fn decode_eof_frames() { let mut input = BytesMut::from("\x00\x00\x00\x03foo\x00\x00\x00\x03bar"); - let mut decoder = LengthDelimitedDecoder::new(); + let mut decoder = LengthDelimitedDecoder::default(); assert_eq!(decoder.decode_eof(&mut input).unwrap().unwrap(), "foo"); assert_eq!(decoder.decode_eof(&mut input).unwrap().unwrap(), "bar"); diff --git a/lib/codecs/src/decoding/mod.rs b/lib/codecs/src/decoding/mod.rs index e3d5e41337760..2db0dfcfcb327 100644 --- a/lib/codecs/src/decoding/mod.rs +++ b/lib/codecs/src/decoding/mod.rs @@ -89,7 +89,7 @@ pub enum FramingConfig { CharacterDelimited(CharacterDelimitedDecoderConfig), /// Byte frames which are prefixed by an unsigned big-endian 32-bit integer indicating the length. - LengthDelimited, + LengthDelimited(LengthDelimitedDecoderConfig), /// Byte frames which are delimited by a newline character. 
     NewlineDelimited(NewlineDelimitedDecoderConfig),
 
@@ -113,8 +113,8 @@ impl From<CharacterDelimitedDecoderConfig> for FramingConfig {
 }
 
 impl From<LengthDelimitedDecoderConfig> for FramingConfig {
-    fn from(_: LengthDelimitedDecoderConfig) -> Self {
-        Self::LengthDelimited
+    fn from(config: LengthDelimitedDecoderConfig) -> Self {
+        Self::LengthDelimited(config)
     }
 }
 
@@ -136,9 +136,7 @@ impl FramingConfig {
         match self {
             FramingConfig::Bytes => Framer::Bytes(BytesDecoderConfig.build()),
             FramingConfig::CharacterDelimited(config) => Framer::CharacterDelimited(config.build()),
-            FramingConfig::LengthDelimited => {
-                Framer::LengthDelimited(LengthDelimitedDecoderConfig.build())
-            }
+            FramingConfig::LengthDelimited(config) => Framer::LengthDelimited(config.build()),
             FramingConfig::NewlineDelimited(config) => Framer::NewlineDelimited(config.build()),
             FramingConfig::OctetCounting(config) => Framer::OctetCounting(config.build()),
         }
@@ -333,7 +331,7 @@ impl DeserializerConfig {
     pub fn default_stream_framing(&self) -> FramingConfig {
         match self {
             DeserializerConfig::Avro { .. } => FramingConfig::Bytes,
-            DeserializerConfig::Native => FramingConfig::LengthDelimited,
+            DeserializerConfig::Native => FramingConfig::LengthDelimited(Default::default()),
             DeserializerConfig::Bytes
             | DeserializerConfig::Json(_)
             | DeserializerConfig::Gelf(_)
diff --git a/lib/codecs/src/encoding/framing/length_delimited.rs b/lib/codecs/src/encoding/framing/length_delimited.rs
index f25cdba4edf7d..1e450073b9496 100644
--- a/lib/codecs/src/encoding/framing/length_delimited.rs
+++ b/lib/codecs/src/encoding/framing/length_delimited.rs
@@ -1,59 +1,43 @@
 use bytes::BytesMut;
-use serde::{Deserialize, Serialize};
+use derivative::Derivative;
 use tokio_util::codec::{Encoder, LengthDelimitedCodec};
+use vector_config::configurable_component;
+
+use crate::common::length_delimited::LengthDelimitedCoderOptions;
 
 use super::BoxedFramingError;
 
 /// Config used to build a `LengthDelimitedEncoder`.
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct LengthDelimitedEncoderConfig;
+#[configurable_component]
+#[derive(Debug, Clone, Derivative, Eq, PartialEq)]
+#[derivative(Default)]
+pub struct LengthDelimitedEncoderConfig {
+    /// Options for the length delimited encoder.
+    #[serde(skip_serializing_if = "vector_core::serde::is_default")]
+    pub length_delimited: LengthDelimitedCoderOptions,
+}
 
 impl LengthDelimitedEncoderConfig {
-    /// Creates a `LengthDelimitedEncoderConfig`.
-    pub const fn new() -> Self {
-        Self
-    }
-
     /// Build the `LengthDelimitedEncoder` from this configuration.
     pub fn build(&self) -> LengthDelimitedEncoder {
-        LengthDelimitedEncoder::new()
+        LengthDelimitedEncoder::new(&self.length_delimited)
     }
 }
 
 /// An encoder for handling bytes that are delimited by a length header.
-#[derive(Debug)]
+#[derive(Debug, Clone)]
 pub struct LengthDelimitedEncoder(LengthDelimitedCodec);
 
 impl LengthDelimitedEncoder {
-    /// Creates a `LengthDelimitedEncoder`.
-    pub fn new() -> Self {
-        Self(LengthDelimitedCodec::new())
+    /// Creates a new `LengthDelimitedEncoder`.
+    pub fn new(config: &LengthDelimitedCoderOptions) -> Self {
+        Self(config.build_codec())
     }
 }
 
 impl Default for LengthDelimitedEncoder {
     fn default() -> Self {
-        Self::new()
-    }
-}
-
-impl Clone for LengthDelimitedEncoder {
-    fn clone(&self) -> Self {
-        // This has been fixed with https://github.com/tokio-rs/tokio/pull/4089,
-        // however we are blocked on upgrading to a new release of `tokio-util`
-        // that includes the `Clone` implementation:
-        // https://github.com/vectordotdev/vector/issues/11257.
-        //
-        // This is an awful implementation for `Clone` since it resets the
-        // internal state. However, it works for our use case because we
-        // generally only clone a codec that has not been mutated yet.
-        //
-        // Ideally, `tokio_util::codec::LengthDelimitedCodec` should implement
-        // `Clone` and it doesn't look like it was a deliberate decision to
-        // leave out the implementation. All of its internal fields implement
-        // `Clone`, so adding an implementation for `Clone` could be contributed
-        // to the upstream repo easily by adding it to the `derive` macro.
+        Self(LengthDelimitedCodec::new())
     }
 }
 
@@ -73,11 +57,24 @@ mod tests {
 
     #[test]
     fn encode() {
-        let mut codec = LengthDelimitedEncoder::new();
+        let mut codec = LengthDelimitedEncoder::default();
 
         let mut buffer = BytesMut::from("abc");
         codec.encode((), &mut buffer).unwrap();
 
         assert_eq!(&buffer[..], b"\0\0\0\x03abc");
     }
+
+    #[test]
+    fn encode_2byte_length() {
+        let mut codec = LengthDelimitedEncoder::new(&LengthDelimitedCoderOptions {
+            length_field_length: 2,
+            ..Default::default()
+        });
+
+        let mut buffer = BytesMut::from("abc");
+        codec.encode((), &mut buffer).unwrap();
+
+        assert_eq!(&buffer[..], b"\0\x03abc");
+    }
 }
diff --git a/lib/codecs/src/encoding/mod.rs b/lib/codecs/src/encoding/mod.rs
index 2f28c27ec7bf8..0d766f73e4d17 100644
--- a/lib/codecs/src/encoding/mod.rs
+++ b/lib/codecs/src/encoding/mod.rs
@@ -67,7 +67,7 @@ pub enum FramingConfig {
     /// Event data is prefixed with its length in bytes.
     ///
     /// The prefix is a 32-bit unsigned integer, little endian.
-    LengthDelimited,
+    LengthDelimited(LengthDelimitedEncoderConfig),
 
     /// Event data is delimited by a newline (LF) character.
     NewlineDelimited,
@@ -86,8 +86,8 @@ impl From<CharacterDelimitedEncoderConfig> for FramingConfig {
 }
 
 impl From<LengthDelimitedEncoderConfig> for FramingConfig {
-    fn from(_: LengthDelimitedEncoderConfig) -> Self {
-        Self::LengthDelimited
+    fn from(config: LengthDelimitedEncoderConfig) -> Self {
+        Self::LengthDelimited(config)
     }
 }
 
@@ -103,9 +103,7 @@ impl FramingConfig {
         match self {
             FramingConfig::Bytes => Framer::Bytes(BytesEncoderConfig.build()),
             FramingConfig::CharacterDelimited(config) => Framer::CharacterDelimited(config.build()),
-            FramingConfig::LengthDelimited => {
-                Framer::LengthDelimited(LengthDelimitedEncoderConfig.build())
-            }
+            FramingConfig::LengthDelimited(config) => Framer::LengthDelimited(config.build()),
             FramingConfig::NewlineDelimited => {
                 Framer::NewlineDelimited(NewlineDelimitedEncoderConfig.build())
             }
@@ -360,7 +358,9 @@ impl SerializerConfig {
         // [1]: https://avro.apache.org/docs/1.11.1/specification/_print/#message-framing
         SerializerConfig::Avro { .. }
         | SerializerConfig::Native
-        | SerializerConfig::Protobuf(_) => FramingConfig::LengthDelimited,
+        | SerializerConfig::Protobuf(_) => {
+            FramingConfig::LengthDelimited(LengthDelimitedEncoderConfig::default())
+        }
         SerializerConfig::Csv(_)
         | SerializerConfig::Gelf
         | SerializerConfig::Json(_)
diff --git a/src/codecs/encoding/config.rs b/src/codecs/encoding/config.rs
index d16ec78b627e4..4a124f8987060 100644
--- a/src/codecs/encoding/config.rs
+++ b/src/codecs/encoding/config.rs
@@ -105,7 +105,7 @@ impl EncodingConfigWithFraming {
                 SinkType::MessageBased => CharacterDelimitedEncoder::new(b',').into(),
             },
             (None, Serializer::Avro(_) | Serializer::Native(_)) => {
-                LengthDelimitedEncoder::new().into()
+                LengthDelimitedEncoder::default().into()
             }
             (None, Serializer::Gelf(_)) => {
                 // Graylog/GELF always uses null byte delimiter on TCP, see
@@ -115,7 +115,7 @@ impl EncodingConfigWithFraming {
             (None, Serializer::Protobuf(_)) => {
                 // Protobuf uses length-delimited messages, see:
                 // https://developers.google.com/protocol-buffers/docs/techniques#streaming
-                LengthDelimitedEncoder::new().into()
+                LengthDelimitedEncoder::default().into()
             }
             (
                 None,
diff --git a/src/components/validation/resources/event.rs b/src/components/validation/resources/event.rs
index 6d4ce818225ab..fa4564a6076c0 100644
--- a/src/components/validation/resources/event.rs
+++ b/src/components/validation/resources/event.rs
@@ -173,7 +173,7 @@ pub fn encode_test_event(
     // versa.
     let mut alt_encoder = if encoder.serializer().supports_json() {
         Encoder::<Framer>::new(
-            LengthDelimitedEncoder::new().into(),
+            LengthDelimitedEncoder::default().into(),
             LogfmtSerializer::new().into(),
         )
     } else {
diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs
index a9b39a560988c..c605d182d8a16 100644
--- a/src/components/validation/resources/mod.rs
+++ b/src/components/validation/resources/mod.rs
@@ -182,7 +182,11 @@ fn decoder_framing_to_encoding_framer(framing: &decoding::FramingConfig) -> enco
                 },
             })
         }
-        decoding::FramingConfig::LengthDelimited => encoding::FramingConfig::LengthDelimited,
+        decoding::FramingConfig::LengthDelimited(config) => {
+            encoding::FramingConfig::LengthDelimited(encoding::LengthDelimitedEncoderConfig {
+                length_delimited: config.length_delimited.clone(),
+            })
+        }
         decoding::FramingConfig::NewlineDelimited(_) => encoding::FramingConfig::NewlineDelimited,
         // TODO: There's no equivalent octet counting framer for encoding... although
         // there's no particular reason that would make it hard to write.
@@ -228,7 +232,11 @@ fn encoder_framing_to_decoding_framer(framing: encoding::FramingConfig) -> decod
                 },
             })
         }
-        encoding::FramingConfig::LengthDelimited => decoding::FramingConfig::LengthDelimited,
+        encoding::FramingConfig::LengthDelimited(config) => {
+            decoding::FramingConfig::LengthDelimited(decoding::LengthDelimitedDecoderConfig {
+                length_delimited: config.length_delimited.clone(),
+            })
+        }
         encoding::FramingConfig::NewlineDelimited => {
             decoding::FramingConfig::NewlineDelimited(Default::default())
         }
diff --git a/website/cue/reference/components/sinks/base/aws_s3.cue b/website/cue/reference/components/sinks/base/aws_s3.cue
index b8cb9be0db9fe..22cd5fe05c844 100644
--- a/website/cue/reference/components/sinks/base/aws_s3.cue
+++ b/website/cue/reference/components/sinks/base/aws_s3.cue
@@ -619,6 +619,33 @@ base: components: sinks: aws_s3: configuration: {
 					type: uint: {}
 				}
 			}
+			length_delimited: {
+				description:   "Options for the length delimited decoder."
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/azure_blob.cue b/website/cue/reference/components/sinks/base/azure_blob.cue index 30b3d9771a9a8..6500fd4fe7a04 100644 --- a/website/cue/reference/components/sinks/base/azure_blob.cue +++ b/website/cue/reference/components/sinks/base/azure_blob.cue @@ -436,6 +436,33 @@ base: components: sinks: azure_blob: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/console.cue b/website/cue/reference/components/sinks/base/console.cue index 21c24d5356026..465669604aa88 100644 --- a/website/cue/reference/components/sinks/base/console.cue +++ b/website/cue/reference/components/sinks/base/console.cue @@ -287,6 +287,33 @@ base: components: sinks: console: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/file.cue b/website/cue/reference/components/sinks/base/file.cue index db12805aee1d3..a84ac357af819 100644 --- a/website/cue/reference/components/sinks/base/file.cue +++ b/website/cue/reference/components/sinks/base/file.cue @@ -307,6 +307,33 @@ base: components: sinks: file: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue b/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue index 3965aaae26333..7a8ff63ec1b6f 100644 --- a/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue +++ b/website/cue/reference/components/sinks/base/gcp_cloud_storage.cue @@ -490,6 +490,33 @@ base: components: sinks: gcp_cloud_storage: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/http.cue b/website/cue/reference/components/sinks/base/http.cue index 9155d24f8e380..bc886fd29ec61 100644 --- a/website/cue/reference/components/sinks/base/http.cue +++ b/website/cue/reference/components/sinks/base/http.cue @@ -400,6 +400,33 @@ base: components: sinks: http: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/socket.cue b/website/cue/reference/components/sinks/base/socket.cue index 9fd330b312d93..ed9622fa25366 100644 --- a/website/cue/reference/components/sinks/base/socket.cue +++ b/website/cue/reference/components/sinks/base/socket.cue @@ -300,6 +300,33 @@ base: components: sinks: socket: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sinks/base/webhdfs.cue b/website/cue/reference/components/sinks/base/webhdfs.cue index e0099827499f6..e570de139f4b4 100644 --- a/website/cue/reference/components/sinks/base/webhdfs.cue +++ b/website/cue/reference/components/sinks/base/webhdfs.cue @@ -369,6 +369,33 @@ base: components: sinks: webhdfs: configuration: { type: uint: {} } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sources/base/amqp.cue b/website/cue/reference/components/sources/base/amqp.cue index 2ad195cb12ecb..667b607d88044 100644 --- a/website/cue/reference/components/sources/base/amqp.cue +++ b/website/cue/reference/components/sources/base/amqp.cue @@ -309,6 +309,33 @@ base: components: sources: amqp: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue b/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue index 2084545bee574..12418c29fc811 100644 --- a/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue +++ b/website/cue/reference/components/sources/base/aws_kinesis_firehose.cue @@ -307,6 +307,33 @@ base: components: sources: aws_kinesis_firehose: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/aws_s3.cue b/website/cue/reference/components/sources/base/aws_s3.cue index a35d7318669cc..6bb6087373d07 100644 --- a/website/cue/reference/components/sources/base/aws_s3.cue +++ b/website/cue/reference/components/sources/base/aws_s3.cue @@ -407,6 +407,33 @@ base: components: sources: aws_s3: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/aws_sqs.cue b/website/cue/reference/components/sources/base/aws_sqs.cue index 6e18067869298..7573e19e67068 100644 --- a/website/cue/reference/components/sources/base/aws_sqs.cue +++ b/website/cue/reference/components/sources/base/aws_sqs.cue @@ -411,6 +411,33 @@ base: components: sources: aws_sqs: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/datadog_agent.cue b/website/cue/reference/components/sources/base/datadog_agent.cue index b87253a37fa7a..66f0027427344 100644 --- a/website/cue/reference/components/sources/base/datadog_agent.cue +++ b/website/cue/reference/components/sources/base/datadog_agent.cue @@ -304,6 +304,33 @@ base: components: sources: datadog_agent: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/demo_logs.cue b/website/cue/reference/components/sources/base/demo_logs.cue index 0c94d69dbc63b..3d2a892210d8c 100644 --- a/website/cue/reference/components/sources/base/demo_logs.cue +++ b/website/cue/reference/components/sources/base/demo_logs.cue @@ -300,6 +300,33 @@ base: components: sources: demo_logs: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/exec.cue b/website/cue/reference/components/sources/base/exec.cue index 884c9da76b1ec..0369f085bb761 100644 --- a/website/cue/reference/components/sources/base/exec.cue +++ b/website/cue/reference/components/sources/base/exec.cue @@ -288,6 +288,33 @@ base: components: sources: exec: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sources/base/file_descriptor.cue b/website/cue/reference/components/sources/base/file_descriptor.cue index 66476a0b8616e..63387d882f290 100644 --- a/website/cue/reference/components/sources/base/file_descriptor.cue +++ b/website/cue/reference/components/sources/base/file_descriptor.cue @@ -266,6 +266,33 @@ base: components: sources: file_descriptor: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sources/base/gcp_pubsub.cue b/website/cue/reference/components/sources/base/gcp_pubsub.cue index 7e841f24fc57a..cd655c4c3dd0b 100644 --- a/website/cue/reference/components/sources/base/gcp_pubsub.cue +++ b/website/cue/reference/components/sources/base/gcp_pubsub.cue @@ -343,6 +343,33 @@ base: components: sources: gcp_pubsub: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/heroku_logs.cue b/website/cue/reference/components/sources/base/heroku_logs.cue index 57e4d46d323c5..33f6be7194f6a 100644 --- a/website/cue/reference/components/sources/base/heroku_logs.cue +++ b/website/cue/reference/components/sources/base/heroku_logs.cue @@ -301,6 +301,33 @@ base: components: sources: heroku_logs: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/http.cue b/website/cue/reference/components/sources/base/http.cue index 0aa83c2e4b6ef..99c9289c9ec49 100644 --- a/website/cue/reference/components/sources/base/http.cue +++ b/website/cue/reference/components/sources/base/http.cue @@ -316,6 +316,33 @@ base: components: sources: http: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sources/base/http_client.cue b/website/cue/reference/components/sources/base/http_client.cue index 6209840f6f728..38485fb373cd2 100644 --- a/website/cue/reference/components/sources/base/http_client.cue +++ b/website/cue/reference/components/sources/base/http_client.cue @@ -304,6 +304,33 @@ base: components: sources: http_client: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/http_server.cue b/website/cue/reference/components/sources/base/http_server.cue index 7af3f843fba7e..a69a2a27faf79 100644 --- a/website/cue/reference/components/sources/base/http_server.cue +++ b/website/cue/reference/components/sources/base/http_server.cue @@ -316,6 +316,33 @@ base: components: sources: http_server: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sources/base/kafka.cue b/website/cue/reference/components/sources/base/kafka.cue index d0cf6c8e8f076..f431399ba5c37 100644 --- a/website/cue/reference/components/sources/base/kafka.cue +++ b/website/cue/reference/components/sources/base/kafka.cue @@ -337,6 +337,33 @@ base: components: sources: kafka: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/nats.cue b/website/cue/reference/components/sources/base/nats.cue index dca98ef007cd8..36cf215f3f55a 100644 --- a/website/cue/reference/components/sources/base/nats.cue +++ b/website/cue/reference/components/sources/base/nats.cue @@ -356,6 +356,33 @@ base: components: sources: nats: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/pulsar.cue b/website/cue/reference/components/sources/base/pulsar.cue index f157854d6f1fa..f12f205944955 100644 --- a/website/cue/reference/components/sources/base/pulsar.cue +++ b/website/cue/reference/components/sources/base/pulsar.cue @@ -367,6 +367,33 @@ base: components: sources: pulsar: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/redis.cue b/website/cue/reference/components/sources/base/redis.cue index 6c894b9666036..039ba3c0fe1b1 100644 --- a/website/cue/reference/components/sources/base/redis.cue +++ b/website/cue/reference/components/sources/base/redis.cue @@ -274,6 +274,33 @@ base: components: sources: redis: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." 
+ relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: false diff --git a/website/cue/reference/components/sources/base/socket.cue b/website/cue/reference/components/sources/base/socket.cue index 70c21935094d0..55cbd60be49d7 100644 --- a/website/cue/reference/components/sources/base/socket.cue +++ b/website/cue/reference/components/sources/base/socket.cue @@ -276,6 +276,33 @@ base: components: sources: socket: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." required: true diff --git a/website/cue/reference/components/sources/base/stdin.cue b/website/cue/reference/components/sources/base/stdin.cue index e1adde0392ebb..7e5fdefe6c44e 100644 --- a/website/cue/reference/components/sources/base/stdin.cue +++ b/website/cue/reference/components/sources/base/stdin.cue @@ -259,6 +259,33 @@ base: components: sources: stdin: configuration: { } } } + length_delimited: { + description: "Options for the length delimited decoder." + relevant_when: "method = \"length_delimited\"" + required: true + type: object: options: { + length_field_is_big_endian: { + description: "Length field byte order (little or big endian)" + required: false + type: bool: default: true + } + length_field_length: { + description: "Number of bytes representing the field length" + required: false + type: uint: default: 4 + } + length_field_offset: { + description: "Number of bytes in the header before the length field" + required: false + type: uint: default: 0 + } + max_frame_length: { + description: "Maximum frame length" + required: false + type: uint: default: 8388608 + } + } + } method: { description: "The framing method." 
required: true From a4bb02556f5ca3ade33d99f2f62f79c596fb6ee4 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Mon, 25 Mar 2024 12:54:43 -0700 Subject: [PATCH 0181/1491] chore(releasing): Prepare v0.37.0 release Signed-off-by: Jesse Szwedko --- .github/actions/spelling/expect.txt | 10 +- .../18445_armv6_binaries.enhancement.md | 3 - .../18863_k8s_logs_rotate_wait.enhancement.md | 3 - changelog.d/19813_add_mqtt_sink.feature.md | 3 - ...press_onezone_storage_class.enhancement.md | 3 - changelog.d/19892_dnstap_over_tcp.breaking.md | 3 - ...21_missing_dns_record_types.enhancement.md | 3 - changelog.d/19937_edns_ede_support.feature.md | 3 - ...0006_improve_greptimedb_tls.enhancement.md | 3 - .../20032_gzip_zlib_performance.fix.md | 3 - ...0035_dnstap_lowercase_hostnames.feature.md | 4 - .../20051_permit_origin_tcp.feature.md | 3 - .../20054_custom_mmdb_types.feature.md | 5 - changelog.d/aws_credentials_process.fix.md | 2 - .../clickhouse_acknowledgements.breaking.md | 1 - ...atadog_agent_ddtags_parsing.enhancement.md | 4 - ...tadog_agent_source_internal_metrics.fix.md | 3 - changelog.d/dd_logs_semantic_meaning.fix.md | 7 - .../deprecate_strict_env_vars.breaking.md | 4 - changelog.d/elasticsearch_log.fix.md | 3 - .../file_ignored_header_bytes_default.fix.md | 1 - ...include_paths_glob_patterns.enhancement.md | 3 - changelog.d/pulsar_source.feature.md | 3 - .../remove_remap_file_filter.enhancement.md | 2 - .../splunk_hec_logs_raw_timestamp.fix.md | 1 - .../splunk_hec_received_event_bytes.fix.md | 3 - distribution/install.sh | 2 +- .../2023-03-26-0-37-0-upgrade-guide.md | 52 +++ website/content/en/releases/0.37.0.md | 4 + .../administration/interfaces/kubectl.cue | 2 +- website/cue/reference/releases/0.37.0.cue | 379 ++++++++++++++++++ website/cue/reference/versions.cue | 1 + 32 files changed, 440 insertions(+), 86 deletions(-) delete mode 100644 changelog.d/18445_armv6_binaries.enhancement.md delete mode 100644 changelog.d/18863_k8s_logs_rotate_wait.enhancement.md delete mode 100644 changelog.d/19813_add_mqtt_sink.feature.md delete mode 100644 changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md delete mode 100644 changelog.d/19892_dnstap_over_tcp.breaking.md delete mode 100644 changelog.d/19921_missing_dns_record_types.enhancement.md delete mode 100644 changelog.d/19937_edns_ede_support.feature.md delete mode 100644 changelog.d/20006_improve_greptimedb_tls.enhancement.md delete mode 100644 changelog.d/20032_gzip_zlib_performance.fix.md delete mode 100644 changelog.d/20035_dnstap_lowercase_hostnames.feature.md delete mode 100644 changelog.d/20051_permit_origin_tcp.feature.md delete mode 100644 changelog.d/20054_custom_mmdb_types.feature.md delete mode 100644 changelog.d/aws_credentials_process.fix.md delete mode 100644 changelog.d/clickhouse_acknowledgements.breaking.md delete mode 100644 changelog.d/datadog_agent_ddtags_parsing.enhancement.md delete mode 100644 changelog.d/datadog_agent_source_internal_metrics.fix.md delete mode 100644 changelog.d/dd_logs_semantic_meaning.fix.md delete mode 100644 changelog.d/deprecate_strict_env_vars.breaking.md delete mode 100644 changelog.d/elasticsearch_log.fix.md delete mode 100644 changelog.d/file_ignored_header_bytes_default.fix.md delete mode 100644 changelog.d/include_paths_glob_patterns.enhancement.md delete mode 100644 changelog.d/pulsar_source.feature.md delete mode 100644 changelog.d/remove_remap_file_filter.enhancement.md delete mode 100644 changelog.d/splunk_hec_logs_raw_timestamp.fix.md delete mode 100644 
changelog.d/splunk_hec_received_event_bytes.fix.md create mode 100644 website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md create mode 100644 website/content/en/releases/0.37.0.md create mode 100644 website/cue/reference/releases/0.37.0.cue diff --git a/.github/actions/spelling/expect.txt b/.github/actions/spelling/expect.txt index 1c974b4b4d847..743eb16c52620 100644 --- a/.github/actions/spelling/expect.txt +++ b/.github/actions/spelling/expect.txt @@ -8,7 +8,6 @@ Acq AEAD agentpayload aimd -akx allowerased amka amping @@ -32,7 +31,6 @@ aqf architecting archivable ARNOTAREALIDD -arshiyasolei asdf asdfasdf assertverify @@ -220,7 +218,6 @@ customizability customtype cwl Dailywarehousing -dalegaard daschl dashmap datadir @@ -288,7 +285,6 @@ Doop downcasted droptest dsl -dsmith dstat dstport dtype @@ -319,6 +315,7 @@ endler enduml eni enp +Ensar enumdecl enumflags ENVARS @@ -328,7 +325,6 @@ EOL'ed Err'ing errorf esb -esensar esque etheus etl @@ -416,7 +412,6 @@ fsevent fslock FSTRM fsyncdata -fuchsnj fullhuman futs fuzzcheck @@ -818,7 +813,6 @@ overaligned overalignment owo oyaml -pablosichert pageheap paq parallelizable @@ -951,6 +945,7 @@ samerole sameuser sandboxed sandboxing +Saraj sccache schemaless schemars @@ -1249,7 +1244,6 @@ wtime wtimeouts wtr wurstmeister -wwang xact xcatsy Xcg diff --git a/changelog.d/18445_armv6_binaries.enhancement.md b/changelog.d/18445_armv6_binaries.enhancement.md deleted file mode 100644 index 5b92362cc711c..0000000000000 --- a/changelog.d/18445_armv6_binaries.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -ARMv6 builds are now provided as binaries, `.deb` archives and container images (alpine and debian). - -authors: wtaylor diff --git a/changelog.d/18863_k8s_logs_rotate_wait.enhancement.md b/changelog.d/18863_k8s_logs_rotate_wait.enhancement.md deleted file mode 100644 index 042a8a6118cfd..0000000000000 --- a/changelog.d/18863_k8s_logs_rotate_wait.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -A new configuration option `rotate_wait_secs` was added to the `file` and `kubernetes_logs` sources. `rotate_wait_secs` determines for how long Vector keeps trying to read from a log file that has been deleted. Once that time span has expired, Vector stops reading from and closes the file descriptor of the deleted file, thus allowing the OS to reclaim the storage space occupied by the file. - -authors: syedriko diff --git a/changelog.d/19813_add_mqtt_sink.feature.md b/changelog.d/19813_add_mqtt_sink.feature.md deleted file mode 100644 index ba1c105fc4540..0000000000000 --- a/changelog.d/19813_add_mqtt_sink.feature.md +++ /dev/null @@ -1,3 +0,0 @@ -Vector can send logs to a MQTT broker through the new mqtt sink. - -authors: astro zamazan4ik StephenWakely mladedav diff --git a/changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md b/changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md deleted file mode 100644 index f27511bf2abe7..0000000000000 --- a/changelog.d/19891_s3_sink_add_express_onezone_storage_class.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -A new `EXPRESS_ONEZONE` option was added to `storage_class` for `aws_s3` sink. - -authors: siavashs diff --git a/changelog.d/19892_dnstap_over_tcp.breaking.md b/changelog.d/19892_dnstap_over_tcp.breaking.md deleted file mode 100644 index bb9e1dca6353b..0000000000000 --- a/changelog.d/19892_dnstap_over_tcp.breaking.md +++ /dev/null @@ -1,3 +0,0 @@ -Added support for TCP mode for DNSTAP source. 
As the `dnstap` source now supports multiple socket types, you will need to update your configuration to specify which type - either `mode: unix` for the existing unix sockets mode or `mode: tcp` for the new tcp mode. - -authors: esensar diff --git a/changelog.d/19921_missing_dns_record_types.enhancement.md b/changelog.d/19921_missing_dns_record_types.enhancement.md deleted file mode 100644 index d8eca30498423..0000000000000 --- a/changelog.d/19921_missing_dns_record_types.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -Added support for more DNS record types (HINFO, CSYNC, OPT, DNSSEC CDS, DNSSEC CDNSKEY, DNSSEC KEY) - -authors: esensar diff --git a/changelog.d/19937_edns_ede_support.feature.md b/changelog.d/19937_edns_ede_support.feature.md deleted file mode 100644 index 5366ea3a0e7dd..0000000000000 --- a/changelog.d/19937_edns_ede_support.feature.md +++ /dev/null @@ -1,3 +0,0 @@ -Added support for parsing EDNS EDE (Extended DNS errors) options - -authors: esensar diff --git a/changelog.d/20006_improve_greptimedb_tls.enhancement.md b/changelog.d/20006_improve_greptimedb_tls.enhancement.md deleted file mode 100644 index 9f92f508948fe..0000000000000 --- a/changelog.d/20006_improve_greptimedb_tls.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -Improves TLS support for greptimedb sink. `tls.ca_file` is no longer required for enabling TLS. Just use `tls = {}` in toml configuration when your server is hosting a public CA. - -authors: sunng87 diff --git a/changelog.d/20032_gzip_zlib_performance.fix.md b/changelog.d/20032_gzip_zlib_performance.fix.md deleted file mode 100644 index ef58f4384c7d7..0000000000000 --- a/changelog.d/20032_gzip_zlib_performance.fix.md +++ /dev/null @@ -1,3 +0,0 @@ -Fixed gzip and zlib compression performance degradation introduced in v0.34.0. - -authors: Hexta diff --git a/changelog.d/20035_dnstap_lowercase_hostnames.feature.md b/changelog.d/20035_dnstap_lowercase_hostnames.feature.md deleted file mode 100644 index 197a189cf2231..0000000000000 --- a/changelog.d/20035_dnstap_lowercase_hostnames.feature.md +++ /dev/null @@ -1,4 +0,0 @@ -Added `lowercase_hostnames` option to `dnstap` source, to filter hostnames in DNS records and -lowercase them for consistency. - -authors: esensar diff --git a/changelog.d/20051_permit_origin_tcp.feature.md b/changelog.d/20051_permit_origin_tcp.feature.md deleted file mode 100644 index e8e2cecb9f5b3..0000000000000 --- a/changelog.d/20051_permit_origin_tcp.feature.md +++ /dev/null @@ -1,3 +0,0 @@ -Added support for `permit_origin` config option for all sources with TCP mode (`fluent`, `logstash`, `statsd`, `syslog`). - -authors: esensar diff --git a/changelog.d/20054_custom_mmdb_types.feature.md b/changelog.d/20054_custom_mmdb_types.feature.md deleted file mode 100644 index 08e2f7917e4c5..0000000000000 --- a/changelog.d/20054_custom_mmdb_types.feature.md +++ /dev/null @@ -1,5 +0,0 @@ -Added support for custom MMDB enrichment tables. GeoIP enrichment tables will no longer fall back to -City type for unknown types and will instead return an error. New MMDB enrichment table should be -used for such types. - -authors: esensar diff --git a/changelog.d/aws_credentials_process.fix.md b/changelog.d/aws_credentials_process.fix.md deleted file mode 100644 index 11fa14dbbed4f..0000000000000 --- a/changelog.d/aws_credentials_process.fix.md +++ /dev/null @@ -1,2 +0,0 @@ -AWS components again support the use of `credential_process` in AWS config files to load AWS -credentials from an external process. This was a regression in v0.36.0. 
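As a concrete illustration of the `dnstap` mode change noted in the changelog entry above, here is a minimal sketch of a TCP-mode configuration. The component name, address, and exact option names are assumptions for illustration, not taken from this patch; check the `dnstap` source reference for the authoritative options.

```toml
# Hypothetical sketch: the dnstap source now requires an explicit `mode`.
[sources.my_dnstap]
type    = "dnstap"
mode    = "tcp"            # "unix" keeps the previous Unix socket behavior
address = "0.0.0.0:9000"   # assumed listen-address option for TCP mode
```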
diff --git a/changelog.d/clickhouse_acknowledgements.breaking.md b/changelog.d/clickhouse_acknowledgements.breaking.md deleted file mode 100644 index 5171179e20582..0000000000000 --- a/changelog.d/clickhouse_acknowledgements.breaking.md +++ /dev/null @@ -1 +0,0 @@ -When end-to-end acknowledgments are enabled, outgoing requests in the ClickHouse sink that encounter 500-level errors will now correctly report an errored (retriable) status, rather than a rejected (permanent) status, to Vector's clients. diff --git a/changelog.d/datadog_agent_ddtags_parsing.enhancement.md b/changelog.d/datadog_agent_ddtags_parsing.enhancement.md deleted file mode 100644 index e68bc346d45cc..0000000000000 --- a/changelog.d/datadog_agent_ddtags_parsing.enhancement.md +++ /dev/null @@ -1,4 +0,0 @@ -The `datadog_agent` source now contains a configuration setting `parse_ddtags`, which is disabled by default. - -When enabled, the `ddtags` field (a comma separated list of key-value strings) is parsed and expanded into an -object in the event. diff --git a/changelog.d/datadog_agent_source_internal_metrics.fix.md b/changelog.d/datadog_agent_source_internal_metrics.fix.md deleted file mode 100644 index eb15082275713..0000000000000 --- a/changelog.d/datadog_agent_source_internal_metrics.fix.md +++ /dev/null @@ -1,3 +0,0 @@ -The `datadog_agent` source now correctly calculates the value for the metric `component_received_event_bytes_total` before enriching the event with Vector metadata. - -The source also now adheres to the Component Specification by incrementing `component_errors_total` when a request succeeded in decompression but JSON parsing failed. diff --git a/changelog.d/dd_logs_semantic_meaning.fix.md b/changelog.d/dd_logs_semantic_meaning.fix.md deleted file mode 100644 index 3700593631c09..0000000000000 --- a/changelog.d/dd_logs_semantic_meaning.fix.md +++ /dev/null @@ -1,7 +0,0 @@ -The `datadog_logs` sink no longer requires a semantic meaning input definition for `message` and `timestamp` fields. - -While the Datadog logs intake does handle these fields if they are present, they aren't required. - -The only impact is that configurations which enable the [Log Namespace](https://vector.dev/blog/log-namespacing/) feature and use a Source input to this sink which does not itself define a semantic meaning for `message` and `timestamp`, no longer need to manually set the semantic meaning for these two fields through a remap transform. - -Existing configurations that utilize the Legacy namespace are unaffected, as are configurations using the Vector namespace where the input source has defined the `message` and `timestamp` semantic meanings. diff --git a/changelog.d/deprecate_strict_env_vars.breaking.md b/changelog.d/deprecate_strict_env_vars.breaking.md deleted file mode 100644 index 5b66ffeae9e38..0000000000000 --- a/changelog.d/deprecate_strict_env_vars.breaking.md +++ /dev/null @@ -1,4 +0,0 @@ -The default of `--strict-env-vars` has been changed to `true`. This option has been deprecated. In -a future version it will be removed and Vector will have the behavior it currently has when set -to `true` which is that missing environment variables will cause Vector to fail to start up with an -error instead of a warning. Set `--strict-env-vars=false` to opt into deprecated behavior. 
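To make the `--strict-env-vars` change above concrete, here is a hedged sketch of the `${VAR-default}` interpolation syntax that lets Vector start even when a variable is unset. The component name and paths are illustrative only.

```toml
# Hypothetical sketch: LOG_DIR falls back to /var/log/app when unset, so
# startup succeeds even with strict environment variable handling enabled.
[sources.app_logs]
type    = "file"
include = ["${LOG_DIR-/var/log/app}/*.log"]
```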
diff --git a/changelog.d/elasticsearch_log.fix.md b/changelog.d/elasticsearch_log.fix.md deleted file mode 100644 index 4867a4954b3cc..0000000000000 --- a/changelog.d/elasticsearch_log.fix.md +++ /dev/null @@ -1,3 +0,0 @@ -An error log for the Elasticsearch sink that logs out the response body when errors occur. This was -a log that used to exist in Vector v0.24.0, but was removed in v0.25.0. Some users were depending on -this log to count the number of errors so it was re-added. diff --git a/changelog.d/file_ignored_header_bytes_default.fix.md b/changelog.d/file_ignored_header_bytes_default.fix.md deleted file mode 100644 index 442d848e2da6a..0000000000000 --- a/changelog.d/file_ignored_header_bytes_default.fix.md +++ /dev/null @@ -1 +0,0 @@ -The `fingerprint.ignored_header_bytes` option on the `file` source now has a default of `0`. diff --git a/changelog.d/include_paths_glob_patterns.enhancement.md b/changelog.d/include_paths_glob_patterns.enhancement.md deleted file mode 100644 index 96cc54fc037ef..0000000000000 --- a/changelog.d/include_paths_glob_patterns.enhancement.md +++ /dev/null @@ -1,3 +0,0 @@ -A new configuration option include_paths_glob_patterns has been introduced in the Kubernetes Logs source. This option works alongside the existing exclude_paths_glob_patterns to help narrow down the selection of logs to be considered. include_paths_glob_patterns is evaluated before exclude_paths_glob_patterns. - -authors: syedriko diff --git a/changelog.d/pulsar_source.feature.md b/changelog.d/pulsar_source.feature.md deleted file mode 100644 index f0697dc184460..0000000000000 --- a/changelog.d/pulsar_source.feature.md +++ /dev/null @@ -1,3 +0,0 @@ -A new source has been added that can receive logs from Apache Pulsar. - -authors: zamazan4ik WarmSnowy diff --git a/changelog.d/remove_remap_file_filter.enhancement.md b/changelog.d/remove_remap_file_filter.enhancement.md deleted file mode 100644 index 495920e7c6b0e..0000000000000 --- a/changelog.d/remove_remap_file_filter.enhancement.md +++ /dev/null @@ -1,2 +0,0 @@ -The `remap` component no longer filters out the file contents from error messages when the VRL -program is passed in via the `file` option. diff --git a/changelog.d/splunk_hec_logs_raw_timestamp.fix.md b/changelog.d/splunk_hec_logs_raw_timestamp.fix.md deleted file mode 100644 index b5b553d486380..0000000000000 --- a/changelog.d/splunk_hec_logs_raw_timestamp.fix.md +++ /dev/null @@ -1 +0,0 @@ -The `splunk_hec_logs` sink when configured with the `raw` endpoint target, was removing the timestamp from the event. This was due to a bug in the handling of the `auto_extract_timestamp` configuration option, which is only supposed to apply to the `event` endpoint target. diff --git a/changelog.d/splunk_hec_received_event_bytes.fix.md b/changelog.d/splunk_hec_received_event_bytes.fix.md deleted file mode 100644 index 842b2abefd507..0000000000000 --- a/changelog.d/splunk_hec_received_event_bytes.fix.md +++ /dev/null @@ -1,3 +0,0 @@ -We now correctly calculate the estimated JSON size in bytes for the metric `component_received_event_bytes_total` for the `splunk_hec` source. - -Previously this was being calculated after event enrichment. It is now calculated before enrichment, for both `raw` and `event` endpoints. diff --git a/distribution/install.sh b/distribution/install.sh index b069b3dddd1f4..c4a20dbd47a1b 100755 --- a/distribution/install.sh +++ b/distribution/install.sh @@ -13,7 +13,7 @@ set -u # If PACKAGE_ROOT is unset or empty, default it. 
 PACKAGE_ROOT="${PACKAGE_ROOT:-"https://packages.timber.io/vector"}"
 # If VECTOR_VERSION is unset or empty, default it.
-VECTOR_VERSION="${VECTOR_VERSION:-"0.36.1"}"
+VECTOR_VERSION="${VECTOR_VERSION:-"0.37.0"}"
 _divider="--------------------------------------------------------------------------------"
 _prompt=">>>"
 _indent=" "
diff --git a/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md b/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md
new file mode 100644
index 0000000000000..a556c0236e2a9
--- /dev/null
+++ b/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md
@@ -0,0 +1,52 @@
+---
+date: "2023-03-26"
+title: "0.37 Upgrade Guide"
+description: "An upgrade guide that addresses breaking changes in 0.37.0"
+authors: ["jszwedko"]
+release: "0.37.0"
+hide_on_release_notes: false
+badges:
+  type: breaking change
+---
+
+Vector's 0.37.0 release includes **breaking changes**:
+
+1. [Vector defaults to requiring non-optional environment variables](#strict-env-vars)
+
+and **potentially impactful changes**:
+
+1. [`geoip` enrichment tables no longer fall back to the `GeoIP2-City` type](#geoip)
+
+We cover them below to help you upgrade quickly:
+
+## Upgrade guide
+
+### Breaking Changes
+
+#### Vector defaults to requiring non-optional environment variables {#strict-env-vars}
+
+Vector can [interpolate environment variables into its
+configuration](/docs/reference/configuration/#environment-variables) as a means of templating
+configuration. Now, if an environment variable is undefined, Vector outputs an error and stops
+unless the `-` interpolation syntax is used to provide a default. You can opt out of this behavior
+by setting the `--strict-env-vars` flag (or the `VECTOR_STRICT_ENV_VARS` environment variable) to
+`false` when running Vector; however, this behavior is deprecated and the opt-out flag will be
+removed in a future release. If you want Vector to start up even when an environment variable is
+undefined, you can provide a default like `${FOO-bar}` to default `FOO` to `bar` if it is unset.
+
+The reasoning for this change is that users often missed the undefined variable warning and were
+confused when Vector did not behave as expected. In particular, this happens when using `$1` in
+regex capture groups in VRL without realizing it needs to be escaped as `$$1` to avoid
+interpolation.
+
+### Potentially impactful changes
+
+#### `geoip` enrichment tables no longer fall back to the `GeoIP2-City` type {#geoip}
+
+With the addition of the new `mmdb` enrichment table type, the existing `geoip2` enrichment table
+type no longer falls back to assuming the database is a `GeoIP2-City` database when the type is
+unknown. We don't expect this to affect anyone, but the fix is to ensure that the MMDB database
+type is correctly set to `GeoIP2-City` if the database is a GeoIP2-City database.
+
+For custom MMDBs that are not GeoIP2 databases, the new `mmdb` enrichment table type can be used
+instead to read the MMDB records exactly as they are.
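To illustrate the `mmdb` enrichment table type mentioned at the end of the upgrade guide above, here is a minimal sketch, assuming the table accepts a `path` the way the existing `geoip2` table does; the table name and path are hypothetical.

```toml
# Hypothetical sketch: reads records from an arbitrary MMDB file as-is,
# without assuming a GeoIP2-City schema.
[enrichment_tables.custom_mmdb]
type = "mmdb"
path = "/etc/vector/data/custom.mmdb"
```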
diff --git a/website/content/en/releases/0.37.0.md b/website/content/en/releases/0.37.0.md
new file mode 100644
index 0000000000000..33b3cc47ef6db
--- /dev/null
+++ b/website/content/en/releases/0.37.0.md
@@ -0,0 +1,4 @@
+---
+title: Vector v0.37.0 release notes
+weight: 21
+---
diff --git a/website/cue/reference/administration/interfaces/kubectl.cue b/website/cue/reference/administration/interfaces/kubectl.cue
index 883c1d554a9a7..fd952aee1e0be 100644
--- a/website/cue/reference/administration/interfaces/kubectl.cue
+++ b/website/cue/reference/administration/interfaces/kubectl.cue
@@ -19,7 +19,7 @@ administration: interfaces: kubectl: {
 	role_implementations: [Name=string]: {
 		commands: {
 			_deployment_variant:       string
-			_vector_version:           "0.36"
+			_vector_version:           "0.37"
 			_namespace:                string | *"vector"
 			_controller_resource_type: string
 			_controller_resource_name: string | *_deployment_variant
diff --git a/website/cue/reference/releases/0.37.0.cue b/website/cue/reference/releases/0.37.0.cue
new file mode 100644
index 0000000000000..29420f87dac0c
--- /dev/null
+++ b/website/cue/reference/releases/0.37.0.cue
@@ -0,0 +1,379 @@
+package metadata
+
+releases: "0.37.0": {
+	date:     "2024-03-26"
+	codename: ""
+
+	whats_next: []
+
+	description: """
+		The Vector team is pleased to announce version 0.37.0!
+
+		Be sure to check out the [upgrade guide](/highlights/2023-03-26-0-37-0-upgrade-guide) for
+		breaking changes in this release.
+
+		In addition to the usual enhancements and bug fixes, this release also includes:
+
+		- ARMv6 builds of Vector, including Debian archives and container images. The Debian
+		  archives, for now, are only hosted as release assets and are not published to the Debian
+		  repository. We are looking into publishing them there in the future. No RPM packages are
+		  built at this time. Kudos to [@wtaylor](https://github.com/wtaylor) for this contribution.
+		- A new `mqtt` sink to emit events from Vector using the MQTT protocol. A source is [in the
+		  works](https://github.com/vectordotdev/vector/pull/19931). Kudos to the contributors who
+		  pushed this forward: [@astro](https://github.com/astro),
+		  [@zamazan4ik](https://github.com/zamazan4ik), and [@mladedav](https://github.com/mladedav).
+		- The `dnstap` source now supports reading events over TCP. Kudos to
+		  [@esensar](https://github.com/esensar) for this contribution.
+		- A new `mmdb` enrichment table type for loading arbitrary MMDB databases, not just GeoIP
+		  ones. Kudos to [@esensar](https://github.com/esensar) for this contribution.
+		- A new `pulsar` source for receiving events from Pulsar. Kudos to
+		  [@zamazan4ik](https://github.com/zamazan4ik) and [@WarmSnowy](https://github.com/WarmSnowy)
+		  for this contribution.
+		"""
+
+	changelog: [
+		{
+			type: "enhancement"
+			description: """
+				ARMv6 builds are now provided as binaries, `.deb` archives, and container images (alpine and debian).
+				"""
+			contributors: ["wtaylor"]
+		},
+		{
+			type: "enhancement"
+			description: """
+				A new configuration option `rotate_wait_secs` was added to the `file` and `kubernetes_logs` sources. `rotate_wait_secs` determines how long Vector keeps trying to read from a log file that has been deleted. Once that time span has expired, Vector stops reading from and closes the file descriptor of the deleted file, thus allowing the OS to reclaim the storage space occupied by the file.
+				"""
+			contributors: ["syedriko"]
+		},
+		{
+			type: "feat"
+			description: """
+				Vector can send logs to an MQTT broker through the new `mqtt` sink.
+ """ + contributors: ["astro", "zamazan4ik", "StephenWakely", "mladedav"] + }, + { + type: "enhancement" + description: """ + A new `EXPRESS_ONEZONE` option was added to `storage_class` for `aws_s3` sink. + """ + contributors: ["siavashs"] + }, + { + type: "chore" + description: """ + Added support for TCP mode for DNSTAP source. As the `dnstap` source now supports multiple socket types, you will need to update your configuration to specify which type - either `mode: unix` for the existing unix sockets mode or `mode: tcp` for the new tcp mode. + """ + contributors: ["esensar"] + }, + { + type: "enhancement" + description: """ + Added support for more DNS record types (HINFO, CSYNC, OPT, DNSSEC CDS, DNSSEC CDNSKEY, DNSSEC KEY) + """ + contributors: ["esensar"] + }, + { + type: "feat" + description: """ + Added support for parsing EDNS EDE (Extended DNS errors) options + """ + contributors: ["esensar"] + }, + { + type: "enhancement" + description: """ + Improves TLS support for greptimedb sink. `tls.ca_file` is no longer required for enabling TLS. Just use `tls = {}` in toml configuration when your server is hosting a public CA. + """ + contributors: ["sunng87"] + }, + { + type: "fix" + description: """ + Fixed gzip and zlib compression performance degradation introduced in v0.34.0. + """ + contributors: ["Hexta"] + }, + { + type: "feat" + description: """ + Added `lowercase_hostnames` option to `dnstap` source, to filter hostnames in DNS records and + lowercase them for consistency. + """ + contributors: ["esensar"] + }, + { + type: "feat" + description: """ + Added support for `permit_origin` config option for all sources with TCP mode (`fluent`, `logstash`, `statsd`, `syslog`). + """ + contributors: ["esensar"] + }, + { + type: "feat" + description: """ + Added support for custom MMDB enrichment tables. GeoIP enrichment tables will no longer fall back to + City type for unknown types and will instead return an error. New MMDB enrichment table should be + used for such types. + """ + contributors: ["esensar"] + }, + { + type: "chore" + description: """ + When end-to-end acknowledgments are enabled, outgoing requests in the ClickHouse sink that encounter 500-level errors will now correctly report an errored (retriable) status, rather than a rejected (permanent) status, to Vector's clients. + """ + }, + { + type: "enhancement" + description: """ + The `datadog_agent` source now contains a configuration setting `parse_ddtags`, which is disabled by default. + + When enabled, the `ddtags` field (a comma separated list of key-value strings) is parsed and expanded into an + object in the event. + """ + }, + { + type: "fix" + description: """ + The `datadog_agent` source now correctly calculates the value for the metric `component_received_event_bytes_total` before enriching the event with Vector metadata. + + The source also now adheres to the Component Specification by incrementing `component_errors_total` when a request succeeded in decompression but JSON parsing failed. + """ + }, + { + type: "fix" + description: """ + The `datadog_logs` sink no longer requires a semantic meaning input definition for `message` and `timestamp` fields. + + While the Datadog logs intake does handle these fields if they are present, they aren't required. 
+
+				The only impact is that configurations that enable the [Log Namespace](https://vector.dev/blog/log-namespacing/) feature and use a source input to this sink that does not itself define a semantic meaning for `message` and `timestamp` no longer need to manually set the semantic meaning for these two fields through a remap transform.
+
+				Existing configurations that utilize the Legacy namespace are unaffected, as are configurations using the Vector namespace where the input source has defined the `message` and `timestamp` semantic meanings.
+				"""
+		},
+		{
+			type: "chore"
+			description: """
+				The default of `--strict-env-vars` has been changed to `true`, and the option has been deprecated. In
+				a future version it will be removed, and Vector will keep the behavior it currently has when the flag
+				is set to `true`: missing environment variables cause Vector to fail to start up with an
+				error instead of a warning. Set `--strict-env-vars=false` to opt into the deprecated behavior.
+				"""
+		},
+		{
+			type: "fix"
+			description: """
+				Re-added an error log for the Elasticsearch sink that logs the response body when errors occur. This
+				log existed in Vector v0.24.0 but was removed in v0.25.0. Some users were depending on
+				it to count the number of errors.
+				"""
+		},
+		{
+			type: "fix"
+			description: """
+				The `fingerprint.ignored_header_bytes` option on the `file` source now has a default of `0`.
+				"""
+		},
+		{
+			type: "enhancement"
+			description: """
+				A new configuration option `include_paths_glob_patterns` has been introduced in the Kubernetes Logs source. This option works alongside the existing `exclude_paths_glob_patterns` to help narrow down the selection of logs to be considered. `include_paths_glob_patterns` is evaluated before `exclude_paths_glob_patterns`.
+				"""
+			contributors: ["syedriko"]
+		},
+		{
+			type: "feat"
+			description: """
+				A new source has been added that can receive logs from Apache Pulsar.
+				"""
+			contributors: ["zamazan4ik", "WarmSnowy"]
+		},
+		{
+			type: "enhancement"
+			description: """
+				The `remap` component no longer filters out the file contents from error messages when the VRL
+				program is passed in via the `file` option.
+				"""
+		},
+		{
+			type: "fix"
+			description: """
+				The `splunk_hec_logs` sink, when configured with the `raw` endpoint target, was removing the timestamp from the event. This was due to a bug in the handling of the `auto_extract_timestamp` configuration option, which is only supposed to apply to the `event` endpoint target.
+				"""
+		},
+		{
+			type: "fix"
+			description: """
+				We now correctly calculate the estimated JSON size in bytes for the metric `component_received_event_bytes_total` for the `splunk_hec` source.
+
+				Previously, this was calculated after event enrichment. It is now calculated before enrichment, for both `raw` and `event` endpoints.
+ """ + }, + ] + + commits: [ + {sha: "7c0072689fba435640e26e63d46343064c477b0f", date: "2024-02-13 01:04:56 UTC", description: "Add a note that GH usernames shouldn't start with @", pr_number: 19859, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 2, deletions_count: 0}, + {sha: "8d897af2f621a0402678141a3a94e1196ea56037", date: "2024-02-13 01:17:57 UTC", description: "Fix API address example", pr_number: 19858, scopes: ["api"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "e0d5f1e4dbd433165c525e941c95dd8eea2ebee6", date: "2024-02-13 10:09:28 UTC", description: "Bump the aws group with 2 updates", pr_number: 19848, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 6, deletions_count: 6}, + {sha: "1637e566c08f5dc2b09e5c85ad49a93762647c06", date: "2024-02-13 02:25:25 UTC", description: "Bump manifists to chart v0.30.2", pr_number: 19860, scopes: ["kubernetes"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 18, insertions_count: 28, deletions_count: 24}, + {sha: "79ab38947f5869afe154f83cf15868c01b43ac4b", date: "2024-02-13 08:20:01 UTC", description: "expose VRL deserializer options", pr_number: 19862, scopes: ["codecs"], type: "fix", breaking_change: false, author: "Pavlos Rontidis", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "e8401c473fb0334c36ac91a411392f1ac7ae9ce5", date: "2024-02-13 09:06:15 UTC", description: "expose test utils (feature flag)", pr_number: 19863, scopes: ["tests"], type: "chore", breaking_change: false, author: "Pavlos Rontidis", files_count: 3, insertions_count: 9, deletions_count: 9}, + {sha: "99c2207932894d362975fa81000b4819d5e7bb52", date: "2024-02-13 22:43:08 UTC", description: "Bump chrono-tz from 0.8.5 to 0.8.6", pr_number: 19866, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 5, insertions_count: 6, deletions_count: 6}, + {sha: "c654207d5a41c8ec9fff4ac497ac3cec7a40c55c", date: "2024-02-13 22:43:19 UTC", description: "Bump crc32fast from 1.3.2 to 1.4.0", pr_number: 19867, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "0922c3f67f57e2d8c29029a91e1f60ab4d699f50", date: "2024-02-14 06:43:34 UTC", description: "Bump ratatui from 0.26.0 to 0.26.1", pr_number: 19868, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "0a89cb13714876da089ea09d4881e98a890b3976", date: "2024-02-14 07:04:04 UTC", description: "add rotate_wait_ms config option", pr_number: 18904, scopes: ["file source", "kubernetes_logs source"], type: "enhancement", breaking_change: false, author: "Sergey Yedrikov", files_count: 10, insertions_count: 94, deletions_count: 6}, + {sha: "f88316cce7665c6dbf83a81a8261fa126b50542e", date: "2024-02-15 10:41:26 UTC", description: "add MQTT sink", pr_number: 19813, scopes: ["mqtt sink"], type: "feat", breaking_change: false, author: "David Mládek", files_count: 24, insertions_count: 1328, deletions_count: 2}, + {sha: "a935c30785ad50adfea5a3344e2fb3673fffb73c", date: "2024-02-15 03:44:56 UTC", description: "Bump manifests to chart v0.36.0", pr_number: 19877, scopes: ["kubernetes"], type: "feat", breaking_change: false, author: "Jesse Szwedko", 
files_count: 18, insertions_count: 22, deletions_count: 22}, + {sha: "b91be34a3c890505e7faeaeffa4a1bea54944ebf", date: "2024-02-15 04:33:09 UTC", description: "Bump development version to v0.37.0", pr_number: 19874, scopes: ["releasing"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 2, deletions_count: 2}, + {sha: "342b48c0f7c0aa1147a3a2a1b00089a482436560", date: "2024-02-16 01:42:36 UTC", description: "Bump darling from 0.20.5 to 0.20.6", pr_number: 19882, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 13, deletions_count: 13}, + {sha: "2f1c7850fbc039a894f51b844e919adf2fdc925d", date: "2024-02-16 08:02:24 UTC", description: "RFC for return expression", pr_number: 19828, scopes: ["vrl"], type: "enhancement", breaking_change: false, author: "David Mládek", files_count: 1, insertions_count: 74, deletions_count: 0}, + {sha: "4f0dbf4d2792dc266e0b9ea74158a6a96a1adccb", date: "2024-02-16 08:49:31 UTC", description: "Bump openssl-src from 300.2.2+3.2.1 to 300.2.3+3.2.1", pr_number: 19869, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "c89099768af4ee63542dcb8c039e35bd7a6f2832", date: "2024-02-16 05:01:47 UTC", description: "expose more test utils", pr_number: 19885, scopes: ["tests"], type: "chore", breaking_change: false, author: "Pavlos Rontidis", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "f920675d2658d5ea410847390d7ba3be435a932a", date: "2024-02-16 10:09:29 UTC", description: "Bump enumflags2 from 0.7.8 to 0.7.9", pr_number: 19870, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "a7fe0dbfbd41197bb09fb6a8f2d8562a22384c99", date: "2024-02-16 05:16:09 UTC", description: "add support for include_paths_glob_patterns", pr_number: 19521, scopes: ["kubernetes"], type: "enhancement", breaking_change: false, author: "Sergey Yedrikov", files_count: 5, insertions_count: 127, deletions_count: 15}, + {sha: "9a0a5e4784bf80af8be7a7e8cfa8516a70d39704", date: "2024-02-16 10:16:40 UTC", description: "Update HttpRequest struct to pass additional metadata", pr_number: 19780, scopes: ["http sink"], type: "enhancement", breaking_change: false, author: "Sebastian Tia", files_count: 22, insertions_count: 271, deletions_count: 302}, + {sha: "448c9d19148c3707af54c7e2be90440de3a0316c", date: "2024-02-16 06:32:58 UTC", description: "Bump MSRV from 1.71.1 to 1.74", pr_number: 19884, scopes: ["deps"], type: "chore", breaking_change: false, author: "Sebastian Tia", files_count: 0, insertions_count: 0, deletions_count: 0}, + {sha: "2b0f06eb5de6dc008bd4c98e49ce82a5f0837942", date: "2024-02-17 06:52:33 UTC", description: "Bump syn from 2.0.48 to 2.0.49", pr_number: 19890, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 47, deletions_count: 47}, + {sha: "8223dca26efd790ec4fdbf5eb7626f2cc32d99a2", date: "2024-02-17 07:24:15 UTC", description: "Bump roaring from 0.10.2 to 0.10.3", pr_number: 19889, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 3, deletions_count: 16}, + {sha: "788f0c30ee259d5e918be074d059085107bd69bc", date: "2024-02-17 07:46:52 UTC", description: "Bump the aws group with 4 updates", pr_number: 19888, scopes: ["deps"], type: 
"chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 13, deletions_count: 12}, + {sha: "5d8160d72743df1e02fff9f69a8d4e37e1f2577a", date: "2024-02-17 09:32:26 UTC", description: "add express one zone storage class", pr_number: 19893, scopes: ["s3 sink"], type: "enhancement", breaking_change: false, author: "Siavash Safi", files_count: 5, insertions_count: 12, deletions_count: 3}, + {sha: "a798f681d392e761d3e1e185ca9d7e8075a892c5", date: "2024-02-17 08:39:40 UTC", description: "update HTTP request builder to return error", pr_number: 19886, scopes: ["http sink"], type: "enhancement", breaking_change: false, author: "Sebastian Tia", files_count: 8, insertions_count: 69, deletions_count: 39}, + {sha: "50a0c9bc118ee282144b14b3ed49f84cb5ce7c93", date: "2024-02-17 05:41:48 UTC", description: "Add a timeout to all CI jobs", pr_number: 19895, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 29, insertions_count: 69, deletions_count: 1}, + {sha: "78f0e31c8445355203fb5295224af7da1de19e1b", date: "2024-02-21 22:53:03 UTC", description: "Bump the aws group with 1 update", pr_number: 19919, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "6a76be2173ad5a3d919e20e0661a7f3fc543427d", date: "2024-02-22 06:55:56 UTC", description: "Bump mock_instant from 0.3.1 to 0.3.2", pr_number: 19900, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "837c64cffd3624e32178a1e5078ed5ed3e6ebc8a", date: "2024-02-22 06:56:27 UTC", description: "Bump serde_yaml from 0.9.31 to 0.9.32", pr_number: 19907, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 10, deletions_count: 10}, + {sha: "bb4190b028f24c51fa6296830aa6036f68c5596b", date: "2024-02-22 06:58:13 UTC", description: "Bump assert_cmd from 2.0.13 to 2.0.14", pr_number: 19908, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "7311c0aaa01cac20d4cdc71c21c516de7326405c", date: "2024-02-22 06:58:35 UTC", description: "Bump serde from 1.0.196 to 1.0.197", pr_number: 19910, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "b8d89a03459a32f9c227b6fab21b5081c75d934f", date: "2024-02-22 06:58:49 UTC", description: "Bump semver from 1.0.21 to 1.0.22", pr_number: 19911, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "1d91742e70a3c5ef4ae3a86c26a6d89846e35157", date: "2024-02-22 06:59:00 UTC", description: "Bump ryu from 1.0.16 to 1.0.17", pr_number: 19912, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "7fb4513424aa9c3d19fa0e43c7be2360d2ac412d", date: "2024-02-22 06:59:18 UTC", description: "Bump anyhow from 1.0.79 to 1.0.80", pr_number: 19914, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 5}, + {sha: "282a58d410a05f2bf0def7cfcca98e84342134ff", date: "2024-02-22 07:22:44 UTC", description: "Update release instructions for deploying vector.dev", 
pr_number: 19925, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 4, deletions_count: 2}, + {sha: "a32895ec096c5c55c449c8d3ad6bed658d69b71b", date: "2024-02-22 15:48:57 UTC", description: "Bump the clap group with 3 updates", pr_number: 19899, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 33, deletions_count: 26}, + {sha: "4cd4b6a26de5f70a687b934df7193aa9ba2d46f7", date: "2024-02-22 15:49:05 UTC", description: "Bump serde_json from 1.0.113 to 1.0.114", pr_number: 19909, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "c9e24003095f3a6271aa9a3d50c83c3b6f857014", date: "2024-02-22 15:49:08 UTC", description: "Bump syn from 2.0.49 to 2.0.50", pr_number: 19913, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 47, deletions_count: 47}, + {sha: "23ffe8812cd7df603cf3cf310773ee356c96c002", date: "2024-02-22 15:49:13 UTC", description: "Bump myrotvorets/set-commit-status-action from 2.0.0 to 2.0.1", pr_number: 19924, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 12, insertions_count: 29, deletions_count: 29}, + {sha: "a68a0b5c6a1ddd33682b578163727403dd9ef296", date: "2024-02-22 08:16:12 UTC", description: "Update CONTRIBUTING.md docs regarding how to have website…", pr_number: 19926, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 5, deletions_count: 1}, + {sha: "3f59886a39321570e459ba65469d933a968876f2", date: "2024-02-23 03:21:04 UTC", description: "Add pre-requisite for vdev", pr_number: 19668, scopes: [], type: "docs", breaking_change: false, author: "Harold Dost", files_count: 1, insertions_count: 6, deletions_count: 0}, + {sha: "695f847d1711923261acdec0ad029185c7826521", date: "2024-02-23 02:52:44 UTC", description: "expose test utilities", pr_number: 19894, scopes: ["tests"], type: "chore", breaking_change: false, author: "Sebastian Tia", files_count: 4, insertions_count: 68, deletions_count: 35}, + {sha: "a6da1d8f4357513161520ae4c9fac96859d7de24", date: "2024-02-23 01:47:41 UTC", description: "add sink error path validation + multi config", pr_number: 18062, scopes: ["component validation"], type: "feat", breaking_change: false, author: "neuronull", files_count: 12, insertions_count: 277, deletions_count: 86}, + {sha: "bb1b8571070f38f7eee385dad92807249236d063", date: "2024-02-24 14:46:24 UTC", description: "Initial pulsar source", pr_number: 18475, scopes: ["sources"], type: "feat", breaking_change: false, author: "WarmSnowy", files_count: 12, insertions_count: 1328, deletions_count: 9}, + {sha: "5d03bf0e00b3f235cd2dfa9c88e77d7a162c0180", date: "2024-02-27 00:03:46 UTC", description: "Bump serde-wasm-bindgen from 0.6.3 to 0.6.4", pr_number: 19934, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "e2e5253ff42339f8c66226580a8aadf9b729e10d", date: "2024-02-27 00:04:11 UTC", description: "Bump the aws group with 6 updates", pr_number: 19936, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 20, deletions_count: 20}, + {sha: "ae5b06bff08d062216a1beab2f764b6b39b04b71", date: "2024-02-27 05:51:37 UTC", description: 
"Bump lru from 0.12.2 to 0.12.3", pr_number: 19945, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "7bb9716ebc46bb2842e8df4b2c20775c1897d631", date: "2024-02-27 05:51:47 UTC", description: "Bump socket2 from 0.5.5 to 0.5.6", pr_number: 19947, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 12, deletions_count: 12}, + {sha: "fb11980b98b5ad3358124b5ecfb24d136c6f8903", date: "2024-02-27 05:52:03 UTC", description: "Bump cached from 0.48.1 to 0.49.2", pr_number: 19948, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "4634e2f167f47c6f9cfe0221cb7238b976f76091", date: "2024-02-27 08:06:08 UTC", description: "Bump openssl from 0.10.63 to 0.10.64", pr_number: 19906, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 7, deletions_count: 7}, + {sha: "070e38c555d7a7aaf9dda67e7dd468cfbfb949b9", date: "2024-02-27 09:09:23 UTC", description: "add support for EDNS EDE fields", pr_number: 19937, scopes: ["dnsmsg_parser"], type: "feat", breaking_change: false, author: "Ensar Sarajčić", files_count: 10, insertions_count: 292, deletions_count: 6}, + {sha: "f33169d6aa7d130f8a6a47a7060eeb3c69e22e98", date: "2024-02-27 08:35:16 UTC", description: "PulsarErrorEvent only occurs for the source", pr_number: 19950, scopes: ["pulsar source"], type: "fix", breaking_change: false, author: "Stephen Wakely", files_count: 1, insertions_count: 6, deletions_count: 1}, + {sha: "b9c4544d83c9c4042c49b4153cb94ba062f9dfdb", date: "2024-02-27 23:57:38 UTC", description: "Bump bstr from 1.9.0 to 1.9.1", pr_number: 19946, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 5, deletions_count: 5}, + {sha: "3091443aa82b31ba04ecd3727c1f6bb37a6abbb0", date: "2024-02-27 23:57:49 UTC", description: "Bump darling from 0.20.6 to 0.20.8", pr_number: 19949, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 13, deletions_count: 13}, + {sha: "5f43cde7aa6165e55091ec8372e301a03426a3e5", date: "2024-02-28 04:57:59 UTC", description: "Bump syn from 2.0.50 to 2.0.51", pr_number: 19953, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 47, deletions_count: 47}, + {sha: "565d93d35cca13c77e3105e6fa376761b23251d2", date: "2024-02-28 04:58:09 UTC", description: "Bump dyn-clone from 1.0.16 to 1.0.17", pr_number: 19954, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 5}, + {sha: "906cd65bb315cf658cc6c8a597c93e34de228d74", date: "2024-02-28 04:58:27 UTC", description: "Bump typetag from 0.2.15 to 0.2.16", pr_number: 19956, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 6, deletions_count: 6}, + {sha: "11f6491f77bd9fc98c3e19859d87aa036184a1d3", date: "2024-02-28 05:08:42 UTC", description: "Bump actions/add-to-project from 0.5.0 to 0.6.0", pr_number: 19960, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "cae37e99d8dba79c943e9cdf6af862523141f71c", date: "2024-02-28 
05:08:58 UTC", description: "Bump docker/setup-buildx-action from 3.0.0 to 3.1.0", pr_number: 19961, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "b1a2ca11c156aa9f66125c56009e7f05bbe65d2f", date: "2024-02-28 23:33:08 UTC", description: "Bump tempfile from 3.10.0 to 3.10.1", pr_number: 19955, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 5}, + {sha: "26ec8f432394b966e5c48da97634738f30c949d7", date: "2024-02-28 23:33:19 UTC", description: "Bump the aws group with 1 update", pr_number: 19965, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "c1d6529225b3c9dd1c3e00957361acab89fa4d50", date: "2024-02-29 04:33:31 UTC", description: "Bump serde-wasm-bindgen from 0.6.4 to 0.6.5", pr_number: 19966, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "d4cf2bf6989eee92a41e7312b63b8522fdb0444b", date: "2024-02-29 05:07:02 UTC", description: "Bump rumqttc from 0.23.0 to 0.24.0", pr_number: 19967, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 103, deletions_count: 36}, + {sha: "43a91293c61e67305ee175e3cf135adeec0b51b1", date: "2024-02-29 04:15:48 UTC", description: "robustly synchronize component validation framework tasks", pr_number: 19927, scopes: ["observability"], type: "chore", breaking_change: false, author: "neuronull", files_count: 11, insertions_count: 246, deletions_count: 218}, + {sha: "c71d5d16493f1662187ed6e7a11c8a88fbc4e133", date: "2024-03-01 03:22:55 UTC", description: "expose component validation framework", pr_number: 19964, scopes: ["testing"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 162, deletions_count: 165}, + {sha: "44150403903915f0fa8b31e8fd20b2d8cb33b480", date: "2024-03-01 03:28:46 UTC", description: "add component validation", pr_number: 19932, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 4, deletions_count: 0}, + {sha: "9acc151516e8db9b8798eb80b10cee8f843b6da7", date: "2024-03-02 05:35:45 UTC", description: "Bump log from 0.4.20 to 0.4.21", pr_number: 19977, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "29a9167c8554befaa5a56a188b3c44e18d08c638", date: "2024-03-02 05:35:55 UTC", description: "Bump syn from 2.0.51 to 2.0.52", pr_number: 19979, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 47, deletions_count: 47}, + {sha: "6ef50922b302519518937008b99cba9f97a7283c", date: "2024-03-02 05:36:04 UTC", description: "Bump mlua from 0.9.5 to 0.9.6", pr_number: 19985, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "69e84b335edef665264aab16a5895c3877b99b5e", date: "2024-03-02 05:36:15 UTC", description: "Bump confy from 0.6.0 to 0.6.1", pr_number: 19986, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: 
"e2d8ad468ba7fa96598cf8cd3cc80641861d8b30", date: "2024-03-02 05:36:24 UTC", description: "Bump indexmap from 2.2.3 to 2.2.5", pr_number: 19987, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 23, deletions_count: 23}, + {sha: "4677102f189dfb9f3f63ea2f03ad4008fa01b30e", date: "2024-03-04 23:05:42 UTC", description: "Bump opendal from 0.45.0 to 0.45.1", pr_number: 19996, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 4, deletions_count: 14}, + {sha: "02bb9b2e7eda2326f4da9d6500c76f1b6e812b28", date: "2024-03-04 23:05:57 UTC", description: "Bump arc-swap from 1.6.0 to 1.7.0", pr_number: 19997, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "8ca10a0232889fc8195911409d78469e50e76e12", date: "2024-03-05 07:07:36 UTC", description: "Bump the aws group with 3 updates", pr_number: 19976, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 9, deletions_count: 9}, + {sha: "312056c39178c3f40369d3aeefaf059dc9611626", date: "2024-03-05 07:42:16 UTC", description: "Bump bollard from 0.15.0 to 0.16.0", pr_number: 19998, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 153, deletions_count: 50}, + {sha: "c7e4e33ca0c479cd9c8b0c5af72f6bc804d287fe", date: "2024-03-05 02:19:24 UTC", description: "extend component validation framework for more flexible test case building", pr_number: 19941, scopes: ["observability"], type: "chore", breaking_change: false, author: "neuronull", files_count: 9, insertions_count: 117, deletions_count: 28}, + {sha: "676318aa258e9b211fd6bd8330eb900788f0473f", date: "2024-03-05 03:08:25 UTC", description: "expose deduping logic", pr_number: 19992, scopes: ["dedupe transform"], type: "chore", breaking_change: false, author: "neuronull", files_count: 6, insertions_count: 270, deletions_count: 243}, + {sha: "f34738e6737e79f77dc6aa9aecb8d00430f64d99", date: "2024-03-05 03:48:15 UTC", description: "increase timeout for `cross` workflow", pr_number: 20002, scopes: ["ci"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "fa99d6c2cdc6457d6f70f00dccf8e03d57ffce3a", date: "2024-03-06 08:31:54 UTC", description: "don't remove timestamp for `raw` endpoint", pr_number: 19975, scopes: ["splunk_hec_logs sink"], type: "fix", breaking_change: false, author: "neuronull", files_count: 8, insertions_count: 120, deletions_count: 50}, + {sha: "3b6066d9f93e753c0c4989173eaced46b1d2c519", date: "2024-03-07 00:47:55 UTC", description: "Remove optionality from topology controller reload", pr_number: 20010, scopes: ["core"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 2, insertions_count: 25, deletions_count: 26}, + {sha: "d75f74cd9f28621f676e5c93aefbdccd279662af", date: "2024-03-07 06:54:08 UTC", description: "Bump cargo_toml from 0.19.1 to 0.19.2", pr_number: 20007, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "c1141f9288007ec79c140d551a5ddfef483c40c5", date: "2024-03-07 06:54:33 UTC", description: "Bump wasm-bindgen from 0.2.91 to 0.2.92", pr_number: 20009, scopes: ["deps"], type: "chore", breaking_change: false, author: 
"dependabot[bot]", files_count: 1, insertions_count: 10, deletions_count: 10}, + {sha: "cbebdb2689600b8515dc34430703c8281cf7caa0", date: "2024-03-07 06:54:43 UTC", description: "Bump pin-project from 1.1.4 to 1.1.5", pr_number: 20015, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 5, deletions_count: 5}, + {sha: "8db6288b4cc2ecf070649e0dc53879f267f41c32", date: "2024-03-07 01:02:25 UTC", description: "calculate `EstimatedJsonSizeOf` for `component_received_event_bytes_total` before enrichment", pr_number: 19942, scopes: ["splunk_hec source"], type: "fix", breaking_change: false, author: "neuronull", files_count: 4, insertions_count: 137, deletions_count: 44}, + {sha: "eb3099657f53c8de5584b20fbb68f05c342f93c7", date: "2024-03-07 01:13:03 UTC", description: "Use gzip compression for datadog_logs regression tests", pr_number: 20020, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 2, deletions_count: 0}, + {sha: "ea377f007e0657d65915f90b46e602ad6a149708", date: "2024-03-07 03:54:40 UTC", description: "add component spec validation tests for `datadog_logs` sink", pr_number: 19887, scopes: ["observability"], type: "chore", breaking_change: false, author: "neuronull", files_count: 6, insertions_count: 74, deletions_count: 16}, + {sha: "44ed0d146e274c9593db17f8e9fe74de3833e58f", date: "2024-03-07 05:40:46 UTC", description: "caller resolves the component validation framework test case path", pr_number: 20021, scopes: ["tests"], type: "chore", breaking_change: false, author: "neuronull", files_count: 1, insertions_count: 4, deletions_count: 2}, + {sha: "0f472db2b153566df47caec0c50b2f26ba0a2197", date: "2024-03-07 07:53:51 UTC", description: "Add missing `TraceEvent::remove` function", pr_number: 20023, scopes: ["core"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 1, insertions_count: 4, deletions_count: 0}, + {sha: "a3bedbd70b6b297e3d7cf9868a7c82f87a86d548", date: "2024-03-07 07:05:05 UTC", description: "only compile ValidatableComponent in test runs", pr_number: 20024, scopes: ["testing"], type: "chore", breaking_change: false, author: "neuronull", files_count: 8, insertions_count: 273, deletions_count: 269}, + {sha: "eb690d4343e74078e4debd9f9984bcf0e89ad8a5", date: "2024-03-08 04:26:37 UTC", description: "add TCP mode to DNSTAP source", pr_number: 19892, scopes: ["sources"], type: "feat", breaking_change: true, author: "Ensar Sarajčić", files_count: 26, insertions_count: 1658, deletions_count: 299}, + {sha: "482ed3cb7a9de9763d7e623c8a691ac4d9911638", date: "2024-03-08 08:41:19 UTC", description: "add support for more record types (HINFO, CSYNC, OPT, missing DNSSEC types)", pr_number: 19921, scopes: ["dnsmsg_parser"], type: "feat", breaking_change: false, author: "Ensar Sarajčić", files_count: 4, insertions_count: 186, deletions_count: 53}, + {sha: "d505045620cc5272be54b42fdd01abb8c0486d50", date: "2024-03-08 05:56:54 UTC", description: "Update statsd doc to mention timing conversion", pr_number: 20033, scopes: ["statsd source"], type: "docs", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 7, deletions_count: 0}, + {sha: "d5c8a77b5751c4d2277cee6ee76a1903873c5873", date: "2024-03-08 08:32:11 UTC", description: "add `parse_ddtags` config setting to parse the `ddtags` log event field into an object", pr_number: 20003, scopes: ["datadog_agent source"], type: "enhancement", breaking_change: false, 
author: "neuronull", files_count: 5, insertions_count: 185, deletions_count: 2}, + {sha: "485dea71725511b997586698650e202add499183", date: "2024-03-09 09:17:23 UTC", description: "add `lowercase_hostnames` option to `dnstap` source", pr_number: 20035, scopes: ["sources"], type: "feat", breaking_change: false, author: "Ensar Sarajčić", files_count: 5, insertions_count: 495, deletions_count: 299}, + {sha: "55a962a3c55d7b9437ec6b4b36ca42172bc9b953", date: "2024-03-09 02:26:04 UTC", description: "Update VRL to v0.12.0", pr_number: 20037, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 17, deletions_count: 10}, + {sha: "c83e36dd447ef9a4ebe8270bc295743ca3053bb6", date: "2024-03-09 10:58:13 UTC", description: "Bump the clap group with 1 update", pr_number: 20026, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 15, deletions_count: 15}, + {sha: "bd2f0a33e75e624bb75cb2c311bcbfa620ab699a", date: "2024-03-09 12:17:13 UTC", description: "integrate Cargo package dependency info", pr_number: 19933, scopes: ["website"], type: "feat", breaking_change: false, author: "Hugo Hromic", files_count: 7, insertions_count: 30, deletions_count: 18}, + {sha: "37a19fab442b06be3dc73c6962578e2f083f9d88", date: "2024-03-09 03:54:22 UTC", description: "Remove mention of handwriting changelog for patch release", pr_number: 20040, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 0, deletions_count: 3}, + {sha: "56f167629049f879429506ce34321b534cfd79da", date: "2024-03-11 22:58:23 UTC", description: "Bump base64 from 0.21.7 to 0.22.0", pr_number: 19999, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 5, deletions_count: 5}, + {sha: "04f78584d7dd10e98d81e3065fbb17483009d60f", date: "2024-03-12 07:31:33 UTC", description: "fix tests for currently unknown rdata types", pr_number: 20052, scopes: ["dnstap source"], type: "chore", breaking_change: false, author: "Ensar Sarajčić", files_count: 1, insertions_count: 17, deletions_count: 5}, + {sha: "6d0961347b7c36115da101ab993f66a532493a16", date: "2024-03-12 07:55:07 UTC", description: "add docs for new validate flag in punycode functions", pr_number: 19923, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 2, insertions_count: 28, deletions_count: 0}, + {sha: "cbcb874a9944801e8a89d42e44ecf551db55071a", date: "2024-03-12 14:59:16 UTC", description: "improve tls support for greptimedb sink", pr_number: 20006, scopes: ["greptimedb sink"], type: "feat", breaking_change: false, author: "Ning Sun", files_count: 5, insertions_count: 41, deletions_count: 57}, + {sha: "d2aca62f1edcedd76bb818dc936a54b0928b0786", date: "2024-03-12 02:11:01 UTC", description: "Use `component_kind` rather than `kind` for Hugo", pr_number: 20058, scopes: ["docs"], type: "fix", breaking_change: false, author: "Jesse Szwedko", files_count: 110, insertions_count: 112, deletions_count: 112}, + {sha: "b35eaf53315532a7668cd36342f72af2d4e00488", date: "2024-03-12 02:56:13 UTC", description: "Regenerate k8s manifests for Helm chart v0.31.1", pr_number: 20060, scopes: ["releasing"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 18, insertions_count: 25, deletions_count: 23}, + {sha: "e9815e1f328a4ef59099c3d07918f167947c2e1f", date: "2024-03-12 11:06:13 UTC", description: "Add 
ARMv6 builds", pr_number: 19192, scopes: ["platforms"], type: "feat", breaking_change: false, author: "William Taylor", files_count: 12, insertions_count: 195, deletions_count: 3}, + {sha: "38acf37f1d5d33f46af93f24034475e450f04b29", date: "2024-03-12 04:06:24 UTC", description: "Update banner to use past tense for repository decommissioning", pr_number: 20059, scopes: ["docs"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "4804e1745170dab2075fe6ef27534d57033ec2f7", date: "2024-03-12 05:30:26 UTC", description: "Use `component_kind` rather than `kind` in templates", pr_number: 20063, scopes: ["docs"], type: "fix", breaking_change: false, author: "Jesse Szwedko", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "f0d3037541b99bfcebfabdb1796200992f0747a8", date: "2024-03-12 22:15:19 UTC", description: "Default env vars for enterprise_http_to_http regression case", pr_number: 20073, scopes: ["ci"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "a7c3dbc453dc63dd4499b8f0c3dce15f16839f46", date: "2024-03-12 23:45:52 UTC", description: "Update default for --strict-env-vars to true", pr_number: 20062, scopes: ["cli"], type: "chore", breaking_change: true, author: "Jesse Szwedko", files_count: 4, insertions_count: 27, deletions_count: 10}, + {sha: "6a6c159da14b441df6dde0a3a9997a787910087a", date: "2024-03-13 04:23:34 UTC", description: "Update changelog generation script to handle authors and whitespace", pr_number: 20075, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 3, insertions_count: 8, deletions_count: 8}, + {sha: "52d72dae521be48260c82a5e9fdb9ef81629e24c", date: "2024-03-13 12:09:53 UTC", description: "Bump docker/build-push-action from 5.1.0 to 5.2.0", pr_number: 20057, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "bcc6e40862ee16f4cec75b8f752c54a399bd6cbc", date: "2024-03-13 12:10:06 UTC", description: "Bump toml from 0.8.10 to 0.8.11", pr_number: 20067, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 18, deletions_count: 9}, + {sha: "98df316fedbdffcf475b3ca9c51ab5ad4bdaa1ae", date: "2024-03-13 12:10:19 UTC", description: "Bump serde_with from 3.6.1 to 3.7.0", pr_number: 20068, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 14, deletions_count: 14}, + {sha: "34d3aa5b23b859d0e9e0c566c2ae3ec5bf79ceca", date: "2024-03-13 12:10:32 UTC", description: "Bump thiserror from 1.0.57 to 1.0.58", pr_number: 20069, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 4, deletions_count: 4}, + {sha: "8811e218d9d691d0d5e600d0cd2cd50cacb02c0a", date: "2024-03-13 12:10:42 UTC", description: "Bump proc-macro2 from 1.0.78 to 1.0.79", pr_number: 20070, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 71, deletions_count: 71}, + {sha: "fe23c97ae6a45115c9924a3ea6410c62018c5060", date: "2024-03-13 16:38:04 UTC", description: "Bump anyhow from 1.0.80 to 1.0.81", pr_number: 20066, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 
5}, + {sha: "aa04ac86707ee0f1df8e7b77acbd459834ca1fa4", date: "2024-03-14 04:45:15 UTC", description: "add `permit_origin` config option for all tcp sources", pr_number: 20051, scopes: ["sources"], type: "feat", breaking_change: false, author: "Ensar Sarajčić", files_count: 16, insertions_count: 85, deletions_count: 29}, + {sha: "0ec279d2a1b6a113f6e62d1f755a29a371862307", date: "2024-03-13 20:48:07 UTC", description: "Bump bufbuild/buf-setup-action from 1.29.0 to 1.30.0", pr_number: 20056, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "de4687ff51eda7c67a66ebe86138ab9ad7ceb54c", date: "2024-03-14 03:48:18 UTC", description: "Bump the aws group with 4 updates", pr_number: 20079, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 14, deletions_count: 15}, + {sha: "c62ec39ab159b964ec0069db5b528f0954a66c43", date: "2024-03-14 03:48:28 UTC", description: "Bump reqwest from 0.11.24 to 0.11.26", pr_number: 20080, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "62de4218e00a9907bc3c79b9e36c01066b772bb5", date: "2024-03-14 03:48:38 UTC", description: "Bump serde-toml-merge from 0.3.4 to 0.3.5", pr_number: 20081, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "d23730e3138c20fac276178357234135f1fc52bd", date: "2024-03-14 03:48:52 UTC", description: "Bump os_info from 3.7.0 to 3.8.0", pr_number: 20082, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 4, deletions_count: 4}, + {sha: "ebdc64dbfc0ac71a1ff73ab9080849eca718a442", date: "2024-03-14 00:12:04 UTC", description: "Readd error log for elasticsearch sink", pr_number: 19846, scopes: ["elasticsearch sink"], type: "fix", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 19, deletions_count: 0}, + {sha: "f7380e45e4e1af63dd1bb3ecefac50ff45376a3c", date: "2024-03-14 02:29:15 UTC", description: "Set ignored_header_bytes default to `0`", pr_number: 20076, scopes: ["file source"], type: "fix", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 6, deletions_count: 0}, + {sha: "ccaa7e376d0167d187573c4b9b478f1c2778e359", date: "2024-03-14 05:18:39 UTC", description: "Update CODEOWNERS to reflect consolidation", pr_number: 20087, scopes: ["dev"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 98, deletions_count: 98}, + {sha: "d511e893ad0e594231e06f25a9d35ab70248bedc", date: "2024-03-15 05:41:54 UTC", description: "add support for custom MMDB types", pr_number: 20054, scopes: ["enrichment_tables"], type: "feat", breaking_change: false, author: "Ensar Sarajčić", files_count: 9, insertions_count: 436, deletions_count: 15}, + {sha: "4671ccbf0a6359ef8b752fa99fae9eb9c60fdee5", date: "2024-03-14 22:28:24 UTC", description: "Use correct how_it_works section for Vector sink", pr_number: 20095, scopes: ["docs"], type: "fix", breaking_change: false, author: "Jesse Szwedko", files_count: 1, insertions_count: 1, deletions_count: 1}, + {sha: "fafe8c50a4721fa3ddbea34e0641d3c145f14388", date: "2024-03-15 16:38:43 UTC", description: "remove repetitive words", pr_number: 20091, scopes: [], type: "chore", breaking_change: false, 
author: "teslaedison", files_count: 8, insertions_count: 9, deletions_count: 9}, + {sha: "0be97cdae0d97d9ccd9fb2e14501c9dd82fb6e10", date: "2024-03-16 05:28:19 UTC", description: "relax required input semantic meanings", pr_number: 20086, scopes: ["datadog_logs sink"], type: "fix", breaking_change: false, author: "neuronull", files_count: 3, insertions_count: 82, deletions_count: 16}, + {sha: "ad8a8690b7707540dd24a85e8ada8c51bab150fe", date: "2024-03-16 08:02:19 UTC", description: "Bump tokio-test from 0.4.3 to 0.4.4", pr_number: 20101, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 5, deletions_count: 5}, + {sha: "88606447dd9f874f27f06dc17c3e2f0b2083e221", date: "2024-03-18 22:48:54 UTC", description: "Bump the aws group with 1 update", pr_number: 20089, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 4, deletions_count: 3}, + {sha: "494d7e2a7bff5c7bebb90925b5f451a99e3f0d5c", date: "2024-03-18 22:49:07 UTC", description: "Bump docker/setup-buildx-action from 3.1.0 to 3.2.0", pr_number: 20097, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "cb4a5e6257508534295dc79c8af2768c7e74284d", date: "2024-03-19 02:49:18 UTC", description: "Bump docker/build-push-action from 5.2.0 to 5.3.0", pr_number: 20098, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "8737b24807ee6b00a20663f951ec0ce53682530e", date: "2024-03-19 02:51:47 UTC", description: "Bump syn from 2.0.52 to 2.0.53", pr_number: 20111, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 47, deletions_count: 47}, + {sha: "7e3e60fa447eab3b73f27e2c98ed1f2c4d19fe94", date: "2024-03-19 02:51:57 UTC", description: "Bump os_info from 3.8.0 to 3.8.1", pr_number: 20112, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "068b19918fd723e26b9fc5c6de289493d9ad55de", date: "2024-03-19 02:52:13 UTC", description: "Bump async-recursion from 1.0.5 to 1.1.0", pr_number: 20114, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "a1902c2897c23e40d18dc96df333461c0f65ef4a", date: "2024-03-19 02:52:23 UTC", description: "Bump async-trait from 0.1.77 to 0.1.78", pr_number: 20115, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "3e8c6a48451233fb7b60b4ca0a5139986745f80e", date: "2024-03-19 02:52:32 UTC", description: "Bump serde_yaml from 0.9.32 to 0.9.33", pr_number: 20116, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 4, insertions_count: 12, deletions_count: 12}, + {sha: "5c33628279443068365616783b6a2d5466e8a548", date: "2024-03-19 02:52:46 UTC", description: "Bump mongodb from 2.8.1 to 2.8.2", pr_number: 20117, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "7c9b4c59c06a49c46e1f0f84faa6114dcce5c642", date: "2024-03-19 03:26:17 UTC", description: "Bump the clap group with 1 update", pr_number: 20108, scopes: ["deps"], 
type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 23, deletions_count: 16}, + {sha: "4c7becebe8ec38f2a60d25a97bafa3d6c9a12fd7", date: "2024-03-19 05:16:11 UTC", description: "Bump tokio-stream from 0.1.14 to 0.1.15", pr_number: 20100, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 4, deletions_count: 4}, + {sha: "5e7248cfaa787126cb7654e0523d6ced8c06f245", date: "2024-03-19 05:24:16 UTC", description: "do not filter out file contents from error logs", pr_number: 20125, scopes: ["remap"], type: "enhancement", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 6, deletions_count: 42}, + {sha: "12c1866214e55869275afa5fc0741f2af8baa0fd", date: "2024-03-19 05:01:37 UTC", description: "further adjustments to component validation framework", pr_number: 20043, scopes: ["testing"], type: "chore", breaking_change: false, author: "neuronull", files_count: 3, insertions_count: 56, deletions_count: 5}, + {sha: "80f63bb6b52561ae4a9f98783ae98472c0798845", date: "2024-03-19 11:24:11 UTC", description: "Bump the graphql group with 2 updates", pr_number: 20107, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 3, insertions_count: 16, deletions_count: 19}, + {sha: "ad6a48efc0f79b2c18a5c1394e5d8603fdfd1bab", date: "2024-03-19 07:04:35 UTC", description: "bugs in internal component metric reporting", pr_number: 20044, scopes: ["datadog_agent source"], type: "fix", breaking_change: false, author: "neuronull", files_count: 7, insertions_count: 155, deletions_count: 6}, + {sha: "62297dcb8caba651ed60f154c36b5a4e1a63046b", date: "2024-03-20 00:28:08 UTC", description: "Bump VRL to v0.13.0", pr_number: 20126, scopes: ["deps"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 3, insertions_count: 13, deletions_count: 3}, + {sha: "58a4a2ef52e606c0f9b9fa975cf114b661300584", date: "2024-03-20 00:52:12 UTC", description: "Move host_metrics feature gate", pr_number: 20134, scopes: ["api"], type: "chore", breaking_change: false, author: "Jesse Szwedko", files_count: 2, insertions_count: 4, deletions_count: 4}, + {sha: "b184196d9760539db31a5238ee7b7254329b7c8d", date: "2024-03-21 02:44:33 UTC", description: "Bump uuid from 1.7.0 to 1.8.0", pr_number: 20131, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "2a88fc06b7c958f9787a3e050c677cbe5860d62d", date: "2024-03-21 02:44:43 UTC", description: "Bump the aws group with 2 updates", pr_number: 20129, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 6, deletions_count: 6}, + {sha: "04bff918cfcba087c18766ef81a8e2316b8790f4", date: "2024-03-22 05:30:21 UTC", description: "Bump smallvec from 1.13.1 to 1.13.2", pr_number: 20145, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 1, insertions_count: 2, deletions_count: 2}, + {sha: "db9c681fd99234f6cd4799185bace2f351e0712d", date: "2024-03-22 09:31:23 UTC", description: "Bump actions/add-to-project from 0.6.0 to 0.6.1", pr_number: 20137, scopes: ["ci"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "e012a80bb5d8e4f318fb4408d9e2ab6242a8883b", date: "2024-03-22 09:31:26 UTC", description: "Bump serde-toml-merge 
from 0.3.5 to 0.3.6", pr_number: 20132, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 3, deletions_count: 3}, + {sha: "20e56d3080ec3cb04c750966c2722799ed920225", date: "2024-03-22 09:31:37 UTC", description: "Bump toml from 0.8.11 to 0.8.12", pr_number: 20130, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 6, deletions_count: 6}, + {sha: "3f83ea32e06c8e3575e6b82bdf8e25a7eb97dcc0", date: "2024-03-22 09:31:48 UTC", description: "Bump h2 from 0.4.2 to 0.4.3", pr_number: 20110, scopes: ["deps"], type: "chore", breaking_change: false, author: "dependabot[bot]", files_count: 2, insertions_count: 4, deletions_count: 4}, + {sha: "4c68f9699749d17fa926983e2a90bdeec92b112a", date: "2024-03-22 11:05:32 UTC", description: "add documentation for `sieve` function", pr_number: 20000, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Ensar Sarajčić", files_count: 1, insertions_count: 65, deletions_count: 0}, + {sha: "abd776d7c74ae48968fa34829d3683f68115a9e0", date: "2024-03-22 22:48:19 UTC", description: "Bump Rust to 1.77.0", pr_number: 20149, scopes: ["deps"], type: "chore", breaking_change: false, author: "Bruce Guenter", files_count: 14, insertions_count: 40, deletions_count: 42}, + {sha: "314ea367302fb95a3ec0c2fcdfbe19df6a0e7603", date: "2024-03-23 10:54:00 UTC", description: "add `uuid_v7` function", pr_number: 20048, scopes: ["vrl"], type: "feat", breaking_change: false, author: "Philipp Paulweber", files_count: 4, insertions_count: 72, deletions_count: 0}, + {sha: "4d23e66dc22c499ad8263b937c21800d1b68d1c7", date: "2024-03-23 06:45:38 UTC", description: "Update TLS docs for `verify_certificate`", pr_number: 20153, scopes: ["security"], type: "docs", breaking_change: false, author: "Jesse Szwedko", files_count: 68, insertions_count: 210, deletions_count: 210}, + {sha: "4279bf0018055de68f59dffe9532fab96c80d3ac", date: "2024-03-26 06:35:53 UTC", description: "Add documentation for parse_proto and encode_proto", pr_number: 20139, scopes: ["vrl"], type: "docs", breaking_change: false, author: "Flávio Cruz", files_count: 5, insertions_count: 121, deletions_count: 0}, + ] +} diff --git a/website/cue/reference/versions.cue b/website/cue/reference/versions.cue index 268a507d4f03f..511883bf9baec 100644 --- a/website/cue/reference/versions.cue +++ b/website/cue/reference/versions.cue @@ -2,6 +2,7 @@ package metadata // This has to be maintained manually because there's currently no way to sort versions programmatically versions: [string, ...string] & [ + "0.37.0", "0.36.1", "0.36.0", "0.35.1", From 5349313dacc77f6798b51574c0639220b58f4284 Mon Sep 17 00:00:00 2001 From: neuronull Date: Tue, 26 Mar 2024 09:43:01 -0600 Subject: [PATCH 0182/1491] chore(core): expose semantic meaning log event helper fn (#20178) --- lib/vector-core/src/event/log_event.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lib/vector-core/src/event/log_event.rs b/lib/vector-core/src/event/log_event.rs index 43ca2ccb3944d..a53fae72ab145 100644 --- a/lib/vector-core/src/event/log_event.rs +++ b/lib/vector-core/src/event/log_event.rs @@ -329,7 +329,7 @@ impl LogEvent { } /// Retrieves the target path of a field based on the specified `meaning`. 
- fn find_key_by_meaning(&self, meaning: impl AsRef<str>) -> Option<&OwnedTargetPath> { + pub fn find_key_by_meaning(&self, meaning: impl AsRef<str>) -> Option<&OwnedTargetPath> { self.metadata() .schema_definition() .meaning_path(meaning.as_ref()) From 9148785f9232b53792a1dcf8fcb73d97691669cc Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 26 Mar 2024 10:51:35 -0700 Subject: [PATCH 0183/1491] chore(docs): Remove package deprecation banner (#20181) I'd planned to time this with the v0.37.0 release. Signed-off-by: Jesse Szwedko --- website/config.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/config.toml b/website/config.toml index 67b21dd0eb4f6..ed044aef981e5 100644 --- a/website/config.toml +++ b/website/config.toml @@ -34,7 +34,7 @@ subtagline = "Vector is deployed over 1,000,000 times per month by Fortune 500 c alpine_js_version = "2.8.2" ionicons_version = "5.4.0" site_logo = "img/vector-open-graph.png" -display_banner = true # Whether to display the top banner in layouts/partials/banner.html +display_banner = false # Whether to display the top banner in layouts/partials/banner.html favicon = "favicon.ico" # Update this every time there's a new "generation" of the site. Incrementing this forces the browser to refresh any From c1fc9f03b816c54b0efb57198e5a16159a324ac9 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 26 Mar 2024 11:25:15 -0700 Subject: [PATCH 0184/1491] chore(dev): Bump Vector to 0.38.0 (#20180) Now that 0.37.0 has been released. Signed-off-by: Jesse Szwedko --- Cargo.lock | 2 +- Cargo.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 155a138284865..e965fc7c839b9 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10017,7 +10017,7 @@ checksum = "f1bddf1187be692e79c5ffeab891132dfb0f236ed36a43c7ed39f1165ee20191" [[package]] name = "vector" -version = "0.37.0" +version = "0.38.0" dependencies = [ "apache-avro", "approx", diff --git a/Cargo.toml b/Cargo.toml index eeb76455b8134..29c57191d12cb 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "vector" -version = "0.37.0" +version = "0.38.0" authors = ["Vector Contributors "] edition = "2021" description = "A lightweight and ultra-fast tool for building observability pipelines" From 3378adad7b5b1819ffd52cf9563f519bb13ebdad Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Tue, 26 Mar 2024 11:39:59 -0700 Subject: [PATCH 0185/1491] chore(releasing): Bump Kubernetes manifests to chart version 0.32.0 (#20182) Signed-off-by: Jesse Szwedko --- distribution/kubernetes/vector-agent/README.md | 2 +- distribution/kubernetes/vector-agent/configmap.yaml | 2 +- distribution/kubernetes/vector-agent/daemonset.yaml | 4 ++-- distribution/kubernetes/vector-agent/rbac.yaml | 4 ++-- distribution/kubernetes/vector-agent/service-headless.yaml | 2 +- distribution/kubernetes/vector-agent/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/README.md | 2 +- distribution/kubernetes/vector-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-aggregator/service-headless.yaml | 2 +- distribution/kubernetes/vector-aggregator/service.yaml | 2 +- distribution/kubernetes/vector-aggregator/serviceaccount.yaml | 2 +- distribution/kubernetes/vector-aggregator/statefulset.yaml | 4 ++-- distribution/kubernetes/vector-stateless-aggregator/README.md | 2 +- .../kubernetes/vector-stateless-aggregator/configmap.yaml | 2 +- .../kubernetes/vector-stateless-aggregator/deployment.yaml | 4 ++-- .../vector-stateless-aggregator/service-headless.yaml | 2
+- .../kubernetes/vector-stateless-aggregator/service.yaml | 2 +- .../vector-stateless-aggregator/serviceaccount.yaml | 2 +- 18 files changed, 22 insertions(+), 22 deletions(-) diff --git a/distribution/kubernetes/vector-agent/README.md b/distribution/kubernetes/vector-agent/README.md index 305c49b19cdbb..55e8722848b94 100644 --- a/distribution/kubernetes/vector-agent/README.md +++ b/distribution/kubernetes/vector-agent/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.31.1 with the following `values.yaml`: +version 0.32.0 with the following `values.yaml`: ```yaml role: Agent diff --git a/distribution/kubernetes/vector-agent/configmap.yaml b/distribution/kubernetes/vector-agent/configmap.yaml index 50a0f146022bf..bfe345207f77d 100644 --- a/distribution/kubernetes/vector-agent/configmap.yaml +++ b/distribution/kubernetes/vector-agent/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" data: agent.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-agent/daemonset.yaml b/distribution/kubernetes/vector-agent/daemonset.yaml index a14e9ff4113f0..16604d4408ce7 100644 --- a/distribution/kubernetes/vector-agent/daemonset.yaml +++ b/distribution/kubernetes/vector-agent/daemonset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: {} spec: selector: @@ -30,7 +30,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.36.1-distroless-libc" + image: "timberio/vector:0.37.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-agent/rbac.yaml b/distribution/kubernetes/vector-agent/rbac.yaml index 0fa5e329dfbaa..6161e45777303 100644 --- a/distribution/kubernetes/vector-agent/rbac.yaml +++ b/distribution/kubernetes/vector-agent/rbac.yaml @@ -10,7 +10,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" rules: - apiGroups: - "" @@ -31,7 +31,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" roleRef: apiGroup: rbac.authorization.k8s.io kind: ClusterRole diff --git a/distribution/kubernetes/vector-agent/service-headless.yaml b/distribution/kubernetes/vector-agent/service-headless.yaml index a064605c6eafe..49465ede3cd1b 100644 --- a/distribution/kubernetes/vector-agent/service-headless.yaml +++ b/distribution/kubernetes/vector-agent/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-agent/serviceaccount.yaml 
b/distribution/kubernetes/vector-agent/serviceaccount.yaml index 45c0f65f458c4..726beb2e9de24 100644 --- a/distribution/kubernetes/vector-agent/serviceaccount.yaml +++ b/distribution/kubernetes/vector-agent/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Agent - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/README.md b/distribution/kubernetes/vector-aggregator/README.md index a628dd9f7a7cf..9993816bcb101 100644 --- a/distribution/kubernetes/vector-aggregator/README.md +++ b/distribution/kubernetes/vector-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.31.1 with the following `values.yaml`: +version 0.32.0 with the following `values.yaml`: ```yaml diff --git a/distribution/kubernetes/vector-aggregator/configmap.yaml b/distribution/kubernetes/vector-aggregator/configmap.yaml index f63c63908af06..9318a1b97f097 100644 --- a/distribution/kubernetes/vector-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-aggregator/service-headless.yaml b/distribution/kubernetes/vector-aggregator/service-headless.yaml index 95243260dbac0..d21a91e690485 100644 --- a/distribution/kubernetes/vector-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: spec: clusterIP: None diff --git a/distribution/kubernetes/vector-aggregator/service.yaml b/distribution/kubernetes/vector-aggregator/service.yaml index 9ab6f562d84b9..e419f3cc6f096 100644 --- a/distribution/kubernetes/vector-aggregator/service.yaml +++ b/distribution/kubernetes/vector-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml index 75bec8b11ec83..d5294b38517a6 100644 --- a/distribution/kubernetes/vector-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" automountServiceAccountToken: true diff --git a/distribution/kubernetes/vector-aggregator/statefulset.yaml 
b/distribution/kubernetes/vector-aggregator/statefulset.yaml index 8ddce3f40fe86..5bc2f9cfbf6c1 100644 --- a/distribution/kubernetes/vector-aggregator/statefulset.yaml +++ b/distribution/kubernetes/vector-aggregator/statefulset.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: {} spec: replicas: 1 @@ -33,7 +33,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.36.1-distroless-libc" + image: "timberio/vector:0.37.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/README.md b/distribution/kubernetes/vector-stateless-aggregator/README.md index 6f23102e6b1a8..6507dee0ef8c5 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/README.md +++ b/distribution/kubernetes/vector-stateless-aggregator/README.md @@ -1,6 +1,6 @@ The kubernetes manifests found in this directory have been automatically generated from the [helm chart `vector/vector`](https://github.com/vectordotdev/helm-charts/tree/master/charts/vector) -version 0.31.1 with the following `values.yaml`: +version 0.32.0 with the following `values.yaml`: ```yaml role: Stateless-Aggregator diff --git a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml index 1ef333048e261..180fe30cf8cdd 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/configmap.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" data: aggregator.yaml: | data_dir: /vector-data-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml index ae35e1940c01a..2f639b4d4a87a 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/deployment.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: {} spec: replicas: 1 @@ -31,7 +31,7 @@ spec: dnsPolicy: ClusterFirst containers: - name: vector - image: "timberio/vector:0.36.1-distroless-libc" + image: "timberio/vector:0.37.0-distroless-libc" imagePullPolicy: IfNotPresent args: - --config-dir diff --git a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml index 4fa239d4a656b..6c828782d4989 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service-headless.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: spec: clusterIP: None diff --git 
a/distribution/kubernetes/vector-stateless-aggregator/service.yaml b/distribution/kubernetes/vector-stateless-aggregator/service.yaml index e9b10fdda00f3..513a265b7209b 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/service.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/service.yaml @@ -8,7 +8,7 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" annotations: spec: ports: diff --git a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml index 18316cff0d047..27d4c6c81590c 100644 --- a/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml +++ b/distribution/kubernetes/vector-stateless-aggregator/serviceaccount.yaml @@ -8,5 +8,5 @@ metadata: app.kubernetes.io/name: vector app.kubernetes.io/instance: vector app.kubernetes.io/component: Stateless-Aggregator - app.kubernetes.io/version: "0.36.1-distroless-libc" + app.kubernetes.io/version: "0.37.0-distroless-libc" automountServiceAccountToken: true From 6f351255f49c66c02298a6b5652bd9cfce2cde9a Mon Sep 17 00:00:00 2001 From: Ensar Sarajčić Date: Wed, 27 Mar 2024 18:18:40 +0100 Subject: [PATCH 0186/1491] fix(enrichment_tables): bring back support for `GeoLite2-City` db (#20192) * fix(enrichment_tables): bring back support for `GeoLite2-City` db Brings back support for `GeoLite2-City` DB after adding custom `mmdb` type. The issue was that previously any unknown DB type was treated as a City DB, which covered the case of `GeoLite2-City`. This now explicitly adds it as a supported type. Fixes: #20191 Related: #20054 * Add changelog --- changelog.d/20192_geoip_geolite_city_support.fix.md | 3 +++ src/enrichment_tables/geoip.rs | 2 +- 2 files changed, 4 insertions(+), 1 deletion(-) create mode 100644 changelog.d/20192_geoip_geolite_city_support.fix.md diff --git a/changelog.d/20192_geoip_geolite_city_support.fix.md b/changelog.d/20192_geoip_geolite_city_support.fix.md new file mode 100644 index 0000000000000..5c8a7b4426d04 --- /dev/null +++ b/changelog.d/20192_geoip_geolite_city_support.fix.md @@ -0,0 +1,3 @@ +Fixed an issue where `GeoLite2-City` MMDB database type was not supported. + +authors: esensar diff --git a/src/enrichment_tables/geoip.rs b/src/enrichment_tables/geoip.rs index 7d7ad25cc106d..0b615c5671fe8 100644 --- a/src/enrichment_tables/geoip.rs +++ b/src/enrichment_tables/geoip.rs @@ -36,7 +36,7 @@ impl TryFrom<&str> for DatabaseKind { "GeoLite2-ASN" => Ok(Self::Asn), "GeoIP2-ISP" => Ok(Self::Isp), "GeoIP2-Connection-Type" => Ok(Self::ConnectionType), - "GeoIP2-City" => Ok(Self::City), + "GeoIP2-City" | "GeoLite2-City" => Ok(Self::City), _ => Err(()), } } From 41bb21ef711d55884b02eee42b0126c25e97dd5e Mon Sep 17 00:00:00 2001 From: Bruce Guenter Date: Wed, 27 Mar 2024 12:33:57 -0600 Subject: [PATCH 0187/1491] chore(deps): Bump MSRV to reduce usage of `async_trait` (#20155) There are several uses that still require the use of `async_trait`, namely those where the resulting trait must be object safe (i.e. is used in a `Box` construct). The `async_trait` macro handles this by boxing the resulting future, so we _could_ work around this ourselves, but only by manually expanding the boxing, which is not a win over just using the trait.
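To make that constraint concrete, here is a minimal, self-contained sketch; the `Ping`, `DynPing`, and `Fast` names are invented for illustration and do not appear in this patch. On the new MSRV a trait can declare a native `async fn`, which is why traits such as `SendRecord` and the `aws_s_s` `Client` in the diff below can drop the macro in favor of `fn ... -> impl Future + Send`, while a trait that must stay object safe, like `StreamSink`, still has to box its future by hand or keep `#[async_trait]`.

```rust
#![allow(async_fn_in_trait)] // the same lint this patch allows crate-wide

use std::future::Future;
use std::pin::Pin;

struct Fast;

// Fine on Rust >= 1.75 (hence the MSRV bump): a native `async fn` in a
// trait, no macro required, as long as callers name concrete types.
trait Ping {
    async fn ping(&self) -> u64;
}

impl Ping for Fast {
    async fn ping(&self) -> u64 {
        1
    }
}

// A trait consumed as `Box<dyn ...>` cannot use that sugar: the returned
// future must be boxed by hand, which is exactly what `#[async_trait]`
// expands to and why `StreamSink` keeps the macro.
trait DynPing {
    fn ping<'a>(&'a self) -> Pin<Box<dyn Future<Output = u64> + Send + 'a>>;
}

impl DynPing for Fast {
    fn ping<'a>(&'a self) -> Pin<Box<dyn Future<Output = u64> + Send + 'a>> {
        Box::pin(async { 1 })
    }
}

fn main() {
    // Works as a trait object only because the future is boxed.
    let object_safe: Box<dyn DynPing> = Box::new(Fast);
    drop(object_safe.ping());
    // The unboxed variant works only with a concrete receiver type.
    drop(Fast.ping());
}
```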
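Looking back at the `GeoLite2-City` fix (#20192) above, the hunk in `src/enrichment_tables/geoip.rs` is small enough to restate as a runnable sketch. This is a standalone copy of just the `TryFrom<&str>` dispatch, not Vector's full `DatabaseKind` with the rest of the geoip machinery: after #20054 tightened the match, the free `GeoLite2-City` database fell through to the catch-all `Err(())` arm, and the fix accepts it alongside the commercial `GeoIP2-City` name.

```rust
// Standalone copy of the dispatch from the hunk above, for illustration only.
#[derive(Debug, PartialEq)]
enum DatabaseKind {
    Asn,
    Isp,
    ConnectionType,
    City,
}

impl TryFrom<&str> for DatabaseKind {
    type Error = ();

    fn try_from(value: &str) -> Result<Self, Self::Error> {
        match value {
            "GeoLite2-ASN" => Ok(Self::Asn),
            "GeoIP2-ISP" => Ok(Self::Isp),
            "GeoIP2-Connection-Type" => Ok(Self::ConnectionType),
            // The fix: accept the free city database alongside the commercial one.
            "GeoIP2-City" | "GeoLite2-City" => Ok(Self::City),
            _ => Err(()),
        }
    }
}

fn main() {
    // Before this patch, the first lookup hit the `_ => Err(())` arm.
    assert_eq!(DatabaseKind::try_from("GeoLite2-City"), Ok(DatabaseKind::City));
    assert_eq!(DatabaseKind::try_from("GeoIP2-City"), Ok(DatabaseKind::City));
}
```

Unrecognized database types still map to `Err(())` here; other MMDB files are served by the separate `mmdb` enrichment table added in #20054 (see `src/enrichment_tables/mmdb.rs` in the next patch's diff).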
--- Cargo.lock | 1 - Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 1 - lib/vector-api-client/src/gql/components.rs | 4 ---- lib/vector-api-client/src/gql/health.rs | 3 --- lib/vector-api-client/src/gql/meta.rs | 3 --- lib/vector-api-client/src/lib.rs | 1 + lib/vector-api-client/src/test/mod.rs | 3 --- lib/vector-buffers/src/lib.rs | 1 + lib/vector-buffers/src/variants/disk_v2/io.rs | 5 ----- lib/vector-buffers/src/variants/disk_v2/tests/mod.rs | 3 --- .../src/variants/disk_v2/tests/model/filesystem.rs | 3 --- lib/vector-core/src/sink.rs | 5 ++--- src/config/enrichment_table.rs | 2 -- src/config/provider.rs | 2 -- src/enrichment_tables/file.rs | 1 - src/enrichment_tables/geoip.rs | 1 - src/enrichment_tables/mmdb.rs | 1 - src/lib.rs | 1 + src/providers/http.rs | 1 - src/sinks/aws_kinesis/firehose/record.rs | 1 - src/sinks/aws_kinesis/record.rs | 8 ++++---- src/sinks/aws_kinesis/streams/record.rs | 1 - src/sinks/aws_s_s/client.rs | 7 ++++--- src/sinks/aws_s_s/sns/client.rs | 1 - src/sinks/aws_s_s/sqs/client.rs | 1 - src/sinks/gcp/pubsub.rs | 1 - src/sinks/influxdb/logs.rs | 1 - src/sinks/mezmo.rs | 1 - src/sinks/util/http.rs | 6 ++++-- src/sources/util/http/prelude.rs | 2 -- 31 files changed, 18 insertions(+), 56 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e965fc7c839b9..efe23a60a1ce8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -10210,7 +10210,6 @@ name = "vector-api-client" version = "0.1.2" dependencies = [ "anyhow", - "async-trait", "chrono", "clap 4.5.3", "futures 0.3.30", diff --git a/Cargo.toml b/Cargo.toml index 29c57191d12cb..05196713e64ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -12,7 +12,7 @@ default-run = "vector" autobenches = false # our benchmarks are not runnable on their own either way # Minimum supported rust version # See docs/DEVELOPING.md for policy -rust-version = "1.75" +rust-version = "1.77" [[bin]] name = "vector" diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 1b658944f1e3f..6fb4678e25202 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -16,7 +16,6 @@ serde_json.workspace = true anyhow = { version = "1.0.81", default-features = false, features = ["std"] } # Tokio / Futures -async-trait = { version = "0.1", default-features = false } futures = { version = "0.3", default-features = false, features = ["compat", "io-compat"] } tokio = { version = "1.36.0", default-features = false, features = ["macros", "rt", "sync"] } tokio-stream = { version = "0.1.15", default-features = false, features = ["sync"] } diff --git a/lib/vector-api-client/src/gql/components.rs b/lib/vector-api-client/src/gql/components.rs index 00dcb2f8a72e5..a2bfde4cdf487 100644 --- a/lib/vector-api-client/src/gql/components.rs +++ b/lib/vector-api-client/src/gql/components.rs @@ -1,6 +1,5 @@ use std::fmt; -use async_trait::async_trait; use graphql_client::GraphQLQuery; use crate::{BoxedSubscription, QueryResult}; @@ -32,12 +31,10 @@ pub struct ComponentAddedSubscription; )] pub struct ComponentRemovedSubscription; -#[async_trait] pub trait ComponentsQueryExt { async fn components_query(&self, first: i64) -> crate::QueryResult; } -#[async_trait] impl ComponentsQueryExt for crate::Client { async fn components_query(&self, first: i64) -> QueryResult { let request_body = ComponentsQuery::build_query(components_query::Variables { first }); @@ -50,7 +47,6 @@ pub trait ComponentsSubscriptionExt { fn component_removed(&self) -> crate::BoxedSubscription; } -#[async_trait] impl ComponentsSubscriptionExt for 
crate::SubscriptionClient { /// Subscription for when a component has been added fn component_added(&self) -> BoxedSubscription { diff --git a/lib/vector-api-client/src/gql/health.rs b/lib/vector-api-client/src/gql/health.rs index baa83b6450eb4..f3bd4968c8b30 100644 --- a/lib/vector-api-client/src/gql/health.rs +++ b/lib/vector-api-client/src/gql/health.rs @@ -1,6 +1,5 @@ //! Health queries/subscriptions, for asserting a GraphQL API server is alive. -use async_trait::async_trait; use graphql_client::GraphQLQuery; /// Shorthand for a Chrono datetime, set to UTC. @@ -29,13 +28,11 @@ pub struct HealthQuery; pub struct HeartbeatSubscription; /// Extension methods for health queries. -#[async_trait] pub trait HealthQueryExt { /// Executes a health query. async fn health_query(&self) -> crate::QueryResult; } -#[async_trait] impl HealthQueryExt for crate::Client { /// Executes a health query. async fn health_query(&self) -> crate::QueryResult { diff --git a/lib/vector-api-client/src/gql/meta.rs b/lib/vector-api-client/src/gql/meta.rs index c25dc41c3d091..d8d9a734342f3 100644 --- a/lib/vector-api-client/src/gql/meta.rs +++ b/lib/vector-api-client/src/gql/meta.rs @@ -1,4 +1,3 @@ -use async_trait::async_trait; use graphql_client::GraphQLQuery; /// MetaVersionStringQuery returns the version string of the queried Vector instance. @@ -11,13 +10,11 @@ use graphql_client::GraphQLQuery; pub struct MetaVersionStringQuery; /// Extension methods for meta queries. -#[async_trait] pub trait MetaQueryExt { /// Executes a meta version string query. async fn meta_version_string(&self) -> crate::QueryResult; } -#[async_trait] impl MetaQueryExt for crate::Client { /// Executes a meta version string query. async fn meta_version_string(&self) -> crate::QueryResult { diff --git a/lib/vector-api-client/src/lib.rs b/lib/vector-api-client/src/lib.rs index 6f3a2f4c39ac4..e31401172bdc9 100644 --- a/lib/vector-api-client/src/lib.rs +++ b/lib/vector-api-client/src/lib.rs @@ -11,6 +11,7 @@ #![deny(warnings)] #![deny(missing_debug_implementations, missing_copy_implementations)] +#![allow(async_fn_in_trait)] mod client; /// GraphQL queries diff --git a/lib/vector-api-client/src/test/mod.rs b/lib/vector-api-client/src/test/mod.rs index 57590e30ced65..1f7716d96214f 100644 --- a/lib/vector-api-client/src/test/mod.rs +++ b/lib/vector-api-client/src/test/mod.rs @@ -1,4 +1,3 @@ -use async_trait::async_trait; use graphql_client::GraphQLQuery; use crate::{BoxedSubscription, QueryResult}; @@ -48,7 +47,6 @@ pub struct ComponentByComponentKeyQuery; )] pub struct ComponentsConnectionQuery; -#[async_trait] pub trait TestQueryExt { async fn component_links_query( &self, @@ -77,7 +75,6 @@ pub trait TestQueryExt { ) -> crate::QueryResult; } -#[async_trait] impl TestQueryExt for crate::Client { async fn component_links_query( &self, diff --git a/lib/vector-buffers/src/lib.rs b/lib/vector-buffers/src/lib.rs index d24065e0736bd..d8b28c278ac75 100644 --- a/lib/vector-buffers/src/lib.rs +++ b/lib/vector-buffers/src/lib.rs @@ -9,6 +9,7 @@ #![allow(clippy::module_name_repetitions)] #![allow(clippy::type_complexity)] // long-types happen, especially in async code #![allow(clippy::must_use_candidate)] +#![allow(async_fn_in_trait)] #[macro_use] extern crate tracing; diff --git a/lib/vector-buffers/src/variants/disk_v2/io.rs b/lib/vector-buffers/src/variants/disk_v2/io.rs index e81ec5c80a69b..a63b46ba1bc7d 100644 --- a/lib/vector-buffers/src/variants/disk_v2/io.rs +++ b/lib/vector-buffers/src/variants/disk_v2/io.rs @@ -1,6 +1,5 @@ use std::{io, 
path::Path}; -use async_trait::async_trait; use tokio::{ fs::OpenOptions, io::{AsyncRead, AsyncWrite}, @@ -22,7 +21,6 @@ impl Metadata { } /// Generalized interface for opening and deleting files from a filesystem. -#[async_trait] pub trait Filesystem: Send + Sync { type File: AsyncFile; type MemoryMap: ReadableMemoryMap; @@ -89,7 +87,6 @@ pub trait Filesystem: Send + Sync { async fn delete_file(&self, path: &Path) -> io::Result<()>; } -#[async_trait] pub trait AsyncFile: AsyncRead + AsyncWrite + Send + Sync { /// Queries metadata about the underlying file. /// @@ -128,7 +125,6 @@ pub trait WritableMemoryMap: ReadableMemoryMap { #[derive(Clone, Debug)] pub struct ProductionFilesystem; -#[async_trait] impl Filesystem for ProductionFilesystem { type File = tokio::fs::File; type MemoryMap = memmap2::Mmap; @@ -217,7 +213,6 @@ fn open_readable_file_options() -> OpenOptions { open_options } -#[async_trait] impl AsyncFile for tokio::fs::File { async fn metadata(&self) -> io::Result { let metadata = self.metadata().await?; diff --git a/lib/vector-buffers/src/variants/disk_v2/tests/mod.rs b/lib/vector-buffers/src/variants/disk_v2/tests/mod.rs index fa304951aca94..dfdf4120ddc4b 100644 --- a/lib/vector-buffers/src/variants/disk_v2/tests/mod.rs +++ b/lib/vector-buffers/src/variants/disk_v2/tests/mod.rs @@ -4,7 +4,6 @@ use std::{ sync::Arc, }; -use async_trait::async_trait; use tokio::{ fs::OpenOptions, io::{AsyncWriteExt, DuplexStream}, @@ -32,7 +31,6 @@ mod model; mod record; mod size_limits; -#[async_trait] impl AsyncFile for DuplexStream { async fn metadata(&self) -> io::Result { Ok(Metadata { len: 0 }) @@ -43,7 +41,6 @@ impl AsyncFile for DuplexStream { } } -#[async_trait] impl AsyncFile for Cursor> { async fn metadata(&self) -> io::Result { Ok(Metadata { len: 0 }) diff --git a/lib/vector-buffers/src/variants/disk_v2/tests/model/filesystem.rs b/lib/vector-buffers/src/variants/disk_v2/tests/model/filesystem.rs index db14ec92b2819..f6d5d11d5d079 100644 --- a/lib/vector-buffers/src/variants/disk_v2/tests/model/filesystem.rs +++ b/lib/vector-buffers/src/variants/disk_v2/tests/model/filesystem.rs @@ -8,7 +8,6 @@ use std::{ task::{Context, Poll}, }; -use async_trait::async_trait; use tokio::io::{AsyncRead, AsyncWrite, ReadBuf}; use crate::variants::disk_v2::{ @@ -206,7 +205,6 @@ impl AsyncWrite for TestFile { } } -#[async_trait] impl AsyncFile for TestFile { #[instrument(skip(self), level = "debug")] async fn metadata(&self) -> io::Result { @@ -304,7 +302,6 @@ impl Default for TestFilesystem { } } -#[async_trait] impl Filesystem for TestFilesystem { type File = TestFile; type MemoryMap = TestMmap; diff --git a/lib/vector-core/src/sink.rs b/lib/vector-core/src/sink.rs index a3e2e66e08c17..436ecc2cf7c01 100644 --- a/lib/vector-core/src/sink.rs +++ b/lib/vector-core/src/sink.rs @@ -1,6 +1,5 @@ use std::{fmt, iter::IntoIterator, pin::Pin}; -use async_trait::async_trait; use futures::{stream, task::Context, task::Poll, Sink, SinkExt, Stream, StreamExt}; use crate::event::{into_event_stream, Event, EventArray, EventContainer}; @@ -86,7 +85,7 @@ impl fmt::Debug for VectorSink { // === StreamSink === -#[async_trait] +#[async_trait::async_trait] pub trait StreamSink { async fn run(self: Box, input: stream::BoxStream<'_, T>) -> Result<(), ()>; } @@ -172,7 +171,7 @@ struct EventStream { sink: Box, } -#[async_trait] +#[async_trait::async_trait] impl + Send> StreamSink for EventStream { async fn run(self: Box, input: stream::BoxStream<'_, EventArray>) -> Result<(), ()> { let input = 
Box::pin(input.flat_map(into_event_stream)); diff --git a/src/config/enrichment_table.rs b/src/config/enrichment_table.rs index 7052b82005b68..5e2cd72a00858 100644 --- a/src/config/enrichment_table.rs +++ b/src/config/enrichment_table.rs @@ -1,4 +1,3 @@ -use async_trait::async_trait; use enum_dispatch::enum_dispatch; use vector_lib::config::GlobalOptions; use vector_lib::configurable::{configurable_component, NamedComponent}; @@ -22,7 +21,6 @@ impl EnrichmentTableOuter { } /// Generalized interface for describing and building enrichment table components. -#[async_trait] #[enum_dispatch] pub trait EnrichmentTableConfig: NamedComponent + core::fmt::Debug + Send + Sync { /// Builds the enrichment table with the given globals. diff --git a/src/config/provider.rs b/src/config/provider.rs index f2ae9595ee2c4..c2d7fbd462e62 100644 --- a/src/config/provider.rs +++ b/src/config/provider.rs @@ -1,11 +1,9 @@ -use async_trait::async_trait; use enum_dispatch::enum_dispatch; use vector_lib::configurable::NamedComponent; use crate::{providers::BuildResult, signal}; /// Generalized interface for constructing a configuration from a provider. -#[async_trait] #[enum_dispatch] pub trait ProviderConfig: NamedComponent + core::fmt::Debug + Send + Sync { /// Builds a configuration. diff --git a/src/enrichment_tables/file.rs b/src/enrichment_tables/file.rs index 5f1e63cce1214..fe932e1347614 100644 --- a/src/enrichment_tables/file.rs +++ b/src/enrichment_tables/file.rs @@ -230,7 +230,6 @@ impl FileConfig { } } -#[async_trait::async_trait] impl EnrichmentTableConfig for FileConfig { async fn build( &self, diff --git a/src/enrichment_tables/geoip.rs b/src/enrichment_tables/geoip.rs index 0b615c5671fe8..84d9da62b8aae 100644 --- a/src/enrichment_tables/geoip.rs +++ b/src/enrichment_tables/geoip.rs @@ -92,7 +92,6 @@ impl GenerateConfig for GeoipConfig { } } -#[async_trait::async_trait] impl EnrichmentTableConfig for GeoipConfig { async fn build( &self, diff --git a/src/enrichment_tables/mmdb.rs b/src/enrichment_tables/mmdb.rs index 7b63ef4650c88..d4a40197d405b 100644 --- a/src/enrichment_tables/mmdb.rs +++ b/src/enrichment_tables/mmdb.rs @@ -30,7 +30,6 @@ impl GenerateConfig for MmdbConfig { } } -#[async_trait::async_trait] impl EnrichmentTableConfig for MmdbConfig { async fn build( &self, diff --git a/src/lib.rs b/src/lib.rs index 4e710e3429385..9943ce45b68c1 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -7,6 +7,7 @@ #![deny(warnings)] #![deny(missing_docs)] #![cfg_attr(docsrs, feature(doc_cfg), deny(rustdoc::broken_intra_doc_links))] +#![allow(async_fn_in_trait)] #![allow(clippy::approx_constant)] #![allow(clippy::float_cmp)] #![allow(clippy::match_wild_err_arm)] diff --git a/src/providers/http.rs b/src/providers/http.rs index 95bce42ea1547..a41e6404395a6 100644 --- a/src/providers/http.rs +++ b/src/providers/http.rs @@ -169,7 +169,6 @@ fn poll_http( } } -#[async_trait::async_trait] impl ProviderConfig for HttpConfig { async fn build(&mut self, signal_handler: &mut signal::SignalHandler) -> BuildResult { let url = self diff --git a/src/sinks/aws_kinesis/firehose/record.rs b/src/sinks/aws_kinesis/firehose/record.rs index 0b57c55ae5d98..24704aa3b50d5 100644 --- a/src/sinks/aws_kinesis/firehose/record.rs +++ b/src/sinks/aws_kinesis/firehose/record.rs @@ -41,7 +41,6 @@ pub struct KinesisFirehoseClient { pub client: KinesisClient, } -#[async_trait::async_trait] impl SendRecord for KinesisFirehoseClient { type T = KinesisRecord; type E = KinesisError; diff --git a/src/sinks/aws_kinesis/record.rs 
b/src/sinks/aws_kinesis/record.rs index 3a3521f113e6d..2af9f9de59949 100644 --- a/src/sinks/aws_kinesis/record.rs +++ b/src/sinks/aws_kinesis/record.rs @@ -1,4 +1,5 @@ -use async_trait::async_trait; +use std::future::Future; + use aws_smithy_runtime_api::client::{orchestrator::HttpResponse, result::SdkError}; use bytes::Bytes; @@ -19,15 +20,14 @@ pub trait Record { } /// Capable of sending records. -#[async_trait] pub trait SendRecord { type T; type E; /// Sends the records. - async fn send( + fn send( &self, records: Vec, stream_name: String, - ) -> Result>; + ) -> impl Future>> + Send; } diff --git a/src/sinks/aws_kinesis/streams/record.rs b/src/sinks/aws_kinesis/streams/record.rs index 4bb90e3e5c963..16bf89515c277 100644 --- a/src/sinks/aws_kinesis/streams/record.rs +++ b/src/sinks/aws_kinesis/streams/record.rs @@ -51,7 +51,6 @@ pub struct KinesisStreamClient { pub client: KinesisClient, } -#[async_trait::async_trait] impl SendRecord for KinesisStreamClient { type T = KinesisRecord; type E = KinesisError; diff --git a/src/sinks/aws_s_s/client.rs b/src/sinks/aws_s_s/client.rs index ad8907aa503b0..855d7ce0e6da3 100644 --- a/src/sinks/aws_s_s/client.rs +++ b/src/sinks/aws_s_s/client.rs @@ -1,15 +1,16 @@ +use std::future::Future; + use aws_smithy_runtime_api::client::{orchestrator::HttpResponse, result::SdkError}; use super::{request_builder::SendMessageEntry, service::SendMessageResponse}; -#[async_trait::async_trait] pub(super) trait Client where R: std::fmt::Debug + std::fmt::Display + std::error::Error, { - async fn send_message( + fn send_message( &self, entry: SendMessageEntry, byte_size: usize, - ) -> Result>; + ) -> impl Future>> + Send; } diff --git a/src/sinks/aws_s_s/sns/client.rs b/src/sinks/aws_s_s/sns/client.rs index bbcacf30474e5..9890ee2a73645 100644 --- a/src/sinks/aws_s_s/sns/client.rs +++ b/src/sinks/aws_s_s/sns/client.rs @@ -16,7 +16,6 @@ impl SnsMessagePublisher { } } -#[async_trait::async_trait] impl Client for SnsMessagePublisher { async fn send_message( &self, diff --git a/src/sinks/aws_s_s/sqs/client.rs b/src/sinks/aws_s_s/sqs/client.rs index 0e107af13fb28..f50bce6d053d6 100644 --- a/src/sinks/aws_s_s/sqs/client.rs +++ b/src/sinks/aws_s_s/sqs/client.rs @@ -16,7 +16,6 @@ impl SqsMessagePublisher { } } -#[async_trait::async_trait] impl Client for SqsMessagePublisher { async fn send_message( &self, diff --git a/src/sinks/gcp/pubsub.rs b/src/sinks/gcp/pubsub.rs index 3ae8724a73c85..3421b1bab975a 100644 --- a/src/sinks/gcp/pubsub.rs +++ b/src/sinks/gcp/pubsub.rs @@ -206,7 +206,6 @@ impl HttpEventEncoder for PubSubSinkEventEncoder { } } -#[async_trait::async_trait] impl HttpSink for PubsubSink { type Input = Value; type Output = Vec; diff --git a/src/sinks/influxdb/logs.rs b/src/sinks/influxdb/logs.rs index 6707f791084eb..186c382d7d494 100644 --- a/src/sinks/influxdb/logs.rs +++ b/src/sinks/influxdb/logs.rs @@ -320,7 +320,6 @@ impl HttpEventEncoder for InfluxDbLogsEncoder { } } -#[async_trait::async_trait] impl HttpSink for InfluxDbLogsSink { type Input = BytesMut; type Output = BytesMut; diff --git a/src/sinks/mezmo.rs b/src/sinks/mezmo.rs index 3c22b7a7dded0..fe91cdd869f5e 100644 --- a/src/sinks/mezmo.rs +++ b/src/sinks/mezmo.rs @@ -295,7 +295,6 @@ impl HttpEventEncoder> for } } -#[async_trait::async_trait] impl HttpSink for MezmoConfig { type Input = PartitionInnerBuffer; type Output = PartitionInnerBuffer, PartitionKey>; diff --git a/src/sinks/util/http.rs b/src/sinks/util/http.rs index 879f8b59ca5cf..0904a67cb0468 100644 --- a/src/sinks/util/http.rs +++ 
b/src/sinks/util/http.rs @@ -41,14 +41,16 @@ pub trait HttpEventEncoder { fn encode_event(&mut self, event: Event) -> Option; } -#[async_trait::async_trait] pub trait HttpSink: Send + Sync + 'static { type Input; type Output; type Encoder: HttpEventEncoder; fn build_encoder(&self) -> Self::Encoder; - async fn build_request(&self, events: Self::Output) -> crate::Result>; + fn build_request( + &self, + events: Self::Output, + ) -> impl Future>> + Send; } /// Provides a simple wrapper around internal tower and diff --git a/src/sources/util/http/prelude.rs b/src/sources/util/http/prelude.rs index 516d0290a5caa..367116a96c684 100644 --- a/src/sources/util/http/prelude.rs +++ b/src/sources/util/http/prelude.rs @@ -6,7 +6,6 @@ use std::{ time::Duration, }; -use async_trait::async_trait; use bytes::Bytes; use futures::{FutureExt, TryFutureExt}; use hyper::{service::make_service_fn, Server}; @@ -45,7 +44,6 @@ use super::{ error::ErrorMessage, }; -#[async_trait] pub trait HttpSource: Clone + Send + Sync + 'static { // This function can be defined to enrich events with additional HTTP // metadata. This function should be used rather than internal enrichment so From 2e7a3ca03138896d850acdcee47e6deba6f3d29c Mon Sep 17 00:00:00 2001 From: neuronull Date: Wed, 27 Mar 2024 15:35:54 -0600 Subject: [PATCH 0188/1491] chore(ci): peg `fakeintake` docker image (#20196) --- scripts/e2e/datadog-logs/compose.yaml | 6 ++++-- scripts/e2e/datadog-metrics/compose.yaml | 6 ++++-- 2 files changed, 8 insertions(+), 4 deletions(-) diff --git a/scripts/e2e/datadog-logs/compose.yaml b/scripts/e2e/datadog-logs/compose.yaml index 4ca9633af2e4e..56fb68350c004 100644 --- a/scripts/e2e/datadog-logs/compose.yaml +++ b/scripts/e2e/datadog-logs/compose.yaml @@ -84,12 +84,14 @@ services: # Receives log data from the `datadog-agent` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. fakeintake-agent: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:v77a06f2b # Receives log data from the `datadog-agent-vector` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. fakeintake-vector: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:v77a06f2b networks: default: diff --git a/scripts/e2e/datadog-metrics/compose.yaml b/scripts/e2e/datadog-metrics/compose.yaml index d0860b83b71e3..5942fda1011e9 100644 --- a/scripts/e2e/datadog-metrics/compose.yaml +++ b/scripts/e2e/datadog-metrics/compose.yaml @@ -67,12 +67,14 @@ services: # Receives metric data from the `datadog-agent` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. fakeintake-agent: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:v77a06f2b # Receives metric data from the `datadog-agent-vector` service. Is queried by the test runner # which does the validation of consistency with the other fakeintake service. 
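Stepping back to the `async_trait` removal that runs through the library and sink files above: the migration leans on native `async fn` in traits, stabilized in Rust 1.75. A minimal sketch of the two patterns involved, using illustrative names rather than real Vector types:

```rust
// Sketch of the migration, assuming Rust >= 1.75. `Response`, `QueryExt`,
// `SendExt`, and `MyClient` are stand-ins, not types from this codebase.
use std::future::Future;

pub struct Response;

// Pattern 1: a plain `async fn` in the trait. The `async_fn_in_trait` lint
// fires here because the returned future is unnameable and carries no `Send`
// bound, which is why the diffs add `#![allow(async_fn_in_trait)]` at the
// crate roots.
pub trait QueryExt {
    async fn query(&self) -> Response;
}

// Pattern 2: where callers need a `Send` future (as in `SendRecord`,
// `Client`, and `HttpSink::build_request` above), the `async fn` is
// desugared by hand into a return-position `impl Future + Send`.
pub trait SendExt {
    fn send(&self, payload: Vec<u8>) -> impl Future<Output = Response> + Send;
}

pub struct MyClient;

impl QueryExt for MyClient {
    async fn query(&self) -> Response {
        Response
    }
}

impl SendExt for MyClient {
    fn send(&self, _payload: Vec<u8>) -> impl Future<Output = Response> + Send {
        async { Response }
    }
}
```

Traits that must stay object-safe, such as `StreamSink` (which is boxed behind `dyn` for the sink driver), keep the fully qualified `#[async_trait::async_trait]` attribute instead, since native `async fn` in traits does not support `dyn` dispatch.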
fakeintake-vector: - image: docker.io/datadog/fakeintake:latest + # TODO: temporarily pegging the image as latest results in failures + image: docker.io/datadog/fakeintake:v77a06f2b networks: default: From 5ebcc8874d283765593bed7683bd76ec27ebcc00 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Thu, 28 Mar 2024 09:46:26 -0700 Subject: [PATCH 0189/1491] chore(ci): Drop `apt-get upgrade` (#20203) We are seeing a lot of flakiness for this upgrade lately and I'm not really sure it is necessary to be running this. Signed-off-by: Jesse Szwedko --- scripts/environment/bootstrap-ubuntu-20.04.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/scripts/environment/bootstrap-ubuntu-20.04.sh b/scripts/environment/bootstrap-ubuntu-20.04.sh index be6182e6cacd5..ee7bd6b97e40a 100755 --- a/scripts/environment/bootstrap-ubuntu-20.04.sh +++ b/scripts/environment/bootstrap-ubuntu-20.04.sh @@ -20,8 +20,6 @@ apt-get install --yes \ apt-utils \ apt-transport-https -apt-get upgrade --yes - # Deps apt-get install --yes --no-install-recommends \ awscli \ From 11c968d350d2b18337664384be87d7cba345a1c6 Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Thu, 28 Mar 2024 12:31:59 -0700 Subject: [PATCH 0190/1491] chore(ci): Remove pip install of modules (#20204) These don't seem to be used anywhere anymore. Signed-off-by: Jesse Szwedko --- scripts/environment/bootstrap-ubuntu-20.04.sh | 1 - scripts/environment/prepare.sh | 3 --- 2 files changed, 4 deletions(-) diff --git a/scripts/environment/bootstrap-ubuntu-20.04.sh b/scripts/environment/bootstrap-ubuntu-20.04.sh index ee7bd6b97e40a..7ce1541fdb348 100755 --- a/scripts/environment/bootstrap-ubuntu-20.04.sh +++ b/scripts/environment/bootstrap-ubuntu-20.04.sh @@ -39,7 +39,6 @@ apt-get install --yes --no-install-recommends \ llvm \ locales \ pkg-config \ - python3-pip \ rename \ rpm \ ruby-bundler \ diff --git a/scripts/environment/prepare.sh b/scripts/environment/prepare.sh index 3f73ec9b30844..369bee8e2cae4 100755 --- a/scripts/environment/prepare.sh +++ b/scripts/environment/prepare.sh @@ -29,8 +29,5 @@ fi sudo npm -g install markdownlint-cli@0.30 sudo npm -g install @datadog/datadog-ci -pip3 install jsonschema==3.2.0 -pip3 install remarshal==0.11.2 - # Make sure our release build settings are present. . scripts/environment/release-flags.sh From 5e3984a51695511312e7db7cfcad5dc9e90c8e84 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 28 Mar 2024 13:48:25 -0600 Subject: [PATCH 0191/1491] chore(docs): note for 0.37 about incorrect ddtags parsing behavior (#20186) * chore(docs): note for 0.37 about incorrect ddtags parsing behavior * generate docs * feedback jhgilbert --- src/sources/datadog_agent/mod.rs | 3 +++ .../cue/reference/components/sources/base/datadog_agent.cue | 3 +++ website/cue/reference/releases/0.37.0.cue | 6 ++++++ 3 files changed, 12 insertions(+) diff --git a/src/sources/datadog_agent/mod.rs b/src/sources/datadog_agent/mod.rs index f2097b813e8d2..f469a6b257a01 100644 --- a/src/sources/datadog_agent/mod.rs +++ b/src/sources/datadog_agent/mod.rs @@ -113,6 +113,9 @@ pub struct DatadogAgentConfig { /// If this is set to `true`, when log events contain the field `ddtags`, the string value that /// contains a list of key:value pairs set by the Agent is parsed and expanded into an object. + /// + /// Note: This setting introduced in 0.37.0 is incorrectly parsing into an object. This will be + /// fixed in 0.37.1 to parse into an array, which aligns with the Datadog intake. 
#[configurable(metadata(docs::advanced))] #[serde(default = "crate::serde::default_false")] parse_ddtags: bool, diff --git a/website/cue/reference/components/sources/base/datadog_agent.cue b/website/cue/reference/components/sources/base/datadog_agent.cue index 66f0027427344..48e73cc7932bc 100644 --- a/website/cue/reference/components/sources/base/datadog_agent.cue +++ b/website/cue/reference/components/sources/base/datadog_agent.cue @@ -432,6 +432,9 @@ base: components: sources: datadog_agent: configuration: { description: """ If this is set to `true`, when log events contain the field `ddtags`, the string value that contains a list of key:value pairs set by the Agent is parsed and expanded into an object. + + Note: This setting introduced in 0.37.0 is incorrectly parsing into an object. This will be + fixed in 0.37.1 to parse into an array, which aligns with the Datadog intake. """ required: false type: bool: default: false diff --git a/website/cue/reference/releases/0.37.0.cue b/website/cue/reference/releases/0.37.0.cue index 29420f87dac0c..6191e7adfbec2 100644 --- a/website/cue/reference/releases/0.37.0.cue +++ b/website/cue/reference/releases/0.37.0.cue @@ -31,6 +31,12 @@ releases: "0.37.0": { for this contribution. """ + known_issues: [ + """ + The `parse_ddtags` setting added to the `datadog_agent` Source incorrectly parses the tags into an object instead of an array. + """, + ] + changelog: [ { type: "enhancement" From 81b7b854a91bcccbde01677824aa6fed349144cf Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Thu, 28 Mar 2024 14:16:51 -0700 Subject: [PATCH 0192/1491] docs(dnstap source, releasing): Add breaking change note for dnstap source mode (#20202) * docs(dnstap source, releasing): Add breaking change note for dnstap source mode `mode` is now a required parameter with no default. Signed-off-by: Jesse Szwedko * spelling Signed-off-by: Jesse Szwedko --------- Signed-off-by: Jesse Szwedko --- .../en/highlights/2023-03-26-0-37-0-upgrade-guide.md | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md b/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md index a556c0236e2a9..cc9b211403d01 100644 --- a/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md +++ b/website/content/en/highlights/2023-03-26-0-37-0-upgrade-guide.md @@ -12,6 +12,7 @@ badges: Vector's 0.37.0 release includes **breaking changes**: 1. [Vector defaults to requiring non-optional environment variables](#strict-env-vars) +1. [The `dnstap` source now requires the `mode` parameter](#dnstap-mode) and **potentially impactful changes**: @@ -39,6 +40,12 @@ confused by Vector not behaving as they expected. In particular, this happens when using regex capture groups in VRL without realizing they need to be escaped as `$$1` to avoid interpolation. +#### The `dnstap` source now requires the `mode` parameter {#dnstap-mode} + +The `dnstap` source now requires the `mode` parameter with the addition of support for reading +events over a TCP socket. Set this to `unix` to continue reading from the Unix socket configured as +`socket_path`. 
+ ### Potentially impactful changes #### `geoip` enrichment tables no longer fallback to the `GeoIP-City` type {#geoip} From 1f72366dd96ce40ba11c0fea1c044b2184dbb616 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 28 Mar 2024 15:32:26 -0600 Subject: [PATCH 0193/1491] fix(datadog_agent)!: align `ddtags` parsing with DD logs intake (#20184) * chore(datadog_agent): align ddtags parsed output with DD logs intake * changelog * feedback- changelog * component docs * tags reconstruct in dd logs * upgrade guide note * spell check * upgrade/changelog notes about dd logs * Revert "upgrade/changelog notes about dd logs" This reverts commit 210e0adf47ccaadf1ec4172ea909ab3ed69d921b. * Revert "spell check" This reverts commit a5ccbe11dbd47e500c5a529322742a8ca6d70c17. * Revert "tags reconstruct in dd logs" This reverts commit e4c517f07884494daa261c08a24d1b18416acac3. * feedback jesse * feedback jesse- nits --- .../dd_agent_parse_ddtags_format.breaking.md | 1 + src/sources/datadog_agent/logs.rs | 35 ++++++------------- src/sources/datadog_agent/mod.rs | 7 ++-- src/sources/datadog_agent/tests.rs | 5 +-- .../components/sources/base/datadog_agent.cue | 5 +-- 5 files changed, 16 insertions(+), 37 deletions(-) create mode 100644 changelog.d/dd_agent_parse_ddtags_format.breaking.md diff --git a/changelog.d/dd_agent_parse_ddtags_format.breaking.md b/changelog.d/dd_agent_parse_ddtags_format.breaking.md new file mode 100644 index 0000000000000..ee2a3e6e6c051 --- /dev/null +++ b/changelog.d/dd_agent_parse_ddtags_format.breaking.md @@ -0,0 +1 @@ +Previously the `datadog_agent` setting `parse_ddtags` parsed the tag string into an Object. It is now parsed into an Array of `key:value` strings, which matches the behavior of the Datadog logs backend intake. diff --git a/src/sources/datadog_agent/logs.rs b/src/sources/datadog_agent/logs.rs index 634293d4a9a79..398febe5a07b3 100644 --- a/src/sources/datadog_agent/logs.rs +++ b/src/sources/datadog_agent/logs.rs @@ -10,7 +10,6 @@ use vector_lib::json_size::JsonSize; use vector_lib::lookup::path; use vector_lib::{config::LegacyKey, EstimatedJsonEncodedSizeOf}; use vrl::core::Value; -use vrl::value::{KeyString, ObjectMap}; use warp::{filters::BoxedFilter, path as warp_path, path::FullPath, reply::Response, Filter}; use crate::{ @@ -215,35 +214,26 @@ pub(crate) fn decode_log_body( // the tag list members are separated by `,` and the // tag-value pairs are separated by `:`. // -// The output is an Object regardless of the input string. -// Bare tags are constructed as a k-v pair with a null value. +// The output is an Array regardless of the input string. 
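A concrete before/after makes the behavior change easier to follow. The following is a standalone sketch with illustrative tag values, not the real `parse_ddtags` (which, as shown below, operates on `Bytes` and returns a VRL `Value`):

```rust
// Standalone sketch of the corrected `ddtags` handling: the raw
// comma-separated string becomes an array of `key:value` members, and the
// pairs are never split on `:`.
fn parse_ddtags_as_array(raw: &str) -> Vec<String> {
    raw.split(',')
        .filter(|kv| !kv.is_empty())
        .map(|kv| kv.trim().to_string())
        .collect()
}

fn main() {
    // 0.37.0 expanded this string into an object such as
    // {"filename": "driver.log", "debug": null, "wizard": "the_grey"};
    // the corrected behavior keeps the pairs intact, matching the Datadog
    // logs intake:
    assert_eq!(
        parse_ddtags_as_array("filename:driver.log,debug,wizard:the_grey"),
        vec!["filename:driver.log", "debug", "wizard:the_grey"]
    );
}
```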
fn parse_ddtags(ddtags_raw: &Bytes) -> Value { if ddtags_raw.is_empty() { - return ObjectMap::new().into(); + return Vec::::new().into(); } let ddtags_str = String::from_utf8_lossy(ddtags_raw); - // The value is a single bare tag - if !ddtags_str.contains(',') && !ddtags_str.contains(':') { - return ObjectMap::from([(KeyString::from(ddtags_str), Value::Null)]).into(); - } - // There are multiple tags, which could be either bare or pairs - let ddtags_object: ObjectMap = ddtags_str + let ddtags: Vec = ddtags_str .split(',') .filter(|kv| !kv.is_empty()) - .map(|kv| match kv.split_once(':') { - Some((k, v)) => (KeyString::from(k), Value::Bytes(Bytes::from(v.to_string()))), - None => (KeyString::from(kv), Value::Null), - }) + .map(|kv| Value::Bytes(Bytes::from(kv.trim().to_string()))) .collect(); - if ddtags_object.is_empty() && !ddtags_str.is_empty() { + if ddtags.is_empty() && !ddtags_str.is_empty() { warn!(message = "`parse_ddtags` set to true and Agent log contains non-empty ddtags string, but no tag-value pairs were parsed.") } - ddtags_object.into() + ddtags.into() } #[cfg(test)] @@ -257,7 +247,7 @@ mod tests { let raw = Bytes::from(String::from("")); let val = parse_ddtags(&raw); - assert_eq!(val, value!({})); + assert_eq!(val, value!([])); } #[test] @@ -265,7 +255,7 @@ mod tests { let raw = Bytes::from(String::from("bare")); let val = parse_ddtags(&raw); - assert_eq!(val, value!({"bare": null})); + assert_eq!(val, value!(["bare"])); } #[test] @@ -273,7 +263,7 @@ mod tests { let raw = Bytes::from(String::from("filename:driver.log")); let val = parse_ddtags(&raw); - assert_eq!(val, value!({"filename": "driver.log"})); + assert_eq!(val, value!(["filename:driver.log"])); } #[test] @@ -281,10 +271,7 @@ mod tests { let raw = Bytes::from(String::from("filename:driver.log,wizard:the_grey")); let val = parse_ddtags(&raw); - assert_eq!( - val, - value!({"filename": "driver.log", "wizard": "the_grey"}) - ); + assert_eq!(val, value!(["filename:driver.log", "wizard:the_grey"])); } #[test] @@ -294,7 +281,7 @@ mod tests { assert_eq!( val, - value!({"filename": "driver.log", "wizard": "the_grey", "debug": null}) + value!(["filename:driver.log", "debug", "wizard:the_grey"]) ); } } diff --git a/src/sources/datadog_agent/mod.rs b/src/sources/datadog_agent/mod.rs index f469a6b257a01..ef406630be6c7 100644 --- a/src/sources/datadog_agent/mod.rs +++ b/src/sources/datadog_agent/mod.rs @@ -112,10 +112,7 @@ pub struct DatadogAgentConfig { multiple_outputs: bool, /// If this is set to `true`, when log events contain the field `ddtags`, the string value that - /// contains a list of key:value pairs set by the Agent is parsed and expanded into an object. - /// - /// Note: This setting introduced in 0.37.0 is incorrectly parsing into an object. This will be - /// fixed in 0.37.1 to parse into an array, which aligns with the Datadog intake. + /// contains a list of key:value pairs set by the Agent is parsed and expanded into an array. 
#[configurable(metadata(docs::advanced))] #[serde(default = "crate::serde::default_false")] parse_ddtags: bool, @@ -281,7 +278,7 @@ impl SourceConfig for DatadogAgentConfig { Some(LegacyKey::InsertIfEmpty(owned_value_path!("ddtags"))), &owned_value_path!("ddtags"), if self.parse_ddtags { - Kind::object(Collection::empty().with_unknown(Kind::bytes())).or_undefined() + Kind::array(Collection::empty().with_unknown(Kind::bytes())).or_undefined() } else { Kind::bytes() }, diff --git a/src/sources/datadog_agent/tests.rs b/src/sources/datadog_agent/tests.rs index cee767ec9d418..9fcbdd6a9fcf9 100644 --- a/src/sources/datadog_agent/tests.rs +++ b/src/sources/datadog_agent/tests.rs @@ -175,10 +175,7 @@ fn test_decode_log_body_parse_ddtags() { assert_eq!(log["service"], log_msg.service.into()); assert_eq!(log["ddsource"], log_msg.ddsource.into()); - assert_eq!( - log["ddtags"], - value!({"env": "staging", "wizard": "the_grey"}) - ); + assert_eq!(log["ddtags"], value!(["wizard:the_grey", "env:staging"])); } #[test] diff --git a/website/cue/reference/components/sources/base/datadog_agent.cue b/website/cue/reference/components/sources/base/datadog_agent.cue index 48e73cc7932bc..ba8c42768b138 100644 --- a/website/cue/reference/components/sources/base/datadog_agent.cue +++ b/website/cue/reference/components/sources/base/datadog_agent.cue @@ -431,10 +431,7 @@ base: components: sources: datadog_agent: configuration: { parse_ddtags: { description: """ If this is set to `true`, when log events contain the field `ddtags`, the string value that - contains a list of key:value pairs set by the Agent is parsed and expanded into an object. - - Note: This setting introduced in 0.37.0 is incorrectly parsing into an object. This will be - fixed in 0.37.1 to parse into an array, which aligns with the Datadog intake. + contains a list of key:value pairs set by the Agent is parsed and expanded into an array. 
""" required: false type: bool: default: false From c7f0a85fbfc6bdcf17c5a3bf1ad571c80731b701 Mon Sep 17 00:00:00 2001 From: neuronull Date: Thu, 28 Mar 2024 15:32:39 -0600 Subject: [PATCH 0194/1491] chore(testing): support LogNamespace in Component Validation Framework (#20148) * chore(testing): support LogNamespace in Component Validation Framework * check events * feedback bruce- helper struct * feedback bruce- convert to bool later * feedback bruce- syntax nits * typo * feedback bruce- nits --- src/components/validation/mod.rs | 14 + src/components/validation/resources/http.rs | 260 +++++++++--------- src/components/validation/resources/mod.rs | 39 ++- src/components/validation/runner/config.rs | 22 +- src/components/validation/runner/mod.rs | 4 +- .../validators/component_spec/mod.rs | 5 +- src/sinks/datadog/logs/config.rs | 6 +- src/sinks/http/config.rs | 2 + src/sinks/splunk_hec/logs/config.rs | 6 +- src/sources/datadog_agent/tests.rs | 3 + src/sources/http_client/tests.rs | 3 + src/sources/http_server.rs | 3 + src/sources/splunk_hec/mod.rs | 2 + src/sources/vector/mod.rs | 2 +- 14 files changed, 211 insertions(+), 160 deletions(-) diff --git a/src/components/validation/mod.rs b/src/components/validation/mod.rs index e8126ba4159be..90fc349f0a1ef 100644 --- a/src/components/validation/mod.rs +++ b/src/components/validation/mod.rs @@ -6,6 +6,8 @@ mod test_case; pub mod util; mod validators; +use vector_lib::config::LogNamespace; + use crate::config::{BoxedSink, BoxedSource, BoxedTransform}; /// For components implementing `ValidatableComponent` @@ -125,42 +127,49 @@ pub struct ValidationConfiguration { /// There may be only one `ComponentTestCaseConfig` necessary to execute all test cases, but some cases /// require more advanced configuration in order to hit the code path desired. component_configurations: Vec, + log_namespace: LogNamespace, } impl ValidationConfiguration { /// Creates a new `ValidationConfiguration` for a source. pub fn from_source( component_name: &'static str, + log_namespace: LogNamespace, component_configurations: Vec, ) -> Self { Self { component_name, component_type: ComponentType::Source, component_configurations, + log_namespace, } } /// Creates a new `ValidationConfiguration` for a transform. pub fn from_transform( component_name: &'static str, + log_namespace: LogNamespace, component_configurations: Vec, ) -> Self { Self { component_name, component_type: ComponentType::Transform, component_configurations, + log_namespace, } } /// Creates a new `ValidationConfiguration` for a sink. pub fn from_sink( component_name: &'static str, + log_namespace: LogNamespace, component_configurations: Vec, ) -> Self { Self { component_name, component_type: ComponentType::Sink, component_configurations, + log_namespace, } } @@ -179,6 +188,11 @@ impl ValidationConfiguration { self.component_configurations.clone() } + /// Gets the LogNamespace that the component is using. 
+ pub const fn log_namespace(&self) -> LogNamespace { + self.log_namespace + } + fn get_comp_test_case(&self, test_case: Option<&String>) -> Option { let empty = String::from(""); let test_case = test_case.unwrap_or(&empty); diff --git a/src/components/validation/resources/http.rs b/src/components/validation/resources/http.rs index 36f3a67e36619..2bfc04deba62f 100644 --- a/src/components/validation/resources/http.rs +++ b/src/components/validation/resources/http.rs @@ -26,7 +26,8 @@ use crate::components::validation::{ }; use vector_lib::{ codecs::encoding::Framer, codecs::encoding::Serializer::Json, - codecs::CharacterDelimitedEncoder, event::Event, EstimatedJsonEncodedSizeOf, + codecs::CharacterDelimitedEncoder, config::LogNamespace, event::Event, + EstimatedJsonEncodedSizeOf, }; use super::{encode_test_event, ResourceCodec, ResourceDirection, TestEvent}; @@ -73,32 +74,12 @@ impl HttpResourceConfig { } } - pub fn spawn_as_output( - self, - direction: ResourceDirection, - codec: ResourceCodec, - output_tx: mpsc::Sender>, - task_coordinator: &TaskCoordinator, - input_events: Vec, - runner_metrics: &Arc>, - ) -> vector_lib::Result<()> { - match direction { + pub fn spawn_as_output(self, ctx: HttpResourceOutputContext) -> vector_lib::Result<()> { + match ctx.direction { // We'll pull data from the sink. - ResourceDirection::Pull => Ok(spawn_output_http_client( - self, - codec, - output_tx, - task_coordinator, - )), + ResourceDirection::Pull => Ok(ctx.spawn_output_http_client(self)), // The sink will push data to us. - ResourceDirection::Push => spawn_output_http_server( - self, - codec, - output_tx, - task_coordinator, - input_events, - runner_metrics, - ), + ResourceDirection::Push => ctx.spawn_output_http_server(self), } } } @@ -297,127 +278,141 @@ fn spawn_input_http_client( }); } -/// Spawns an HTTP server that accepts events sent by a sink. -#[allow(clippy::missing_const_for_fn)] -fn spawn_output_http_server( - config: HttpResourceConfig, - codec: ResourceCodec, - output_tx: mpsc::Sender>, - task_coordinator: &TaskCoordinator, - input_events: Vec, - runner_metrics: &Arc>, -) -> vector_lib::Result<()> { - // This HTTP server will wait for events to be sent by a sink, and collect them and send them on - // via an output sender. We accept/collect events until we're told to shutdown. - - // First, we'll build and spawn our HTTP server. - let decoder = codec.into_decoder()?; +/// Anything that the output side HTTP external resource needs +pub struct HttpResourceOutputContext<'a> { + pub direction: ResourceDirection, + pub codec: ResourceCodec, + pub output_tx: mpsc::Sender>, + pub task_coordinator: &'a TaskCoordinator, + pub input_events: Vec, + pub runner_metrics: &'a Arc>, + pub log_namespace: LogNamespace, +} - // Note that we currently don't differentiate which events should and shouldn't be rejected- - // we reject all events in this server if any are marked for rejection. - // In the future it might be useful to be able to select which to reject. That will involve - // adding logic to the test case which is passed down here, and to the event itself. Since - // we can't guarantee the order of events, we'd need a way to flag which ones need to be - // rejected. - let should_reject = input_events.iter().filter(|te| te.should_reject()).count() > 0; +impl HttpResourceOutputContext<'_> { + /// Spawns an HTTP server that accepts events sent by a sink. 
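The `HttpResourceOutputContext` introduced here is a plain parameter-object refactor: the former free functions took each of these values as separate arguments, so adding `log_namespace` would have rippled through every signature. A self-contained sketch of the shape (all types below are stand-ins, not the real validation-framework types):

```rust
// Stand-in types; the real context also carries the codec, channels, and
// input events seen in the diff.
struct TaskCoordinator;
struct RunnerMetrics;

struct OutputContext<'a> {
    log_namespace: bool, // stand-in for `LogNamespace`
    task_coordinator: &'a TaskCoordinator,
    runner_metrics: &'a RunnerMetrics,
}

impl OutputContext<'_> {
    // Formerly `spawn_output_http_server(config, codec, output_tx, ...)`;
    // the body now reads its inputs from `self`, so new fields do not
    // change the method signature.
    fn spawn_output_http_server(&self) {
        let _ = (self.log_namespace, self.task_coordinator, self.runner_metrics);
    }
}

fn main() {
    let (coordinator, metrics) = (TaskCoordinator, RunnerMetrics);
    let ctx = OutputContext {
        log_namespace: true,
        task_coordinator: &coordinator,
        runner_metrics: &metrics,
    };
    ctx.spawn_output_http_server();
}
```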
+ #[allow(clippy::missing_const_for_fn)] + fn spawn_output_http_server(&self, config: HttpResourceConfig) -> vector_lib::Result<()> { + // This HTTP server will wait for events to be sent by a sink, and collect them and send them on + // via an output sender. We accept/collect events until we're told to shutdown. + + // First, we'll build and spawn our HTTP server. + let decoder = self.codec.into_decoder(self.log_namespace)?; + + // Note that we currently don't differentiate which events should and shouldn't be rejected- + // we reject all events in this server if any are marked for rejection. + // In the future it might be useful to be able to select which to reject. That will involve + // adding logic to the test case which is passed down here, and to the event itself. Since + // we can't guarantee the order of events, we'd need a way to flag which ones need to be + // rejected. + let should_reject = self + .input_events + .iter() + .filter(|te| te.should_reject()) + .count() + > 0; + + let output_tx = self.output_tx.clone(); + let (_, http_server_shutdown_tx) = spawn_http_server( + self.task_coordinator, + &config, + self.runner_metrics, + move |request, output_runner_metrics| { + let output_tx = output_tx.clone(); + let mut decoder = decoder.clone(); - let (_, http_server_shutdown_tx) = spawn_http_server( - task_coordinator, - &config, - runner_metrics, - move |request, output_runner_metrics| { - let output_tx = output_tx.clone(); - let mut decoder = decoder.clone(); - - async move { - match hyper::body::to_bytes(request.into_body()).await { - Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), - Ok(body) => { - let mut body = BytesMut::from(&body[..]); - loop { - match decoder.decode_eof(&mut body) { - Ok(Some((events, byte_size))) => { - if should_reject { - info!("HTTP server external output resource decoded {byte_size} bytes but test case configured to reject."); - } else { - let mut output_runner_metrics = - output_runner_metrics.lock().await; - info!("HTTP server external output resource decoded {byte_size} bytes."); - - // Update the runner metrics for the received events. This will later - // be used in the Validators, as the "expected" case. - output_runner_metrics.received_bytes_total += - byte_size as u64; - - output_runner_metrics.received_events_total += - events.len() as u64; - - events.iter().for_each(|event| { - output_runner_metrics.received_event_bytes_total += - event.estimated_json_encoded_size_of().get() as u64; - }); - - output_tx - .send(events.to_vec()) - .await - .expect("should not fail to send output event"); + async move { + match hyper::body::to_bytes(request.into_body()).await { + Err(_) => StatusCode::INTERNAL_SERVER_ERROR.into_response(), + Ok(body) => { + let mut body = BytesMut::from(&body[..]); + loop { + match decoder.decode_eof(&mut body) { + Ok(Some((events, byte_size))) => { + if should_reject { + info!("HTTP server external output resource decoded {byte_size} bytes but test case configured to reject."); + } else { + let mut output_runner_metrics = + output_runner_metrics.lock().await; + info!("HTTP server external output resource decoded {byte_size} bytes."); + + // Update the runner metrics for the received events. This will later + // be used in the Validators, as the "expected" case. 
+ output_runner_metrics.received_bytes_total += + byte_size as u64; + + output_runner_metrics.received_events_total += + events.len() as u64; + + events.iter().for_each(|event| { + output_runner_metrics.received_event_bytes_total += + event.estimated_json_encoded_size_of().get() + as u64; + }); + + output_tx + .send(events.to_vec()) + .await + .expect("should not fail to send output event"); + } } - } - Ok(None) => { - if should_reject { - // This status code is not retried and should result in the component under test - // emitting error events - return StatusCode::BAD_REQUEST.into_response(); - } else { - return StatusCode::OK.into_response(); + Ok(None) => { + if should_reject { + // This status code is not retried and should result in the component under test + // emitting error events + return StatusCode::BAD_REQUEST.into_response(); + } else { + return StatusCode::OK.into_response(); + } + } + Err(_) => { + error!( + "HTTP server failed to decode {:?}", + String::from_utf8_lossy(&body) + ); + return StatusCode::INTERNAL_SERVER_ERROR.into_response(); } } - Err(_) => return StatusCode::INTERNAL_SERVER_ERROR.into_response(), } } } } - } - }, - ); + }, + ); - // Now we'll create and spawn the resource's core logic loop which simply waits for the runner - // to instruct us to shutdown, and when that happens, cascades to shutting down the HTTP server. - let resource_started = task_coordinator.track_started(); - let resource_completed = task_coordinator.track_completed(); - let mut resource_shutdown_rx = task_coordinator.register_for_shutdown(); + // Now we'll create and spawn the resource's core logic loop which simply waits for the runner + // to instruct us to shutdown, and when that happens, cascades to shutting down the HTTP server. + let resource_started = self.task_coordinator.track_started(); + let resource_completed = self.task_coordinator.track_completed(); + let mut resource_shutdown_rx = self.task_coordinator.register_for_shutdown(); - tokio::spawn(async move { - resource_started.mark_as_done(); - info!("HTTP server external output resource started."); + tokio::spawn(async move { + resource_started.mark_as_done(); + info!("HTTP server external output resource started."); - // Wait for the runner to tell us to shutdown - resource_shutdown_rx.wait().await; + // Wait for the runner to tell us to shutdown + resource_shutdown_rx.wait().await; - // signal the server to shutdown - let _ = http_server_shutdown_tx.send(()); + // signal the server to shutdown + let _ = http_server_shutdown_tx.send(()); - // mark ourselves as done - resource_completed.mark_as_done(); + // mark ourselves as done + resource_completed.mark_as_done(); - info!("HTTP server external output resource completed."); - }); + info!("HTTP server external output resource completed."); + }); - Ok(()) -} + Ok(()) + } -/// Spawns an HTTP client that pulls events by making requests to an HTTP server driven by a sink. -#[allow(clippy::missing_const_for_fn)] -fn spawn_output_http_client( - _config: HttpResourceConfig, - _codec: ResourceCodec, - _output_tx: mpsc::Sender>, - _task_coordinator: &TaskCoordinator, -) { - // TODO: The `prometheus_exporter` sink is the only sink that exposes an HTTP server which must be - // scraped... but since we need special logic to aggregate/deduplicate scraped metrics, we can't - // use this generically for that purpose. - todo!() + /// Spawns an HTTP client that pulls events by making requests to an HTTP server driven by a sink. 
+ #[allow(clippy::missing_const_for_fn)] + fn spawn_output_http_client(&self, _config: HttpResourceConfig) { + // TODO: The `prometheus_exporter` sink is the only sink that exposes an HTTP server which must be + // scraped... but since we need special logic to aggregate/deduplicate scraped metrics, we can't + // use this generically for that purpose. + todo!() + } } fn spawn_http_server( @@ -490,7 +485,12 @@ where } }); - let router = Router::new().route(&request_path, method_router); + let router = Router::new().route(&request_path, method_router).fallback( + |req: Request| async move { + error!(?req, "Component sent request the server could not route."); + StatusCode::NOT_FOUND + }, + ); // Now actually run/drive the HTTP server and process requests until we're told to shutdown. http_server_started.mark_as_done(); diff --git a/src/components/validation/resources/mod.rs b/src/components/validation/resources/mod.rs index c605d182d8a16..82d5ea2b906be 100644 --- a/src/components/validation/resources/mod.rs +++ b/src/components/validation/resources/mod.rs @@ -4,12 +4,16 @@ mod http; use std::sync::Arc; use tokio::sync::{mpsc, Mutex}; -use vector_lib::codecs::{ - decoding::{self, DeserializerConfig}, - encoding::{ - self, Framer, FramingConfig, JsonSerializerConfig, SerializerConfig, TextSerializerConfig, +use vector_lib::{ + codecs::{ + decoding::{self, DeserializerConfig}, + encoding::{ + self, Framer, FramingConfig, JsonSerializerConfig, SerializerConfig, + TextSerializerConfig, + }, + BytesEncoder, }, - BytesEncoder, + config::LogNamespace, }; use vector_lib::{config::DataType, event::Event}; @@ -17,6 +21,7 @@ use crate::codecs::{Decoder, DecodingConfig, Encoder, EncodingConfig, EncodingCo pub use self::event::{encode_test_event, TestEvent}; pub use self::http::HttpResourceConfig; +use self::http::HttpResourceOutputContext; use super::{ sync::{Configuring, TaskCoordinator}, @@ -101,7 +106,7 @@ impl ResourceCodec { /// /// The decoder is generated as an inverse to the input codec: if an encoding configuration was /// given, we generate a decoder that satisfies that encoding configuration, and vice versa. 
- pub fn into_decoder(&self) -> vector_lib::Result { + pub fn into_decoder(&self, log_namespace: LogNamespace) -> vector_lib::Result { let (framer, deserializer) = match self { Self::Decoding(config) => return config.build(), Self::Encoding(config) => ( @@ -118,7 +123,7 @@ impl ResourceCodec { } }; - Ok(Decoder::new(framer, deserializer)) + Ok(Decoder::new(framer, deserializer).with_log_namespace(log_namespace)) } } @@ -350,16 +355,20 @@ impl ExternalResource { task_coordinator: &TaskCoordinator, input_events: Vec, runner_metrics: &Arc>, + log_namespace: LogNamespace, ) -> vector_lib::Result<()> { match self.definition { - ResourceDefinition::Http(http_config) => http_config.spawn_as_output( - self.direction, - self.codec, - output_tx, - task_coordinator, - input_events, - runner_metrics, - ), + ResourceDefinition::Http(http_config) => { + http_config.spawn_as_output(HttpResourceOutputContext { + direction: self.direction, + codec: self.codec, + output_tx, + task_coordinator, + input_events, + runner_metrics, + log_namespace, + }) + } } } } diff --git a/src/components/validation/runner/config.rs b/src/components/validation/runner/config.rs index f2adb90765ce1..5f70f56670e99 100644 --- a/src/components/validation/runner/config.rs +++ b/src/components/validation/runner/config.rs @@ -1,3 +1,5 @@ +use vector_lib::config::LogNamespace; + use crate::{ components::validation::{ component_names::*, @@ -34,6 +36,7 @@ impl TopologyBuilder { "No test case name defined for configuration {:?}.", config_name ))?; + Ok(match component_configuration { ComponentConfiguration::Source(source) => { debug_assert_eq!(configuration.component_type(), ComponentType::Source); @@ -41,11 +44,11 @@ impl TopologyBuilder { } ComponentConfiguration::Transform(transform) => { debug_assert_eq!(configuration.component_type(), ComponentType::Transform); - Self::from_transform(transform) + Self::from_transform(transform, configuration.log_namespace) } ComponentConfiguration::Sink(sink) => { debug_assert_eq!(configuration.component_type(), ComponentType::Sink); - Self::from_sink(sink) + Self::from_sink(sink, configuration.log_namespace) } }) } @@ -65,8 +68,8 @@ impl TopologyBuilder { } } - fn from_transform(transform: BoxedTransform) -> Self { - let (input_edge, input_source) = build_input_edge(); + fn from_transform(transform: BoxedTransform, log_namespace: LogNamespace) -> Self { + let (input_edge, input_source) = build_input_edge(log_namespace); let (output_edge, output_sink) = build_output_edge(); let mut config_builder = ConfigBuilder::default(); @@ -81,8 +84,8 @@ impl TopologyBuilder { } } - fn from_sink(sink: BoxedSink) -> Self { - let (input_edge, input_source) = build_input_edge(); + fn from_sink(sink: BoxedSink, log_namespace: LogNamespace) -> Self { + let (input_edge, input_source) = build_input_edge(log_namespace); let mut config_builder = ConfigBuilder::default(); config_builder.add_source(TEST_INPUT_SOURCE_NAME, input_source); @@ -123,11 +126,14 @@ impl TopologyBuilder { } } -fn build_input_edge() -> (InputEdge, impl Into) { +fn build_input_edge(log_namespace: LogNamespace) -> (InputEdge, impl Into) { let input_listen_addr = GrpcAddress::from(next_addr()); debug!(listen_addr = %input_listen_addr, "Creating controlled input edge."); - let input_source = VectorSourceConfig::from_address(input_listen_addr.as_socket_addr()); + let mut input_source = VectorSourceConfig::from_address(input_listen_addr.as_socket_addr()); + + input_source.log_namespace = Some(log_namespace == LogNamespace::Vector); + let input_edge = 
InputEdge::from_address(input_listen_addr); (input_edge, input_source) diff --git a/src/components/validation/runner/mod.rs b/src/components/validation/runner/mod.rs index 5df689267a8a7..8c5937bbf86cd 100644 --- a/src/components/validation/runner/mod.rs +++ b/src/components/validation/runner/mod.rs @@ -322,6 +322,7 @@ impl Runner { &runner_metrics, maybe_runner_encoder.as_ref().cloned(), self.configuration.component_type, + self.configuration.log_namespace(), ); // the number of events we expect to receive from the output. @@ -498,6 +499,7 @@ fn build_external_resource( output_task_coordinator, test_case.events.clone(), runner_metrics, + configuration.log_namespace(), )?; Ok(( @@ -567,10 +569,10 @@ fn spawn_input_driver( runner_metrics: &Arc>, mut maybe_encoder: Option>, component_type: ComponentType, + log_namespace: LogNamespace, ) -> JoinHandle<()> { let input_runner_metrics = Arc::clone(runner_metrics); - let log_namespace = LogNamespace::Legacy; let now = Utc::now(); tokio::spawn(async move { diff --git a/src/components/validation/validators/component_spec/mod.rs b/src/components/validation/validators/component_spec/mod.rs index 855a7633a9b71..98db84a1ce6dd 100644 --- a/src/components/validation/validators/component_spec/mod.rs +++ b/src/components/validation/validators/component_spec/mod.rs @@ -281,7 +281,7 @@ fn compare_actual_to_expected( let actual = sum_counters(metric_type, &metrics)?; - info!("{}: expected {}, actual {}.", metric_type, expected, actual,); + info!("{metric_type}: expected {expected}, actual {actual}."); if actual != expected && // This is a bit messy. The issue is that EstimatedJsonSizeOf can be called by a component @@ -293,8 +293,7 @@ fn compare_actual_to_expected( || (actual != (expected + (expect_received_events * 2)))) { errs.push(format!( - "{}: expected {}, but received {}", - metric_type, expected, actual + "{metric_type}: expected {expected}, actual {actual}", )); } diff --git a/src/sinks/datadog/logs/config.rs b/src/sinks/datadog/logs/config.rs index a0aa0856c5b96..3f83f5733fe55 100644 --- a/src/sinks/datadog/logs/config.rs +++ b/src/sinks/datadog/logs/config.rs @@ -185,7 +185,10 @@ mod test { use super::*; use crate::codecs::EncodingConfigWithFraming; use crate::components::validation::prelude::*; - use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; + use vector_lib::{ + codecs::{JsonSerializerConfig, MetricTagValues}, + config::LogNamespace, + }; #[test] fn generate_config() { @@ -223,6 +226,7 @@ mod test { ValidationConfiguration::from_sink( Self::NAME, + LogNamespace::Legacy, vec![ComponentTestCaseConfig::from_sink( config, None, diff --git a/src/sinks/http/config.rs b/src/sinks/http/config.rs index 6c00c82ea05f0..e025ab81b5a84 100644 --- a/src/sinks/http/config.rs +++ b/src/sinks/http/config.rs @@ -315,6 +315,7 @@ mod tests { fn validation_configuration() -> ValidationConfiguration { use std::str::FromStr; use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; + use vector_lib::config::LogNamespace; let config = HttpSinkConfig { uri: UriSerde::from_str("http://127.0.0.1:9000/endpoint") @@ -344,6 +345,7 @@ mod tests { ValidationConfiguration::from_sink( Self::NAME, + LogNamespace::Legacy, vec![ComponentTestCaseConfig::from_sink( config, None, diff --git a/src/sinks/splunk_hec/logs/config.rs b/src/sinks/splunk_hec/logs/config.rs index 9735ef9382984..31a6d2b5d7931 100644 --- a/src/sinks/splunk_hec/logs/config.rs +++ b/src/sinks/splunk_hec/logs/config.rs @@ -284,7 +284,10 @@ impl HecLogsSinkConfig { mod tests { use super::*; 
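The wiring repeated across these test modules is uniform: derive the namespace from the component's own optional flag and pass it as the new second argument to `ValidationConfiguration::from_source`/`from_sink`. A condensed sketch, assuming the `From<bool>` conversion for `LogNamespace` that the diffs rely on (`MySourceConfig` is illustrative):

```rust
use vector_lib::config::LogNamespace;

// Illustrative stand-in for a component config with the optional flag.
struct MySourceConfig {
    log_namespace: Option<bool>,
}

fn namespace_for_validation(config: &MySourceConfig) -> LogNamespace {
    // `Some(true)` selects `LogNamespace::Vector`; `None` or `Some(false)`
    // fall back to `LogNamespace::Legacy`. The result becomes the second
    // argument to `ValidationConfiguration::from_source`/`from_sink`.
    config.log_namespace.unwrap_or(false).into()
}

fn main() {
    let config = MySourceConfig { log_namespace: Some(true) };
    assert!(matches!(
        namespace_for_validation(&config),
        LogNamespace::Vector
    ));
}
```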
use crate::components::validation::prelude::*; - use vector_lib::codecs::{JsonSerializerConfig, MetricTagValues}; + use vector_lib::{ + codecs::{JsonSerializerConfig, MetricTagValues}, + config::LogNamespace, + }; #[test] fn generate_config() { @@ -341,6 +344,7 @@ mod tests { ValidationConfiguration::from_sink( Self::NAME, + LogNamespace::Legacy, vec![ComponentTestCaseConfig::from_sink( config, None, diff --git a/src/sources/datadog_agent/tests.rs b/src/sources/datadog_agent/tests.rs index 9fcbdd6a9fcf9..fb081bee06525 100644 --- a/src/sources/datadog_agent/tests.rs +++ b/src/sources/datadog_agent/tests.rs @@ -2529,6 +2529,8 @@ impl ValidatableComponent for DatadogAgentConfig { keepalive: Default::default(), }; + let log_namespace: LogNamespace = config.log_namespace.unwrap_or_default().into(); + // TODO set up separate test cases for metrics and traces endpoints let logs_addr = format!("http://{}/api/v2/logs", config.address); @@ -2548,6 +2550,7 @@ impl ValidatableComponent for DatadogAgentConfig { ValidationConfiguration::from_source( Self::NAME, + log_namespace, vec![ComponentTestCaseConfig::from_source( config, None, diff --git a/src/sources/http_client/tests.rs b/src/sources/http_client/tests.rs index b834089a6d96c..1021c5f0cdc96 100644 --- a/src/sources/http_client/tests.rs +++ b/src/sources/http_client/tests.rs @@ -1,6 +1,7 @@ use http::Uri; use std::collections::HashMap; use tokio::time::Duration; +use vector_lib::config::LogNamespace; use warp::{http::HeaderMap, Filter}; use crate::components::validation::prelude::*; @@ -49,6 +50,7 @@ impl ValidatableComponent for HttpClientConfig { decoding: DeserializerConfig::Json(Default::default()), ..Default::default() }; + let log_namespace: LogNamespace = config.log_namespace.unwrap_or_default().into(); let external_resource = ExternalResource::new( ResourceDirection::Pull, @@ -58,6 +60,7 @@ impl ValidatableComponent for HttpClientConfig { ValidationConfiguration::from_source( Self::NAME, + log_namespace, vec![ComponentTestCaseConfig::from_source( config, None, diff --git a/src/sources/http_server.rs b/src/sources/http_server.rs index 467dd20a97a7b..df482a0a1443c 100644 --- a/src/sources/http_server.rs +++ b/src/sources/http_server.rs @@ -1529,6 +1529,8 @@ mod tests { ..Default::default() }; + let log_namespace: LogNamespace = config.log_namespace.unwrap_or(false).into(); + let listen_addr_http = format!("http://{}/", config.address); let uri = Uri::try_from(&listen_addr_http).expect("should not fail to parse URI"); @@ -1542,6 +1544,7 @@ mod tests { ValidationConfiguration::from_source( Self::NAME, + log_namespace, vec![ComponentTestCaseConfig::from_source( config, None, diff --git a/src/sources/splunk_hec/mod.rs b/src/sources/splunk_hec/mod.rs index 31493a2d9eb85..e28d71a96df17 100644 --- a/src/sources/splunk_hec/mod.rs +++ b/src/sources/splunk_hec/mod.rs @@ -2623,6 +2623,7 @@ mod tests { let listen_addr_http = format!("http://{}/services/collector/event", config.address); let uri = Uri::try_from(&listen_addr_http).expect("should not fail to parse URI"); + let log_namespace: LogNamespace = config.log_namespace.unwrap_or_default().into(); let framing = BytesDecoderConfig::new().into(); let decoding = DeserializerConfig::Json(Default::default()); @@ -2637,6 +2638,7 @@ mod tests { ValidationConfiguration::from_source( Self::NAME, + log_namespace, vec![ComponentTestCaseConfig::from_source( config, None, diff --git a/src/sources/vector/mod.rs b/src/sources/vector/mod.rs index edaa3ceddf3f4..3320ee54d221f 100644 --- a/src/sources/vector/mod.rs 
+++ b/src/sources/vector/mod.rs @@ -137,7 +137,7 @@ pub struct VectorConfig { /// The namespace to use for logs. This overrides the global setting. #[serde(default)] #[configurable(metadata(docs::hidden))] - log_namespace: Option, + pub log_namespace: Option, } impl VectorConfig { From 742b883b5881b1b1f88d01c023c277b293500ee3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 18:49:53 -0700 Subject: [PATCH 0195/1491] chore(deps): Bump temp-dir from 0.1.12 to 0.1.13 (#20151) Bumps [temp-dir](https://gitlab.com/leonhard-llc/ops) from 0.1.12 to 0.1.13. - [Commits](https://gitlab.com/leonhard-llc/ops/compare/temp-dir-v0.1.12...temp-dir-v0.1.13) --- updated-dependencies: - dependency-name: temp-dir dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/vector-buffers/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index efe23a60a1ce8..08c9b76172102 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8929,9 +8929,9 @@ dependencies = [ [[package]] name = "temp-dir" -version = "0.1.12" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dd16aa9ffe15fe021c6ee3766772132c6e98dfa395a167e16864f61a9cfb71d6" +checksum = "1f227968ec00f0e5322f9b8173c7a0cbcff6181a0a5b28e9892491c286277231" [[package]] name = "tempfile" diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index cb9f81c4e4d67..4b477d6932fe5 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -45,7 +45,7 @@ proptest = "1.4" quickcheck = "1.0" rand = "0.8.5" serde_yaml = { version = "0.9", default-features = false } -temp-dir = "0.1.12" +temp-dir = "0.1.13" tokio-test = "0.4.4" tracing-fluent-assertions = { version = "0.3" } tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt", "registry", "std", "ansi"] } From cf61b90a8cb6c80a541fad0689867f8ae55bae5a Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 28 Mar 2024 18:49:57 -0700 Subject: [PATCH 0196/1491] chore(deps): Bump bollard from 0.16.0 to 0.16.1 (#20158) Bumps [bollard](https://github.com/fussybeaver/bollard) from 0.16.0 to 0.16.1. - [Release notes](https://github.com/fussybeaver/bollard/releases) - [Commits](https://github.com/fussybeaver/bollard/compare/v0.16.0...v0.16.1) --- updated-dependencies: - dependency-name: bollard dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 6 +++--- Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 08c9b76172102..2c7a8fa6126f2 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1539,11 +1539,11 @@ dependencies = [ [[package]] name = "bollard" -version = "0.16.0" +version = "0.16.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "83545367eb6428eb35c29cdec3a1f350fa8d6d9085d59a7d7bcb637f2e38db5a" +checksum = "0aed08d3adb6ebe0eff737115056652670ae290f177759aac19c30456135f94c" dependencies = [ - "base64 0.21.7", + "base64 0.22.0", "bollard-stubs", "bytes 1.5.0", "chrono", diff --git a/Cargo.toml b/Cargo.toml index 05196713e64ea..b301642f08d59 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -259,7 +259,7 @@ apache-avro = { version = "0.16.0", default-features = false, optional = true } axum = { version = "0.6.20", default-features = false } base64 = { version = "0.22.0", default-features = false, optional = true } bloomy = { version = "1.2.0", default-features = false, optional = true } -bollard = { version = "0.16.0", default-features = false, features = ["ssl", "chrono"], optional = true } +bollard = { version = "0.16.1", default-features = false, features = ["ssl", "chrono"], optional = true } bytes = { version = "1.5.0", default-features = false, features = ["serde"] } bytesize = { version = "1.3.0", default-features = false } chrono.workspace = true From 3c4eb68abb16c81fa7170926256785433b6a3553 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:02 +0000 Subject: [PATCH 0197/1491] chore(deps): Bump indoc from 2.0.4 to 2.0.5 (#20159) Bumps [indoc](https://github.com/dtolnay/indoc) from 2.0.4 to 2.0.5. - [Release notes](https://github.com/dtolnay/indoc/releases) - [Commits](https://github.com/dtolnay/indoc/compare/2.0.4...2.0.5) --- updated-dependencies: - dependency-name: indoc dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/k8s-e2e-tests/Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 2 +- 4 files changed, 5 insertions(+), 5 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 2c7a8fa6126f2..e2f5b035d4cb1 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4486,9 +4486,9 @@ dependencies = [ [[package]] name = "indoc" -version = "2.0.4" +version = "2.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" +checksum = "b248f5224d1d606005e02c97f5aa4e88eeb230488bcc03bc9ca4d7991399f2b5" [[package]] name = "infer" diff --git a/Cargo.toml b/Cargo.toml index b301642f08d59..b77258ee23cdc 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -291,7 +291,7 @@ hyper-openssl = { version = "0.9.2", default-features = false } hyper-proxy = { version = "0.9.1", default-features = false, features = ["openssl-tls"] } indexmap.workspace = true infer = { version = "0.15.0", default-features = false, optional = true} -indoc = { version = "2.0.4", default-features = false } +indoc = { version = "2.0.5", default-features = false } inventory = { version = "0.3.15", default-features = false } ipnet = { version = "2", default-features = false, optional = true, features = ["serde", "std"] } itertools = { version = "0.12.1", default-features = false, optional = false, features = ["use_alloc"] } diff --git a/lib/k8s-e2e-tests/Cargo.toml b/lib/k8s-e2e-tests/Cargo.toml index 7fd257e2170c7..d9e389312a8e0 100644 --- a/lib/k8s-e2e-tests/Cargo.toml +++ b/lib/k8s-e2e-tests/Cargo.toml @@ -15,7 +15,7 @@ regex = "1" reqwest = { version = "0.11.26", features = ["json"] } serde_json.workspace = true tokio = { version = "1.36.0", features = ["full"] } -indoc = "2.0.4" +indoc = "2.0.5" env_logger = "0.10" tracing = { version = "0.1", features = ["log"] } rand = "0.8" diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 6fb4678e25202..3ee0cf8d8fc7c 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -32,4 +32,4 @@ chrono.workspace = true clap.workspace = true url = { version = "2.5.0", default-features = false } uuid = { version = "1", default-features = false, features = ["serde", "v4"] } -indoc = { version = "2.0.4", default-features = false } +indoc = { version = "2.0.5", default-features = false } From 461597721247195853910c3b3d7cba9a8a16b3cc Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:06 +0000 Subject: [PATCH 0198/1491] chore(deps): Bump indexmap from 2.2.5 to 2.2.6 (#20161) Bumps [indexmap](https://github.com/indexmap-rs/indexmap) from 2.2.5 to 2.2.6. - [Changelog](https://github.com/indexmap-rs/indexmap/blob/master/RELEASES.md) - [Commits](https://github.com/indexmap-rs/indexmap/compare/2.2.5...2.2.6) --- updated-dependencies: - dependency-name: indexmap dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 42 +++++++++++++++++++------------------- Cargo.toml | 2 +- lib/file-source/Cargo.toml | 2 +- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e2f5b035d4cb1..c6b3e7cde6385 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -441,7 +441,7 @@ dependencies = [ "fnv", "futures-util", "http 1.0.0", - "indexmap 2.2.5", + "indexmap 2.2.6", "mime", "multer", "num-traits", @@ -491,7 +491,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68e40849c29a39012d38bff87bfed431f1ed6c53fbec493294c1045d61a7ae75" dependencies = [ "bytes 1.5.0", - "indexmap 2.2.5", + "indexmap 2.2.6", "serde", "serde_json", ] @@ -3277,7 +3277,7 @@ dependencies = [ "flate2", "futures 0.3.30", "glob", - "indexmap 2.2.5", + "indexmap 2.2.6", "libc", "quickcheck", "scan_fmt", @@ -3757,7 +3757,7 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.9", - "indexmap 2.2.5", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -3776,7 +3776,7 @@ dependencies = [ "futures-sink", "futures-util", "http 1.0.0", - "indexmap 2.2.5", + "indexmap 2.2.6", "slab", "tokio", "tokio-util", @@ -4461,9 +4461,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.2.5" +version = "2.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b0b929d511467233429c45a44ac1dcaa21ba0f5ba11e4879e6ed28ddb4f9df4" +checksum = "168fb715dda47215e360912c096649d23d58bf392ac62f73919e831745e40f26" dependencies = [ "equivalent", "hashbrown 0.14.3", @@ -6367,7 +6367,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1d3afd2628e69da2be385eb6f2fd57c8ac7977ceeff6dc166ff1657b0e386a9" dependencies = [ "fixedbitset", - "indexmap 2.2.5", + "indexmap 2.2.6", ] [[package]] @@ -6814,7 +6814,7 @@ dependencies = [ name = "prometheus-parser" version = "0.1.0" dependencies = [ - "indexmap 2.2.5", + "indexmap 2.2.6", "nom", "num_enum 0.7.2", "prost 0.12.3", @@ -8185,7 +8185,7 @@ version = "1.0.114" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" dependencies = [ - "indexmap 2.2.5", + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -8282,7 +8282,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.2.5", + "indexmap 2.2.6", "serde", "serde_derive", "serde_json", @@ -8332,7 +8332,7 @@ version = "0.9.33" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9" dependencies = [ - "indexmap 2.2.5", + "indexmap 2.2.6", "itoa", "ryu", "serde", @@ -9330,7 +9330,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.2.5", + "indexmap 2.2.6", "toml_datetime", "winnow 0.5.18", ] @@ -9341,7 +9341,7 @@ version = "0.20.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "70f427fce4d84c72b5b732388bf4a9f4531b53f74e2887e3ecb2481f68f66d81" dependencies = [ - "indexmap 2.2.5", + "indexmap 2.2.6", "toml_datetime", "winnow 0.5.18", ] @@ -9352,7 +9352,7 @@ version = "0.22.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c12219811e0c1ba077867254e5ad62ee2c9c190b0d957110750ac0cda1ae96cd" dependencies = [ - "indexmap 2.2.5", + "indexmap 2.2.6", 
"serde", "serde_spanned", "toml_datetime", @@ -9991,7 +9991,7 @@ dependencies = [ "dunce", "glob", "hex", - "indexmap 2.2.5", + "indexmap 2.2.6", "indicatif", "itertools 0.12.1", "log", @@ -10097,7 +10097,7 @@ dependencies = [ "hyper 0.14.28", "hyper-openssl", "hyper-proxy", - "indexmap 2.2.5", + "indexmap 2.2.6", "indoc", "infer 0.15.0", "inventory", @@ -10282,7 +10282,7 @@ dependencies = [ "crossbeam-utils", "derivative", "futures 0.3.30", - "indexmap 2.2.5", + "indexmap 2.2.6", "metrics", "nom", "ordered-float 4.2.0", @@ -10313,7 +10313,7 @@ dependencies = [ "chrono-tz", "encoding_rs", "http 0.2.9", - "indexmap 2.2.5", + "indexmap 2.2.6", "inventory", "no-proxy", "num-traits", @@ -10383,7 +10383,7 @@ dependencies = [ "headers", "http 0.2.9", "hyper-proxy", - "indexmap 2.2.5", + "indexmap 2.2.6", "ipnet", "metrics", "metrics-tracing-context", @@ -10591,7 +10591,7 @@ dependencies = [ "hostname", "iana-time-zone", "idna 0.5.0", - "indexmap 2.2.5", + "indexmap 2.2.6", "indoc", "itertools 0.12.1", "lalrpop", diff --git a/Cargo.toml b/Cargo.toml index b77258ee23cdc..38d7bfeab9636 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -131,7 +131,7 @@ members = [ [workspace.dependencies] chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } clap = { version = "4.5.3", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } -indexmap = { version = "2.2.5", default-features = false, features = ["serde", "std"] } +indexmap = { version = "2.2.6", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.5", default-features = false } proptest = "1.4" proptest-derive = "0.4.0" diff --git a/lib/file-source/Cargo.toml b/lib/file-source/Cargo.toml index 8a0b57cc1f006..e7f21b6e6124a 100644 --- a/lib/file-source/Cargo.toml +++ b/lib/file-source/Cargo.toml @@ -39,7 +39,7 @@ default-features = false features = [] [dependencies.indexmap] -version = "2.2.5" +version = "2.2.6" default-features = false features = ["serde"] From 04813b9eee62d7dc3c55b761b144478af73a1a95 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:14 +0000 Subject: [PATCH 0199/1491] chore(deps): Bump async-trait from 0.1.78 to 0.1.79 (#20163) Bumps [async-trait](https://github.com/dtolnay/async-trait) from 0.1.78 to 0.1.79. - [Release notes](https://github.com/dtolnay/async-trait/releases) - [Commits](https://github.com/dtolnay/async-trait/compare/0.1.78...0.1.79) --- updated-dependencies: - dependency-name: async-trait dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index c6b3e7cde6385..b4729cfb20902 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -702,9 +702,9 @@ checksum = "b4eb2cdb97421e01129ccb49169d8279ed21e829929144f4a22a6e54ac549ca1" [[package]] name = "async-trait" -version = "0.1.78" +version = "0.1.79" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "461abc97219de0eaaf81fe3ef974a540158f3d079c2ab200f891f1a2ef201e85" +checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", diff --git a/Cargo.toml b/Cargo.toml index 38d7bfeab9636..854a091dd5fe8 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -156,7 +156,7 @@ loki-logproto = { path = "lib/loki-logproto", optional = true } # Tokio / Futures async-stream = { version = "0.3.5", default-features = false } -async-trait = { version = "0.1.78", default-features = false } +async-trait = { version = "0.1.79", default-features = false } futures = { version = "0.3.30", default-features = false, features = ["compat", "io-compat"], package = "futures" } tokio = { version = "1.36.0", default-features = false, features = ["full"] } tokio-openssl = { version = "0.6.4", default-features = false } From 52a15a0548a03baa59eb4e107bf5a4041505bd29 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:18 +0000 Subject: [PATCH 0200/1491] chore(deps): Bump syn from 2.0.53 to 2.0.55 (#20164) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.53 to 2.0.55. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.53...2.0.55) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b4729cfb20902..3ce7fa2b0f89c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "strum 0.26.1", - "syn 2.0.53", + "syn 2.0.55", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "30c5ef0ede93efbf733c1a727f3b6b5a1060bbedd5600183e66f6e4be4af0ec5" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -708,7 +708,7 @@ checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -1479,7 +1479,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -1609,7 +1609,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", "syn_derive", ] @@ -2041,7 +2041,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -2569,7 +2569,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -2641,7 +2641,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "strsim 0.10.0", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -2674,7 +2674,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -2779,7 +2779,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -3060,7 +3060,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -3072,7 +3072,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -3092,7 +3092,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -3494,7 +3494,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -5291,7 +5291,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -5411,7 +5411,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "regex", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -5861,7 +5861,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -5873,7 +5873,7 @@ 
dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -6061,7 +6061,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -6346,7 +6346,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -6434,7 +6434,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -6711,7 +6711,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.79", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -6914,7 +6914,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.53", + "syn 2.0.55", "tempfile", "which 4.4.2", ] @@ -6942,7 +6942,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -7716,7 +7716,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.53", + "syn 2.0.55", "unicode-ident", ] @@ -8165,7 +8165,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8176,7 +8176,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8238,7 +8238,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8311,7 +8311,7 @@ dependencies = [ "darling 0.20.8", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8590,7 +8590,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8777,7 +8777,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8790,7 +8790,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -8833,9 +8833,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.53" +version = "2.0.55" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7383cd0e49fff4b6b90ca5670bfd3e9d6a733b3f90c686605aa7eec8c4996032" +checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -8851,7 +8851,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -9019,7 +9019,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -9167,7 +9167,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -9414,7 +9414,7 @@ dependencies = [ "proc-macro2 1.0.79", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.53", + "syn 
2.0.55", ] [[package]] @@ -9517,7 +9517,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -9751,7 +9751,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -9781,7 +9781,7 @@ checksum = "ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] @@ -10341,7 +10341,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.53", + "syn 2.0.55", "tracing 0.1.40", ] @@ -10354,7 +10354,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.53", + "syn 2.0.55", "vector-config", "vector-config-common", ] @@ -10771,7 +10771,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", "wasm-bindgen-shared", ] @@ -10805,7 +10805,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11260,7 +11260,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.53", + "syn 2.0.55", ] [[package]] From e25efa1c270d5cb008240edb4ad2413535a332e7 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:22 +0000 Subject: [PATCH 0201/1491] chore(deps): Bump serde_yaml from 0.9.33 to 0.9.34+deprecated (#20165) Bumps [serde_yaml](https://github.com/dtolnay/serde-yaml) from 0.9.33 to 0.9.34+deprecated. - [Release notes](https://github.com/dtolnay/serde-yaml/releases) - [Commits](https://github.com/dtolnay/serde-yaml/compare/0.9.33...0.9.34) --- updated-dependencies: - dependency-name: serde_yaml dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 14 +++++++------- Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 4 files changed, 10 insertions(+), 10 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 3ce7fa2b0f89c..44c8663df465d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4845,7 +4845,7 @@ dependencies = [ "secrecy", "serde", "serde_json", - "serde_yaml 0.9.33", + "serde_yaml 0.9.34+deprecated", "thiserror", "tokio", "tokio-util", @@ -8328,9 +8328,9 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.33" +version = "0.9.34+deprecated" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a0623d197252096520c6f2a5e1171ee436e5af99a5d7caa2891e55e61950e6d9" +checksum = "6a8b1a1a2ebf674015cc02edccce75287f1a0130d394307b36743c2f5d504b47" dependencies = [ "indexmap 2.2.6", "itoa", @@ -10003,7 +10003,7 @@ dependencies = [ "reqwest", "serde", "serde_json", - "serde_yaml 0.9.33", + "serde_yaml 0.9.34+deprecated", "sha2", "tempfile", "toml", @@ -10160,7 +10160,7 @@ dependencies = [ "serde_bytes", "serde_json", "serde_with 3.7.0", - "serde_yaml 0.9.33", + "serde_yaml 0.9.34+deprecated", "sha2", "similar-asserts", "smallvec", @@ -10256,7 +10256,7 @@ dependencies = [ "rand 0.8.5", "rkyv", "serde", - "serde_yaml 0.9.33", + "serde_yaml 0.9.34+deprecated", "snafu 0.7.5", "temp-dir", "tokio", @@ -10414,7 +10414,7 @@ dependencies = [ "serde", "serde_json", "serde_with 3.7.0", - "serde_yaml 0.9.33", + "serde_yaml 0.9.34+deprecated", "similar-asserts", "smallvec", "snafu 0.7.5", diff --git a/Cargo.toml b/Cargo.toml index 854a091dd5fe8..a2f179e50d336 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -216,7 +216,7 @@ serde-toml-merge = { version = "0.3.6", default-features = false } serde_bytes = { version = "0.11.14", default-features = false, features = ["std"], optional = true } serde_json.workspace = true serde_with = { version = "3.7.0", default-features = false, features = ["macros", "std"] } -serde_yaml = { version = "0.9.33", default-features = false } +serde_yaml = { version = "0.9.34", default-features = false } # Messagepack rmp-serde = { version = "1.1.2", default-features = false, optional = true } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 758ee576b3128..5c9450f70e559 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -90,7 +90,7 @@ ndarray-stats = "0.5.1" noisy_float = "0.2.0" rand = "0.8.5" rand_distr = "0.4.3" -serde_yaml = { version = "0.9.33", default-features = false } +serde_yaml = { version = "0.9.34", default-features = false } tracing-subscriber = { version = "0.3.18", default-features = false, features = ["env-filter", "fmt", "ansi", "registry"] } vector-common = { path = "../vector-common", default-features = false, features = ["test"] } diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index 580673c189c30..9f0747d2f92d1 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -33,7 +33,7 @@ regex = { version = "1.10.3", default-features = false, features = ["std", "perf reqwest = { version = "0.11", features = ["json", "blocking"] } serde.workspace = true serde_json.workspace = true -serde_yaml = "0.9.33" +serde_yaml = "0.9.34" sha2 = "0.10.8" tempfile = "3.10.1" toml.workspace = true From 1befcd9a6f8d0115fcc11ac866ba380c9893ee25 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:26 +0000 Subject: [PATCH 
0202/1491] chore(deps): Bump arc-swap from 1.7.0 to 1.7.1 (#20166) Bumps [arc-swap](https://github.com/vorner/arc-swap) from 1.7.0 to 1.7.1. - [Changelog](https://github.com/vorner/arc-swap/blob/master/CHANGELOG.md) - [Commits](https://github.com/vorner/arc-swap/commits) --- updated-dependencies: - dependency-name: arc-swap dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- lib/enrichment/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 44c8663df465d..f06f92dd23282 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -274,9 +274,9 @@ dependencies = [ [[package]] name = "arc-swap" -version = "1.7.0" +version = "1.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b3d0060af21e8d11a926981cc00c6c1541aa91dd64b9f881985c3da1094425f" +checksum = "69f7f8c3906b62b754cd5326047894316021dcfe5a194c8ea52bdd94934a3457" [[package]] name = "arr_macro" diff --git a/lib/enrichment/Cargo.toml b/lib/enrichment/Cargo.toml index 74a2025884ac7..923bd375ec23e 100644 --- a/lib/enrichment/Cargo.toml +++ b/lib/enrichment/Cargo.toml @@ -6,7 +6,7 @@ edition = "2021" publish = false [dependencies] -arc-swap = { version = "1.7.0", default-features = false } +arc-swap = { version = "1.7.1", default-features = false } chrono.workspace = true dyn-clone = { version = "1.0.17", default-features = false } vrl.workspace = true From 656e2207c2f3057d72c19667b1673f621be56ade Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:50:41 +0000 Subject: [PATCH 0203/1491] chore(ci): Bump bufbuild/buf-breaking-action from 1.1.3 to 1.1.4 (#20171) Bumps [bufbuild/buf-breaking-action](https://github.com/bufbuild/buf-breaking-action) from 1.1.3 to 1.1.4. - [Release notes](https://github.com/bufbuild/buf-breaking-action/releases) - [Commits](https://github.com/bufbuild/buf-breaking-action/compare/v1.1.3...v1.1.4) --- updated-dependencies: - dependency-name: bufbuild/buf-breaking-action dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/protobuf.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/protobuf.yml b/.github/workflows/protobuf.yml index f9ea54bf8a06c..4b962e778efcf 100644 --- a/.github/workflows/protobuf.yml +++ b/.github/workflows/protobuf.yml @@ -23,6 +23,6 @@ jobs: # Install the `buf` CLI - uses: bufbuild/buf-setup-action@v1.30.0 # Perform breaking change detection against the `master` branch - - uses: bufbuild/buf-breaking-action@v1.1.3 + - uses: bufbuild/buf-breaking-action@v1.1.4 with: against: "https://github.com/vectordotdev/vector.git#branch=master" From b05fb602bb734adb82f17032e91c44ae9fc5a4df Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:51:10 +0000 Subject: [PATCH 0204/1491] chore(deps): Bump express from 4.18.2 to 4.19.2 in /website (#20183) Bumps [express](https://github.com/expressjs/express) from 4.18.2 to 4.19.2. 
- [Release notes](https://github.com/expressjs/express/releases) - [Changelog](https://github.com/expressjs/express/blob/master/History.md) - [Commits](https://github.com/expressjs/express/compare/4.18.2...4.19.2) --- updated-dependencies: - dependency-name: express dependency-type: indirect ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- website/yarn.lock | 43 ++++++++++++++++++++++++------------------- 1 file changed, 24 insertions(+), 19 deletions(-) diff --git a/website/yarn.lock b/website/yarn.lock index f9b555c4b79b4..d7ea94dd225a4 100644 --- a/website/yarn.lock +++ b/website/yarn.lock @@ -2006,13 +2006,13 @@ bluebird@^3.5.1: resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.2.tgz#9f229c15be272454ffa973ace0dbee79a1b0c36f" integrity sha512-XpNj6GDQzdfW+r2Wnn7xiSAd7TM3jzkxGXBGTtWKuSXv1xUV+azxAm8jdWZN06QTQk+2N2XB9jRDkvbmQmcRtg== -body-parser@1.20.1: - version "1.20.1" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" - integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== +body-parser@1.20.2: + version "1.20.2" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.2.tgz#6feb0e21c4724d06de7ff38da36dad4f57a747fd" + integrity sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA== dependencies: bytes "3.1.2" - content-type "~1.0.4" + content-type "~1.0.5" debug "2.6.9" depd "2.0.0" destroy "1.2.0" @@ -2020,7 +2020,7 @@ body-parser@1.20.1: iconv-lite "0.4.24" on-finished "2.4.1" qs "6.11.0" - raw-body "2.5.1" + raw-body "2.5.2" type-is "~1.6.18" unpipe "1.0.0" @@ -2394,6 +2394,11 @@ content-type@~1.0.4: resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== +content-type@~1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.5.tgz#8b773162656d1d1086784c8f23a54ce6d73d7918" + integrity sha512-nTjqfcBFEipKdXCv4YDQWCfmcLZKm81ldF0pAopTvyrFGVbcR6P/VAAd5G7N+0tTr8QqiU0tFadD6FK4NtJwOA== + convert-source-map@^1.1.0, convert-source-map@^1.5.0, convert-source-map@^1.7.0: version "1.8.0" resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" @@ -2411,10 +2416,10 @@ cookie@0.3.1: resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb" integrity sha1-5+Ch+e9DtMi6klxcWpboBtFoc7s= -cookie@0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.5.0.tgz#d1f5d71adec6558c58f389987c366aa47e994f8b" - integrity sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw== +cookie@0.6.0: + version "0.6.0" + resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.6.0.tgz#2798b04b071b0ecbff0dbb62a505a8efa4e19051" + integrity sha512-U71cyTamuh1CRNCfpGY6to28lxvNwPG4Guz/EVjgf3Jmzv0vlDp1atT9eS5dDjMYHucpHbWns6Lwf3BKz6svdw== copy-to-clipboard@^3.3.1: version "3.3.1" @@ -2894,16 +2899,16 @@ events@^1.1.0: integrity sha1-nr23Y1rQmccNzEwqH1AEKI6L2SQ= express@^4.17.1: - version "4.18.2" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" - integrity 
sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== + version "4.19.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.19.2.tgz#e25437827a3aa7f2a827bc8171bbbb664a356465" + integrity sha512-5T6nhjsT+EOMzuck8JjBHARTHfMht0POzlA60WV2pMD3gyXw2LZnZ+ueGdNxG+0calOJcWKbpFcuzLZ91YWq9Q== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.1" + body-parser "1.20.2" content-disposition "0.5.4" content-type "~1.0.4" - cookie "0.5.0" + cookie "0.6.0" cookie-signature "1.0.6" debug "2.6.9" depd "2.0.0" @@ -4511,10 +4516,10 @@ raven@^2.2.1: timed-out "4.0.1" uuid "3.3.2" -raw-body@2.5.1: - version "2.5.1" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.1.tgz#fe1b1628b181b700215e5fd42389f98b71392857" - integrity sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig== +raw-body@2.5.2: + version "2.5.2" + resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.5.2.tgz#99febd83b90e08975087e8f1f9419a149366b68a" + integrity sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA== dependencies: bytes "3.1.2" http-errors "2.0.0" From 8a78b8270b61136a37e7a8b5c364b912e6515d2b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 01:51:14 +0000 Subject: [PATCH 0205/1491] chore(deps): Bump graphql_client from 0.13.0 to 0.14.0 (#20187) Bumps [graphql_client](https://github.com/graphql-rust/graphql-client) from 0.13.0 to 0.14.0. - [Release notes](https://github.com/graphql-rust/graphql-client/releases) - [Changelog](https://github.com/graphql-rust/graphql-client/blob/main/CHANGELOG.md) - [Commits](https://github.com/graphql-rust/graphql-client/compare/0.13.0...0.14.0) --- updated-dependencies: - dependency-name: graphql_client dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 12 ++++++------ lib/vector-api-client/Cargo.toml | 2 +- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index f06f92dd23282..25f9ce1c2807a 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3652,9 +3652,9 @@ dependencies = [ [[package]] name = "graphql_client" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09cdf7b487d864c2939b23902291a5041bc4a84418268f25fda1c8d4e15ad8fa" +checksum = "a50cfdc7f34b7f01909d55c2dcb71d4c13cbcbb4a1605d6c8bd760d654c1144b" dependencies = [ "graphql_query_derive", "serde", @@ -3663,9 +3663,9 @@ dependencies = [ [[package]] name = "graphql_client_codegen" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a40f793251171991c4eb75bd84bc640afa8b68ff6907bc89d3b712a22f700506" +checksum = "5e27ed0c2cf0c0cc52c6bcf3b45c907f433015e580879d14005386251842fb0a" dependencies = [ "graphql-introspection-query", "graphql-parser", @@ -3680,9 +3680,9 @@ dependencies = [ [[package]] name = "graphql_query_derive" -version = "0.13.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00bda454f3d313f909298f626115092d348bc231025699f557b27e248475f48c" +checksum = "83febfa838f898cfa73dfaa7a8eb69ff3409021ac06ee94cfb3d622f6eeb1a97" dependencies = [ "graphql_client_codegen", "proc-macro2 1.0.79", diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 3ee0cf8d8fc7c..095cd5a512f34 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -21,7 +21,7 @@ tokio = { version = "1.36.0", default-features = false, features = ["macros", "r tokio-stream = { version = "0.1.15", default-features = false, features = ["sync"] } # GraphQL -graphql_client = { version = "0.13.0", default-features = false, features = ["graphql_query_derive"] } +graphql_client = { version = "0.14.0", default-features = false, features = ["graphql_query_derive"] } # HTTP / WebSockets reqwest = { version = "0.11.26", default-features = false, features = ["json"] } From 06f3ad3416b4afe89a02fb29704cda40f8e71da3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 02:24:42 +0000 Subject: [PATCH 0206/1491] chore(deps): Bump the aws group with 1 update (#20175) Bumps the aws group with 1 update: [aws-smithy-async](https://github.com/smithy-lang/smithy-rs). Updates `aws-smithy-async` from 1.1.8 to 1.2.0 - [Release notes](https://github.com/smithy-lang/smithy-rs/releases) - [Changelog](https://github.com/smithy-lang/smithy-rs/blob/main/CHANGELOG.md) - [Commits](https://github.com/smithy-lang/smithy-rs/commits) --- updated-dependencies: - dependency-name: aws-smithy-async dependency-type: direct:production update-type: version-update:semver-minor dependency-group: aws ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 25f9ce1c2807a..a0ea720bdb5df 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1052,9 +1052,9 @@ dependencies = [ [[package]] name = "aws-smithy-async" -version = "1.1.8" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d26ea8fa03025b2face2b3038a63525a10891e3d8829901d502e5384a0d8cd46" +checksum = "f7a41ccd6b74401a49ca828617049e5c23d83163d330a4f90a8081aadee0ac45" dependencies = [ "futures-util", "pin-project-lite", diff --git a/Cargo.toml b/Cargo.toml index a2f179e50d336..5b345c70c3006 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -196,7 +196,7 @@ aws-smithy-http = { version = "0.60", default-features = false, features = ["eve aws-smithy-types = { version = "1.1.8", default-features = false, optional = true } aws-smithy-runtime-api = { version = "1.2.0", default-features = false, optional = true } aws-smithy-runtime = { version = "1.1.8", default-features = false, features = ["client", "connector-hyper-0-14-x", "rt-tokio"], optional = true } -aws-smithy-async = { version = "1.0.2", default-features = false, features = ["rt-tokio"], optional = true } +aws-smithy-async = { version = "1.2.0", default-features = false, features = ["rt-tokio"], optional = true } # Azure azure_core = { version = "0.17", default-features = false, features = ["enable_reqwest"], optional = true } From eb7ab42940d1743ad2fa2f44318c0b25240f3595 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 10:03:40 -0700 Subject: [PATCH 0207/1491] chore(deps): Bump os_info from 3.8.1 to 3.8.2 (#20162) Bumps [os_info](https://github.com/stanislav-tkach/os_info) from 3.8.1 to 3.8.2. - [Release notes](https://github.com/stanislav-tkach/os_info/releases) - [Changelog](https://github.com/stanislav-tkach/os_info/blob/master/CHANGELOG.md) - [Commits](https://github.com/stanislav-tkach/os_info/compare/v3.8.1...v3.8.2) --- updated-dependencies: - dependency-name: os_info dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- vdev/Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index a0ea720bdb5df..5885bf10b7a53 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -6144,9 +6144,9 @@ dependencies = [ [[package]] name = "os_info" -version = "3.8.1" +version = "3.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6cbb46d5d01695d7a1fb8be5f0d1968bd2b2b8ba1d1b3e7062ce2a0593e57af1" +checksum = "ae99c7fa6dd38c7cafe1ec085e804f8f555a2f8659b0dbe03f1f9963a9b51092" dependencies = [ "log", "windows-sys 0.52.0", diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index 9f0747d2f92d1..ca496a0e7fe93 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -25,7 +25,7 @@ indicatif = { version = "0.17.8", features = ["improved_unicode"] } itertools = "0.12.1" log = "0.4.21" once_cell = "1.19" -os_info = { version = "3.8.1", default-features = false } +os_info = { version = "3.8.2", default-features = false } # watch https://github.com/epage/anstyle for official interop with Clap owo-colors = { version = "4.0.0", features = ["supports-colors"] } paste = "1.0.14" From 6c3003dc0592f9fb7ff121a971db9af44c793a68 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 17:03:47 +0000 Subject: [PATCH 0208/1491] chore(deps): Bump regex from 1.10.3 to 1.10.4 (#20168) Bumps [regex](https://github.com/rust-lang/regex) from 1.10.3 to 1.10.4. - [Release notes](https://github.com/rust-lang/regex/releases) - [Changelog](https://github.com/rust-lang/regex/blob/master/CHANGELOG.md) - [Commits](https://github.com/rust-lang/regex/compare/1.10.3...1.10.4) --- updated-dependencies: - dependency-name: regex dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/codecs/Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- vdev/Cargo.toml | 2 +- 5 files changed, 6 insertions(+), 6 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 5885bf10b7a53..b87624498b542 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7435,9 +7435,9 @@ dependencies = [ [[package]] name = "regex" -version = "1.10.3" +version = "1.10.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b62dbe01f0b06f9d8dc7d49e05a0785f153b00b2c227856282f671e0318c9b15" +checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c" dependencies = [ "aho-corasick", "memchr", diff --git a/Cargo.toml b/Cargo.toml index 5b345c70c3006..35919462c775a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -319,7 +319,7 @@ rand = { version = "0.8.5", default-features = false, features = ["small_rng"] } rand_distr = { version = "0.4.3", default-features = false } rdkafka = { version = "0.35.0", default-features = false, features = ["tokio", "libz", "ssl", "zstd"], optional = true } redis = { version = "0.24.0", default-features = false, features = ["connection-manager", "tokio-comp", "tokio-native-tls-comp"], optional = true } -regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } +regex = { version = "1.10.4", default-features = false, features = ["std", "perf"] } roaring = { version = "0.10.3", default-features = false, optional = true } rumqttc = { version = "0.24.0", default-features = false, features = ["use-rustls"], optional = true } seahash = { version = "4.1.0", default-features = false } diff --git a/lib/codecs/Cargo.toml b/lib/codecs/Cargo.toml index f47ce0fc2d60d..1f17d85660c35 100644 --- a/lib/codecs/Cargo.toml +++ b/lib/codecs/Cargo.toml @@ -22,7 +22,7 @@ once_cell = { version = "1.19", default-features = false } ordered-float = { version = "4.2.0", default-features = false } prost = { version = "0.12.3", default-features = false, features = ["std"] } prost-reflect = { version = "0.13", default-features = false, features = ["serde"] } -regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } +regex = { version = "1.10.4", default-features = false, features = ["std", "perf"] } serde.workspace = true serde_json.workspace = true smallvec = { version = "1", default-features = false, features = ["union"] } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 5c9450f70e559..cb4f04bc07f08 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -39,7 +39,7 @@ proptest = { version = "1.4", optional = true } prost-types = { version = "0.12", default-features = false } prost = { version = "0.12", default-features = false, features = ["std"] } quanta = { version = "0.12.2", default-features = false } -regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } +regex = { version = "1.10.4", default-features = false, features = ["std", "perf"] } ryu = { version = "1", default-features = false } serde.workspace = true serde_json.workspace = true diff --git a/vdev/Cargo.toml b/vdev/Cargo.toml index ca496a0e7fe93..979813e51e15a 100644 --- a/vdev/Cargo.toml +++ b/vdev/Cargo.toml @@ -29,7 +29,7 @@ os_info = { version = "3.8.2", default-features = false } # watch https://github.com/epage/anstyle for official interop with Clap owo-colors = { version = "4.0.0", features = ["supports-colors"] } paste 
= "1.0.14" -regex = { version = "1.10.3", default-features = false, features = ["std", "perf"] } +regex = { version = "1.10.4", default-features = false, features = ["std", "perf"] } reqwest = { version = "0.11", features = ["json", "blocking"] } serde.workspace = true serde_json.workspace = true From 5a6b670ce05cc7c34b0af0cf2a58810d47b2f71c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 29 Mar 2024 17:03:51 +0000 Subject: [PATCH 0209/1491] chore(deps): Bump the clap group with 1 update (#20176) Bumps the clap group with 1 update: [clap](https://github.com/clap-rs/clap). Updates `clap` from 4.5.3 to 4.5.4 - [Release notes](https://github.com/clap-rs/clap/releases) - [Changelog](https://github.com/clap-rs/clap/blob/master/CHANGELOG.md) - [Commits](https://github.com/clap-rs/clap/compare/v4.5.3...v4.5.4) --- updated-dependencies: - dependency-name: clap dependency-type: direct:production update-type: version-update:semver-patch dependency-group: clap ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 28 ++++++++++++++-------------- Cargo.toml | 2 +- 2 files changed, 15 insertions(+), 15 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b87624498b542..4083710039a7c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1992,9 +1992,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.5.3" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "949626d00e063efc93b6dca932419ceb5432f99769911c0b995f7e884c778813" +checksum = "90bc066a67923782aa8515dbaea16946c5bcc5addbd668bb80af688e53e548a0" dependencies = [ "clap_builder", "clap_derive", @@ -2006,7 +2006,7 @@ version = "2.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb9b20c0dd58e4c2e991c8d203bbeb76c11304d1011659686b5b644bc29aa478" dependencies = [ - "clap 4.5.3", + "clap 4.5.4", "log", ] @@ -2029,14 +2029,14 @@ version = "4.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "885e4d7d5af40bfb99ae6f9433e292feac98d452dcb3ec3d25dfe7552b77da8c" dependencies = [ - "clap 4.5.3", + "clap 4.5.4", ] [[package]] name = "clap_derive" -version = "4.5.3" +version = "4.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "90239a040c80f5e14809ca132ddc4176ab33d5e17e49691793296e3fcb34d72f" +checksum = "528131438037fd55894f62d6e9f068b8f45ac57ffa77517819645d10aed04f64" dependencies = [ "heck 0.5.0", "proc-macro2 1.0.79", @@ -2366,7 +2366,7 @@ dependencies = [ "anes", "cast", "ciborium", - "clap 4.5.3", + "clap 4.5.4", "criterion-plot", "futures 0.3.30", "is-terminal", @@ -9983,7 +9983,7 @@ dependencies = [ "anyhow", "cached", "chrono", - "clap 4.5.3", + "clap 4.5.4", "clap-verbosity-flag", "clap_complete", "confy", @@ -10061,7 +10061,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.5.3", + "clap 4.5.4", "colored", "console-subscriber", "criterion", @@ -10211,7 +10211,7 @@ version = "0.1.2" dependencies = [ "anyhow", "chrono", - "clap 4.5.3", + "clap 4.5.4", "futures 0.3.30", "graphql_client", "indoc", @@ -10234,7 +10234,7 @@ dependencies = [ "async-trait", "bytecheck", "bytes 1.5.0", - "clap 4.5.3", + "clap 4.5.4", "crc32fast", "criterion", "crossbeam-queue", @@ -10494,7 +10494,7 @@ dependencies = [ name = "vector-vrl-cli" version = "0.1.0" dependencies = [ - "clap 4.5.3", + "clap 4.5.4", "vector-vrl-functions", "vrl", ] @@ -10513,7 +10513,7 @@ 
dependencies = [ "ansi_term", "chrono", "chrono-tz", - "clap 4.5.3", + "clap 4.5.4", "enrichment", "glob", "prettydiff", @@ -10573,7 +10573,7 @@ dependencies = [ "chrono", "chrono-tz", "cidr-utils 0.6.1", - "clap 4.5.3", + "clap 4.5.4", "codespan-reporting", "community-id", "crypto_secretbox", diff --git a/Cargo.toml b/Cargo.toml index 35919462c775a..576da94f37d84 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -130,7 +130,7 @@ members = [ [workspace.dependencies] chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } -clap = { version = "4.5.3", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } +clap = { version = "4.5.4", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } indexmap = { version = "2.2.6", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.5", default-features = false } proptest = "1.4" From 5f981d46f1a890fa84e92465b94061fde01efd5d Mon Sep 17 00:00:00 2001 From: Jesse Szwedko Date: Fri, 29 Mar 2024 15:14:41 -0700 Subject: [PATCH 0210/1491] chore(ci): Only use one label for selecting GHA runner (#20210) GHA is deprecating multi-label runners at the end of April. Signed-off-by: Jesse Szwedko --- .github/workflows/compilation-timings.yml | 10 +++++----- .github/workflows/e2e.yml | 2 +- .github/workflows/integration-comment.yml | 4 ++-- .github/workflows/integration-test.yml | 2 +- .github/workflows/integration.yml | 2 +- .github/workflows/k8s_e2e.yml | 4 ++-- .github/workflows/misc.yml | 2 +- .github/workflows/publish.yml | 18 +++++++++--------- .github/workflows/regression.yml | 4 ++-- .github/workflows/test.yml | 2 +- .github/workflows/unit_windows.yml | 2 +- 11 files changed, 26 insertions(+), 26 deletions(-) diff --git a/.github/workflows/compilation-timings.yml b/.github/workflows/compilation-timings.yml index e96bea65ea946..5b26604109093 100644 --- a/.github/workflows/compilation-timings.yml +++ b/.github/workflows/compilation-timings.yml @@ -13,7 +13,7 @@ env: jobs: release-build-optimized: name: "Release Build (optimized)" - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core steps: - uses: colpal/actions-clean@v1 - uses: actions/checkout@v3 @@ -24,7 +24,7 @@ jobs: release-build-normal: name: "Release Build (normal)" - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core env: # We're not actually doing a debug build, we're just turning off the logic # in release-flags.sh so that we don't override the Cargo "release" profile @@ -40,7 +40,7 @@ jobs: debug-build: name: "Debug Build" - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core steps: - uses: colpal/actions-clean@v1 - uses: actions/checkout@v3 @@ -51,7 +51,7 @@ jobs: debug-rebuild: name: "Debug Rebuild" - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core steps: - uses: colpal/actions-clean@v1 - uses: actions/checkout@v3 @@ -64,7 +64,7 @@ jobs: check: name: "Cargo Check" - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core steps: - uses: colpal/actions-clean@v1 - uses: actions/checkout@v3 diff --git a/.github/workflows/e2e.yml b/.github/workflows/e2e.yml index 55ed97c8f125d..3a6524f156c1b 100644 --- a/.github/workflows/e2e.yml +++ b/.github/workflows/e2e.yml @@ -49,7 +49,7 @@ jobs: e2e-tests: name: E2E Tests - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core timeout-minutes: 45 needs: changes if: always() && ( diff 
--git a/.github/workflows/integration-comment.yml b/.github/workflows/integration-comment.yml index f5d0324837f28..1c4281d77f01f 100644 --- a/.github/workflows/integration-comment.yml +++ b/.github/workflows/integration-comment.yml @@ -82,7 +82,7 @@ jobs: integration-tests: needs: prep-pr - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 90 steps: - uses: actions/checkout@v3 @@ -460,7 +460,7 @@ jobs: e2e-tests: needs: prep-pr - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core timeout-minutes: 30 steps: - uses: actions/checkout@v3 diff --git a/.github/workflows/integration-test.yml b/.github/workflows/integration-test.yml index ce0dbb195ab4b..2d5833ee2b837 100644 --- a/.github/workflows/integration-test.yml +++ b/.github/workflows/integration-test.yml @@ -30,7 +30,7 @@ env: jobs: test-integration: - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 40 if: inputs.if || github.event_name == 'workflow_dispatch' steps: diff --git a/.github/workflows/integration.yml b/.github/workflows/integration.yml index e425f53650aa3..f499c4cc649b6 100644 --- a/.github/workflows/integration.yml +++ b/.github/workflows/integration.yml @@ -48,7 +48,7 @@ jobs: integration-tests: name: Integration Tests - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core needs: changes if: always() && ( github.event_name == 'merge_group' || ( diff --git a/.github/workflows/k8s_e2e.yml b/.github/workflows/k8s_e2e.yml index 6eb6624268e12..f2a01f3d63bee 100644 --- a/.github/workflows/k8s_e2e.yml +++ b/.github/workflows/k8s_e2e.yml @@ -59,7 +59,7 @@ jobs: build-x86_64-unknown-linux-gnu: name: Build - x86_64-unknown-linux-gnu - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 45 needs: changes # Run this job even if `changes` job is skipped (non- pull request trigger) @@ -181,7 +181,7 @@ jobs: test-e2e-kubernetes: name: K8s ${{ matrix.kubernetes_version.version }} / ${{ matrix.container_runtime }} (${{ matrix.kubernetes_version.role }}) - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 45 needs: - build-x86_64-unknown-linux-gnu diff --git a/.github/workflows/misc.yml b/.github/workflows/misc.yml index 887c1895df18d..e4ce62979fd0d 100644 --- a/.github/workflows/misc.yml +++ b/.github/workflows/misc.yml @@ -5,7 +5,7 @@ on: jobs: test-misc: - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 45 env: CARGO_INCREMENTAL: 0 diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index 3461d0bdf91a1..fe30c45eefae7 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -45,7 +45,7 @@ jobs: build-x86_64-unknown-linux-musl-packages: name: Build Vector for x86_64-unknown-linux-musl (.tar.gz) - runs-on: [linux, release-builder] + runs-on: release-builder-linux timeout-minutes: 60 needs: generate-publish-metadata env: @@ -71,7 +71,7 @@ jobs: build-x86_64-unknown-linux-gnu-packages: name: Build Vector for x86_64-unknown-linux-gnu (.tar.gz, DEB, RPM) - runs-on: [linux, release-builder] + runs-on: release-builder-linux needs: generate-publish-metadata timeout-minutes: 60 env: @@ -97,7 +97,7 @@ jobs: build-aarch64-unknown-linux-musl-packages: name: Build Vector for aarch64-unknown-linux-musl (.tar.gz) - runs-on: [linux, release-builder] + runs-on: release-builder-linux timeout-minutes: 60 needs: generate-publish-metadata env: @@ -125,7 +125,7 @@ jobs: build-aarch64-unknown-linux-gnu-packages: name: 
Build Vector for aarch64-unknown-linux-gnu (.tar.gz) - runs-on: [linux, release-builder] + runs-on: release-builder-linux timeout-minutes: 60 needs: generate-publish-metadata env: @@ -153,7 +153,7 @@ jobs: build-armv7-unknown-linux-gnueabihf-packages: name: Build Vector for armv7-unknown-linux-gnueabihf (.tar.gz) - runs-on: [linux, release-builder] + runs-on: release-builder-linux timeout-minutes: 60 needs: generate-publish-metadata env: @@ -181,7 +181,7 @@ jobs: build-armv7-unknown-linux-musleabihf-packages: name: Build Vector for armv7-unknown-linux-musleabihf (.tar.gz) - runs-on: [linux, release-builder] + runs-on: release-builder-linux timeout-minutes: 60 needs: generate-publish-metadata env: @@ -209,7 +209,7 @@ jobs: build-arm-unknown-linux-gnueabi-packages: name: Build Vector for arm-unknown-linux-gnueabi (.tar.gz) - runs-on: [ linux, release-builder ] + runs-on: release-builder-linux timeout-minutes: 60 needs: generate-publish-metadata env: @@ -237,7 +237,7 @@ jobs: build-arm-unknown-linux-musleabi-packages: name: Build Vector for arm-unknown-linux-musleabi (.tar.gz) - runs-on: [ linux, release-builder ] + runs-on: release-builder-linux needs: generate-publish-metadata env: VECTOR_VERSION: ${{ needs.generate-publish-metadata.outputs.vector_version }} @@ -293,7 +293,7 @@ jobs: build-x86_64-pc-windows-msvc-packages: name: Build Vector for x86_64-pc-windows-msvc (.zip) - runs-on: [windows, release-builder] + runs-on: release-builder-windows timeout-minutes: 90 needs: generate-publish-metadata env: diff --git a/.github/workflows/regression.yml b/.github/workflows/regression.yml index 798bc549f60bd..8402e4b25d572 100644 --- a/.github/workflows/regression.yml +++ b/.github/workflows/regression.yml @@ -287,7 +287,7 @@ jobs: build-baseline: name: Build baseline Vector container - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 30 needs: - compute-metadata @@ -325,7 +325,7 @@ jobs: build-comparison: name: Build comparison Vector container - runs-on: [linux, ubuntu-20.04-4core] + runs-on: ubuntu-20.04-4core timeout-minutes: 30 needs: - compute-metadata diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 01ddedd79ea46..1201184f1c986 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -35,7 +35,7 @@ jobs: checks: name: Checks - runs-on: [linux, ubuntu-20.04-8core] + runs-on: ubuntu-20.04-8core timeout-minutes: 45 needs: changes env: diff --git a/.github/workflows/unit_windows.yml b/.github/workflows/unit_windows.yml index 9bf6ac453aeec..46642beb1e273 100644 --- a/.github/workflows/unit_windows.yml +++ b/.github/workflows/unit_windows.yml @@ -6,7 +6,7 @@ on: jobs: test-windows: - runs-on: [windows, windows-2019-8core] + runs-on: windows-2019-8core timeout-minutes: 60 steps: - name: (PR comment) Get PR branch From a8e17a5e46ccdf55826afa6057fc4e6c1347f017 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 07:54:32 -0700 Subject: [PATCH 0211/1491] chore(deps): Bump quanta from 0.12.2 to 0.12.3 (#20218) Bumps [quanta](https://github.com/metrics-rs/quanta) from 0.12.2 to 0.12.3. - [Changelog](https://github.com/metrics-rs/quanta/blob/v0.12.3/CHANGELOG.md) - [Commits](https://github.com/metrics-rs/quanta/compare/v0.12.2...v0.12.3) --- updated-dependencies: - dependency-name: quanta dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 6 +++--- lib/vector-core/Cargo.toml | 2 +- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4083710039a7c..1d34c70b58480 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7075,9 +7075,9 @@ dependencies = [ [[package]] name = "quanta" -version = "0.12.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ca0b7bac0b97248c40bb77288fc52029cf1459c0461ea1b05ee32ccf011de2c" +checksum = "8e5167a477619228a0b284fac2674e3c388cba90631d7b7de620e6f1fcd08da5" dependencies = [ "crossbeam-utils", "libc", @@ -10402,7 +10402,7 @@ dependencies = [ "prost 0.12.3", "prost-build 0.12.3", "prost-types 0.12.3", - "quanta 0.12.2", + "quanta 0.12.3", "quickcheck", "quickcheck_macros", "rand 0.8.5", diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index cb4f04bc07f08..d8b3b95b3f545 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -38,7 +38,7 @@ pin-project.workspace = true proptest = { version = "1.4", optional = true } prost-types = { version = "0.12", default-features = false } prost = { version = "0.12", default-features = false, features = ["std"] } -quanta = { version = "0.12.2", default-features = false } +quanta = { version = "0.12.3", default-features = false } regex = { version = "1.10.4", default-features = false, features = ["std", "perf"] } ryu = { version = "1", default-features = false } serde.workspace = true From fead132341ac52d487abd85bb74b4cab57eafaff Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 07:55:24 -0700 Subject: [PATCH 0212/1491] chore(deps): Bump bytes from 1.5.0 to 1.6.0 (#20167) Bumps [bytes](https://github.com/tokio-rs/bytes) from 1.5.0 to 1.6.0. - [Release notes](https://github.com/tokio-rs/bytes/releases) - [Changelog](https://github.com/tokio-rs/bytes/blob/master/CHANGELOG.md) - [Commits](https://github.com/tokio-rs/bytes/compare/v1.5.0...v1.6.0) --- updated-dependencies: - dependency-name: bytes dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 150 ++++++++++++++--------------- Cargo.toml | 2 +- lib/file-source/Cargo.toml | 2 +- lib/loki-logproto/Cargo.toml | 2 +- lib/opentelemetry-proto/Cargo.toml | 2 +- lib/vector-buffers/Cargo.toml | 2 +- lib/vector-common/Cargo.toml | 2 +- lib/vector-core/Cargo.toml | 2 +- 8 files changed, 82 insertions(+), 82 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1d34c70b58480..26664e36f1c3e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -436,7 +436,7 @@ dependencies = [ "async-stream", "async-trait", "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "fnv", "futures-util", @@ -490,7 +490,7 @@ version = "7.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "68e40849c29a39012d38bff87bfed431f1ed6c53fbec493294c1045d61a7ae75" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "indexmap 2.2.6", "serde", "serde_json", @@ -576,7 +576,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbc1f1a75fd07f0f517322d103211f12d757658e91676def9a2e688774656c60" dependencies = [ "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "futures 0.3.30", "http 0.2.9", "memchr", @@ -757,7 +757,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "fastrand 2.0.1", "http 0.2.9", "hyper 0.14.28", @@ -787,7 +787,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "http-body 0.4.5", "pin-project-lite", @@ -855,7 +855,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "fastrand 2.0.1", "http 0.2.9", "regex", @@ -878,7 +878,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "regex", "tracing 0.1.40", @@ -900,7 +900,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "regex", "tracing 0.1.40", @@ -922,7 +922,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "regex", "tracing 0.1.40", @@ -948,7 +948,7 @@ dependencies = [ "aws-smithy-types", "aws-smithy-xml", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "http-body 0.4.5", "once_cell", @@ -997,7 +997,7 @@ dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "regex", "tracing 0.1.40", @@ -1037,7 +1037,7 @@ dependencies = [ "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.5.0", + "bytes 1.6.0", "form_urlencoded", "hex", "hmac", @@ -1069,7 +1069,7 @@ checksum = "c5a373ec01aede3dd066ec018c1bc4e8f5dd11b2c11c59c8eef1a5c68101f397" dependencies = [ "aws-smithy-http", "aws-smithy-types", - "bytes 1.5.0", + "bytes 1.6.0", "crc32c", "crc32fast", "hex", @@ -1089,7 +1089,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e6363078f927f612b970edf9d1903ef5cef9a64d1e8423525ebb1f0a1633c858" dependencies = [ "aws-smithy-types", - "bytes 1.5.0", + "bytes 1.6.0", "crc32fast", ] @@ -1102,7 +1102,7 @@ dependencies = [ "aws-smithy-eventstream", "aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.5.0", + "bytes 1.6.0", "bytes-utils", "futures-core", "http 0.2.9", @@ -1143,7 +1143,7 @@ dependencies = [ "aws-smithy-http", 
"aws-smithy-runtime-api", "aws-smithy-types", - "bytes 1.5.0", + "bytes 1.6.0", "fastrand 2.0.1", "h2 0.3.24", "http 0.2.9", @@ -1166,7 +1166,7 @@ checksum = "9acb931e0adaf5132de878f1398d83f8677f90ba70f01f65ff87f6d7244be1c5" dependencies = [ "aws-smithy-async", "aws-smithy-types", - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "http 1.0.0", "pin-project-lite", @@ -1182,7 +1182,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "abe14dceea1e70101d38fbf2a99e6a34159477c0fb95e68e05c66bd7ae4c3729" dependencies = [ "base64-simd", - "bytes 1.5.0", + "bytes 1.6.0", "bytes-utils", "http 0.2.9", "http-body 0.4.5", @@ -1228,7 +1228,7 @@ dependencies = [ "async-trait", "axum-core", "bitflags 1.3.2", - "bytes 1.5.0", + "bytes 1.6.0", "futures-util", "http 0.2.9", "http-body 0.4.5", @@ -1255,7 +1255,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c" dependencies = [ "async-trait", - "bytes 1.5.0", + "bytes 1.6.0", "futures-util", "http 0.2.9", "http-body 0.4.5", @@ -1273,7 +1273,7 @@ checksum = "4ccd63c07d1fbfb3d4543d7ea800941bf5a30db1911b9b9e4db3b2c4210a434f" dependencies = [ "async-trait", "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "dyn-clone", "futures 0.3.30", "getrandom 0.2.12", @@ -1322,7 +1322,7 @@ dependencies = [ "RustyXML", "async-trait", "azure_core", - "bytes 1.5.0", + "bytes 1.6.0", "futures 0.3.30", "hmac", "log", @@ -1344,7 +1344,7 @@ dependencies = [ "RustyXML", "azure_core", "azure_storage", - "bytes 1.5.0", + "bytes 1.6.0", "futures 0.3.30", "log", "serde", @@ -1545,7 +1545,7 @@ checksum = "0aed08d3adb6ebe0eff737115056652670ae290f177759aac19c30456135f94c" dependencies = [ "base64 0.22.0", "bollard-stubs", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "futures-core", "futures-util", @@ -1702,9 +1702,9 @@ dependencies = [ [[package]] name = "bytes" -version = "1.5.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" +checksum = "514de17de45fdb8dc022b1a7975556c53c86f9f0aa5f534b98977b171857c2c9" dependencies = [ "serde", ] @@ -1715,7 +1715,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e47d3a8076e283f3acd27400535992edb3ba4b5bb72f8891ad8fbe7932a7d4b9" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "either", ] @@ -2073,7 +2073,7 @@ name = "codecs" version = "0.1.0" dependencies = [ "apache-avro", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "csv-core", "derivative", @@ -2151,7 +2151,7 @@ version = "4.6.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "35ed6e9d84f0b51a7f52daf1c7d71dd136fd7a3f41a8462b8cdb8c78d920fad4" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-core", "memchr", "pin-project-lite", @@ -3269,7 +3269,7 @@ name = "file-source" version = "0.1.0" dependencies = [ "bstr 1.9.1", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "crc", "criterion", @@ -3751,7 +3751,7 @@ version = "0.3.24" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bb2c4422095b67ee78da96fbb51a4cc413b3b25883c7717ff7ca1ab31022c9c9" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "fnv", "futures-core", "futures-sink", @@ -3770,7 +3770,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "51ee2dd2e4f378392eeff5d51618cd9a63166a2513846bbc55f21cfacd9199d4" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", 
"fnv", "futures-core", "futures-sink", @@ -3844,7 +3844,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "headers-core", "http 0.2.9", "httpdate", @@ -4095,7 +4095,7 @@ version = "0.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bd6effc99afb63425aff9b05836f029929e345a6148a14b7ecd5ab67af944482" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "fnv", "itoa", ] @@ -4106,7 +4106,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b32afd38673a8016f7c9ae69e5af41a58f81b1d31689040f2f1959594ce194ea" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "fnv", "itoa", ] @@ -4117,7 +4117,7 @@ version = "0.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5f38f16d184e36f2408a55281cd658ecbd3ca05cce6d6510a176eca393e26d1" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "http 0.2.9", "pin-project-lite", ] @@ -4128,7 +4128,7 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1cac85db508abc24a2e48553ba12a996e87244a0395ce011e62b37158745d643" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "http 1.0.0", ] @@ -4138,7 +4138,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "41cb79eb393015dadd30fc252023adb0b2400a0caee0fa2a077e6e21a551e840" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-util", "http 1.0.0", "http-body 1.0.0", @@ -4206,7 +4206,7 @@ version = "0.14.28" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bf96e135eb83a2a8ddf766e426a841d8ddd7449d5f00d34ea02b41d2f19eef80" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-channel", "futures-core", "futures-util", @@ -4230,7 +4230,7 @@ version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "186548d73ac615b32a73aafe38fb4f56c0d340e110e5a200bcadbaf2e199263a" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-channel", "futures-util", "http 1.0.0", @@ -4282,7 +4282,7 @@ version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca815a891b24fdfb243fa3239c86154392b0953ee584aa1a2a1f66d20cbe75cc" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures 0.3.30", "headers", "http 0.2.9", @@ -4346,7 +4346,7 @@ version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d6183ddfa99b85da61a140bea0efc93fdf56ceaa041b37d553518030827f9905" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "hyper 0.14.28", "native-tls", "tokio", @@ -4359,7 +4359,7 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ca38ef113da30126bbff9cd1705f9273e15d45498615d138b0c20279ac7a76aa" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-channel", "futures-util", "http 1.0.0", @@ -4734,7 +4734,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6d9455388f4977de4d0934efa9f7d36296295537d774574113a20f6082de03da" dependencies = [ "base64 0.13.1", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "serde", "serde-value", @@ -4748,7 +4748,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cd990069640f9db34b3b0f7a1afc62a05ffaa3be9b66aa3c313f58346df7f788" dependencies = [ "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "http 0.2.9", 
"percent-encoding", @@ -4826,7 +4826,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "544339f1665488243f79080441cacb09c997746fd763342303e66eebb9d3ba13" dependencies = [ "base64 0.20.0", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "dirs-next", "either", @@ -5073,7 +5073,7 @@ checksum = "879777f0cc6f3646a044de60e4ab98c75617e3f9580f7a2032e6ad7ea0cd3054" name = "loki-logproto" version = "0.1.0" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "prost 0.12.3", "prost-build 0.12.3", @@ -5473,7 +5473,7 @@ version = "3.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a15d522be0a9c3e46fd2632e272d178f56387bdb5c9fbb3a36c649062e9b5219" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "encoding_rs", "futures-util", "http 1.0.0", @@ -5988,7 +5988,7 @@ dependencies = [ "async-trait", "backon", "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "flagset", "futures 0.3.30", @@ -6096,7 +6096,7 @@ dependencies = [ name = "opentelemetry-proto" version = "0.1.0" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "hex", "ordered-float 4.2.0", @@ -6614,7 +6614,7 @@ checksum = "49b6c5ef183cd3ab4ba005f1ca64c21e8bd97ce4699cfea9e8d9a2c4958ca520" dependencies = [ "base64 0.21.7", "byteorder", - "bytes 1.5.0", + "bytes 1.6.0", "fallible-iterator", "hmac", "md-5", @@ -6630,7 +6630,7 @@ version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8d2234cdee9408b523530a9b6d2d6b373d1db34f6a8e51dc03ded1828d7fb67c" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "fallible-iterator", "postgres-protocol", @@ -6861,7 +6861,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b82eaa1d779e9a4bc1c3217db8ffbeabaae1dca241bf70183242128d48681cd" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "prost-derive 0.11.9", ] @@ -6871,7 +6871,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "146c289cda302b98a28d40c8b3b90498d6e526dd24ac2ecea73e4e491685b94a" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "prost-derive 0.12.3", ] @@ -6881,7 +6881,7 @@ version = "0.11.9" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "heck 0.4.1", "itertools 0.10.5", "lazy_static", @@ -6903,7 +6903,7 @@ version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c55e02e35260070b6f716a2423c2ff1c3bb1642ddca6f99e1f26d06268a0e2d2" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "heck 0.4.1", "itertools 0.11.0", "log", @@ -7020,7 +7020,7 @@ checksum = "5d21c6a837986cf25d22ac5b951c267d95808f3c830ff009c2879fff259a0268" dependencies = [ "async-trait", "bit-vec", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "crc", "data-url", @@ -7379,7 +7379,7 @@ checksum = "c580d9cbbe1d1b479e8d67cf9daf6a62c957e6846048408b80b43ac3f6af84cd" dependencies = [ "arc-swap", "async-trait", - "bytes 1.5.0", + "bytes 1.6.0", "combine 4.6.6", "futures 0.3.30", "futures-util", @@ -7511,7 +7511,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "78bf93c4af7a8bb7d879d51cebe797356ff10ae8516ace542b5182d9dcac10b2" dependencies = [ "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "encoding_rs", "futures-core", "futures-util", @@ -7598,7 +7598,7 @@ checksum = "5cba464629b3394fc4dbc6f940ff8f5b4ff5c7aef40f29166fd4ad12acbc99c0" dependencies = [ 
"bitvec", "bytecheck", - "bytes 1.5.0", + "bytes 1.6.0", "hashbrown 0.12.3", "ptr_meta", "rend", @@ -7726,7 +7726,7 @@ version = "0.24.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e1568e15fab2d546f940ed3a21f48bbbd1c494c90c99c4481339364a497f94a9" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "flume 0.11.0", "futures-util", "log", @@ -7746,7 +7746,7 @@ checksum = "06676aec5ccb8fc1da723cc8c0f9a46549f21ebb8753d3915c6c41db1e7f1dc4" dependencies = [ "arrayvec", "borsh", - "bytes 1.5.0", + "bytes 1.6.0", "num-traits", "rand 0.8.5", "rkyv", @@ -9125,7 +9125,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" dependencies = [ "backtrace", - "bytes 1.5.0", + "bytes 1.6.0", "libc", "mio", "num_cpus", @@ -9200,7 +9200,7 @@ checksum = "d340244b32d920260ae7448cb72b6e238bddc3d4f7603394e7dd46ed8e48f5b8" dependencies = [ "async-trait", "byteorder", - "bytes 1.5.0", + "bytes 1.6.0", "fallible-iterator", "futures-channel", "futures-util", @@ -9269,7 +9269,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2468baabc3311435b55dd935f702f42cd1b8abb7e754fb7dfb16bd36aa88f9f7" dependencies = [ "async-stream", - "bytes 1.5.0", + "bytes 1.6.0", "futures-core", "tokio", "tokio-stream", @@ -9293,7 +9293,7 @@ name = "tokio-util" version = "0.7.8" source = "git+https://github.com/vectordotdev/tokio?branch=tokio-util-0.7.8-framed-read-continue-on-error#3747655f8f0443e13fe20da3f613ea65c23347c2" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-core", "futures-io", "futures-sink", @@ -9369,7 +9369,7 @@ dependencies = [ "async-trait", "axum", "base64 0.21.7", - "bytes 1.5.0", + "bytes 1.6.0", "flate2", "h2 0.3.24", "http 0.2.9", @@ -9446,7 +9446,7 @@ dependencies = [ "async-compression", "base64 0.21.7", "bitflags 2.4.1", - "bytes 1.5.0", + "bytes 1.6.0", "futures-core", "futures-util", "http 0.2.9", @@ -9701,7 +9701,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ "byteorder", - "bytes 1.5.0", + "bytes 1.6.0", "data-encoding", "http 0.2.9", "httparse", @@ -10056,7 +10056,7 @@ dependencies = [ "base64 0.22.0", "bloomy", "bollard", - "bytes 1.5.0", + "bytes 1.6.0", "bytesize", "chrono", "chrono-tz", @@ -10233,7 +10233,7 @@ dependencies = [ "async-stream", "async-trait", "bytecheck", - "bytes 1.5.0", + "bytes 1.6.0", "clap 4.5.4", "crc32fast", "criterion", @@ -10276,7 +10276,7 @@ name = "vector-common" version = "0.1.0" dependencies = [ "async-stream", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "chrono-tz", "crossbeam-utils", @@ -10367,7 +10367,7 @@ dependencies = [ "async-trait", "base64 0.22.0", "bitmask-enum", - "bytes 1.5.0", + "bytes 1.6.0", "chrono", "chrono-tz", "criterion", @@ -10564,7 +10564,7 @@ dependencies = [ "arbitrary", "base16", "base64 0.22.0", - "bytes 1.5.0", + "bytes 1.6.0", "cbc", "cfb-mode", "cfg-if", @@ -10708,7 +10708,7 @@ version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c1e92e22e03ff1230c03a1a8ee37d2f89cd489e2e541b7550d6afad96faed169" dependencies = [ - "bytes 1.5.0", + "bytes 1.6.0", "futures-channel", "futures-util", "headers", diff --git a/Cargo.toml b/Cargo.toml index 576da94f37d84..92fa5c3f15522 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -260,7 +260,7 @@ axum = { version = "0.6.20", default-features = false } base64 = { version = "0.22.0", default-features = 
false, optional = true } bloomy = { version = "1.2.0", default-features = false, optional = true } bollard = { version = "0.16.1", default-features = false, features = ["ssl", "chrono"], optional = true } -bytes = { version = "1.5.0", default-features = false, features = ["serde"] } +bytes = { version = "1.6.0", default-features = false, features = ["serde"] } bytesize = { version = "1.3.0", default-features = false } chrono.workspace = true chrono-tz = { version = "0.8.6", default-features = false } diff --git a/lib/file-source/Cargo.toml b/lib/file-source/Cargo.toml index e7f21b6e6124a..4bb22d44889f1 100644 --- a/lib/file-source/Cargo.toml +++ b/lib/file-source/Cargo.toml @@ -24,7 +24,7 @@ default-features = false features = [] [dependencies.bytes] -version = "1.5.0" +version = "1.6.0" default-features = false features = [] diff --git a/lib/loki-logproto/Cargo.toml b/lib/loki-logproto/Cargo.toml index f7b8bb28ad29f..4f8887c8f617c 100644 --- a/lib/loki-logproto/Cargo.toml +++ b/lib/loki-logproto/Cargo.toml @@ -10,7 +10,7 @@ publish = false [dependencies] prost = { version = "0.12", default-features = false, features = ["std"] } prost-types = { version = "0.12", default-features = false, features = ["std"] } -bytes = { version = "1.5.0", default-features = false } +bytes = { version = "1.6.0", default-features = false } snap = { version = "1.1.1", default-features = false } [dev-dependencies] diff --git a/lib/opentelemetry-proto/Cargo.toml b/lib/opentelemetry-proto/Cargo.toml index e106bea46ad44..7877005e539bb 100644 --- a/lib/opentelemetry-proto/Cargo.toml +++ b/lib/opentelemetry-proto/Cargo.toml @@ -10,7 +10,7 @@ prost-build = { version = "0.12", default-features = false} tonic-build = { version = "0.10", default-features = false, features = ["prost", "transport"] } [dependencies] -bytes = { version = "1.5.0", default-features = false, features = ["serde"] } +bytes = { version = "1.6.0", default-features = false, features = ["serde"] } chrono.workspace = true hex = { version = "0.4.3", default-features = false, features = ["std"] } lookup = { package = "vector-lookup", path = "../vector-lookup", default-features = false } diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index 4b477d6932fe5..35e7076926a86 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -10,7 +10,7 @@ async-recursion = "1.1.0" async-stream = "0.3.5" async-trait = { version = "0.1", default-features = false } bytecheck = { version = "0.6.9", default-features = false, features = ["std"] } -bytes = { version = "1.5.0", default-features = false } +bytes = { version = "1.6.0", default-features = false } crc32fast = { version = "1.4.0", default-features = false } crossbeam-queue = { version = "0.3.11", default-features = false, features = ["std"] } crossbeam-utils = { version = "0.8.19", default-features = false } diff --git a/lib/vector-common/Cargo.toml b/lib/vector-common/Cargo.toml index 318e3b9d427df..6f6a6517aeddb 100644 --- a/lib/vector-common/Cargo.toml +++ b/lib/vector-common/Cargo.toml @@ -40,7 +40,7 @@ tokenize = [ [dependencies] async-stream = "0.3.5" -bytes = { version = "1.5.0", default-features = false, optional = true } +bytes = { version = "1.6.0", default-features = false, optional = true } chrono-tz = { version = "0.8.6", default-features = false, features = ["serde"] } chrono.workspace = true crossbeam-utils = { version = "0.8.19", default-features = false } diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index 
d8b3b95b3f545..d8c3c2435409c 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -9,7 +9,7 @@ publish = false async-graphql = { version = "7.0.3", default-features = false, features = ["playground" ], optional = true } async-trait = { version = "0.1", default-features = false } bitmask-enum = { version = "2.2.3", default-features = false } -bytes = { version = "1.5.0", default-features = false, features = ["serde"] } +bytes = { version = "1.6.0", default-features = false, features = ["serde"] } chrono.workspace = true crossbeam-utils = { version = "0.8.19", default-features = false } db-key = { version = "0.0.5", default-features = false, optional = true } From 173deda35dbf90377a82aacbc5cca273e0468e73 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:55:39 +0000 Subject: [PATCH 0213/1491] chore(deps): Bump serde_json from 1.0.114 to 1.0.115 (#20188) Bumps [serde_json](https://github.com/serde-rs/json) from 1.0.114 to 1.0.115. - [Release notes](https://github.com/serde-rs/json/releases) - [Commits](https://github.com/serde-rs/json/compare/v1.0.114...v1.0.115) --- updated-dependencies: - dependency-name: serde_json dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 26664e36f1c3e..e21cfc01a2a7d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -8181,9 +8181,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.114" +version = "1.0.115" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f09b1bd632ef549eaa9f60a1f8de742bdbc698e6cee2095fc84dde5f549ae0" +checksum = "12dc5c46daa8e9fdf4f5e71b6cf9a53f2487da0e86e55808e2d35539666497dd" dependencies = [ "indexmap 2.2.6", "itoa", diff --git a/Cargo.toml b/Cargo.toml index 92fa5c3f15522..6ab43a5df49e3 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -135,7 +135,7 @@ indexmap = { version = "2.2.6", default-features = false, features = ["serde", " pin-project = { version = "1.1.5", default-features = false } proptest = "1.4" proptest-derive = "0.4.0" -serde_json = { version = "1.0.114", default-features = false, features = ["raw_value", "std"] } +serde_json = { version = "1.0.115", default-features = false, features = ["raw_value", "std"] } serde = { version = "1.0.197", default-features = false, features = ["alloc", "derive", "rc"] } toml = { version = "0.8.12", default-features = false, features = ["display", "parse"] } vrl = { version = "0.13.0", features = ["arbitrary", "cli", "test", "test_framework"] } From 5ffb1a5557f1a257d841687b4d3ba48547dfd8d4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:55:42 +0000 Subject: [PATCH 0214/1491] chore(ci): Bump actions/add-to-project from 0.6.1 to 1.0.0 (#20194) Bumps [actions/add-to-project](https://github.com/actions/add-to-project) from 0.6.1 to 1.0.0. - [Release notes](https://github.com/actions/add-to-project/releases) - [Commits](https://github.com/actions/add-to-project/compare/v0.6.1...v1.0.0) --- updated-dependencies: - dependency-name: actions/add-to-project dependency-type: direct:production update-type: version-update:semver-major ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- .github/workflows/gardener_open_issue.yml | 2 +- .github/workflows/gardener_open_pr.yml | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/.github/workflows/gardener_open_issue.yml b/.github/workflows/gardener_open_issue.yml index ae9ba7f10635f..c613c6e80a629 100644 --- a/.github/workflows/gardener_open_issue.yml +++ b/.github/workflows/gardener_open_issue.yml @@ -12,7 +12,7 @@ jobs: runs-on: ubuntu-latest timeout-minutes: 5 steps: - - uses: actions/add-to-project@v0.6.1 + - uses: actions/add-to-project@v1.0.0 with: project-url: https://github.com/orgs/vectordotdev/projects/49 github-token: ${{ secrets.GH_PROJECT_PAT }} diff --git a/.github/workflows/gardener_open_pr.yml b/.github/workflows/gardener_open_pr.yml index 6079d1e5770f9..1887df5882985 100644 --- a/.github/workflows/gardener_open_pr.yml +++ b/.github/workflows/gardener_open_pr.yml @@ -26,7 +26,7 @@ jobs: username: ${{ github.actor }} team: vector GITHUB_TOKEN: ${{ steps.generate_token.outputs.token }} - - uses: actions/add-to-project@v0.6.1 + - uses: actions/add-to-project@v1.0.0 if: ${{ steps.checkVectorMember.outputs.isTeamMember == 'false' }} with: project-url: https://github.com/orgs/vectordotdev/projects/49 @@ -37,7 +37,7 @@ jobs: timeout-minutes: 5 if: ${{ github.actor == 'dependabot[bot]' }} steps: - - uses: actions/add-to-project@v0.6.1 + - uses: actions/add-to-project@v1.0.0 with: project-url: https://github.com/orgs/vectordotdev/projects/49 github-token: ${{ secrets.GH_PROJECT_PAT }} From 0599d60758386b631533adce250b4820f5434e59 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:56:54 +0000 Subject: [PATCH 0215/1491] chore(deps): Bump the zstd group with 1 update (#20199) Bumps the zstd group with 1 update: [zstd](https://github.com/gyscos/zstd-rs). Updates `zstd` from 0.13.0 to 0.13.1 - [Release notes](https://github.com/gyscos/zstd-rs/releases) - [Commits](https://github.com/gyscos/zstd-rs/compare/v0.13.0...v0.13.1) --- updated-dependencies: - dependency-name: zstd dependency-type: direct:production update-type: version-update:semver-patch dependency-group: zstd ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 22 +++++++++++----------- Cargo.toml | 4 ++-- 2 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index e21cfc01a2a7d..1ab3ebdf5e344 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -368,8 +368,8 @@ dependencies = [ "memchr", "pin-project-lite", "tokio", - "zstd 0.13.0", - "zstd-safe 7.0.0", + "zstd 0.13.1", + "zstd-safe 7.1.0", ] [[package]] @@ -10202,7 +10202,7 @@ dependencies = [ "warp", "windows-service", "wiremock", - "zstd 0.13.0", + "zstd 0.13.1", ] [[package]] @@ -10639,7 +10639,7 @@ dependencies = [ "uuid", "webbrowser", "woothee", - "zstd 0.13.0", + "zstd 0.13.1", ] [[package]] @@ -11280,11 +11280,11 @@ dependencies = [ [[package]] name = "zstd" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110" +checksum = "2d789b1514203a1120ad2429eae43a7bd32b90976a7bb8a05f7ec02fa88cc23a" dependencies = [ - "zstd-safe 7.0.0", + "zstd-safe 7.1.0", ] [[package]] @@ -11299,18 +11299,18 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.0.0" +version = "7.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e" +checksum = "1cd99b45c6bc03a018c8b8a86025678c87e55526064e38f9df301989dce7ec0a" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.9+zstd.1.5.5" +version = "2.0.10+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" +checksum = "c253a4914af5bafc8fa8c86ee400827e83cf6ec01195ec1f1ed8441bf00d65aa" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 6ab43a5df49e3..80dc5b7132464 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -341,7 +341,7 @@ typetag = { version = "0.2.16", default-features = false } url = { version = "2.5.0", default-features = false, features = ["serde"] } uuid = { version = "1", default-features = false, features = ["serde", "v4"] } warp = { version = "0.3.6", default-features = false } -zstd = { version = "0.13.0", default-features = false } +zstd = { version = "0.13.1", default-features = false } arr_macro = { version = "0.2.1" } # depending on fork for bumped nix dependency @@ -389,7 +389,7 @@ vector-lib = { path = "lib/vector-lib", default-features = false, features = ["v vrl.workspace = true wiremock = "0.5.22" -zstd = { version = "0.13.0", default-features = false } +zstd = { version = "0.13.1", default-features = false } [patch.crates-io] # The upgrade for `tokio-util` >= 0.6.9 is blocked on https://github.com/vectordotdev/vector/issues/11257. From ae5673367d3db51700b3c1b3264859f4f18cc2db Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:57:02 +0000 Subject: [PATCH 0216/1491] chore(deps): Bump memchr from 2.7.1 to 2.7.2 (#20200) Bumps [memchr](https://github.com/BurntSushi/memchr) from 2.7.1 to 2.7.2. - [Commits](https://github.com/BurntSushi/memchr/compare/2.7.1...2.7.2) --- updated-dependencies: - dependency-name: memchr dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 1ab3ebdf5e344..943687b97f956 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -5232,9 +5232,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.7.1" +version = "2.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "523dc4f511e55ab87b694dc30d0f820d60906ef06413f93d4d7a1385599cc149" +checksum = "6c8640c5d730cb13ebd907d8d04b52f55ac9a2eec55b440c8892f40d56c76c1d" [[package]] name = "memmap2" From be56139a9c662ac09bd294f801b93073cccb0754 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:57:12 +0000 Subject: [PATCH 0217/1491] chore(deps): Bump enum_dispatch from 0.3.12 to 0.3.13 (#20207) Bumps [enum_dispatch](https://gitlab.com/antonok/enum_dispatch) from 0.3.12 to 0.3.13. - [Changelog](https://gitlab.com/antonok/enum_dispatch/blob/master/CHANGELOG.md) - [Commits](https://gitlab.com/antonok/enum_dispatch/compare/v0.3.12...v0.3.13) --- updated-dependencies: - dependency-name: enum_dispatch dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 943687b97f956..b41faa537a2da 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3065,9 +3065,9 @@ dependencies = [ [[package]] name = "enum_dispatch" -version = "0.3.12" +version = "0.3.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f33313078bb8d4d05a2733a94ac4c2d8a0df9a2b84424ebf4f33bfc224a890e" +checksum = "aa18ce2bc66555b3218614519ac839ddb759a7d6720732f979ef8d13be147ecd" dependencies = [ "once_cell", "proc-macro2 1.0.79", diff --git a/Cargo.toml b/Cargo.toml index 80dc5b7132464..27288b105e362 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -271,7 +271,7 @@ derivative = { version = "2.2.0", default-features = false } dirs-next = { version = "2.0.0", default-features = false, optional = true } dyn-clone = { version = "1.0.17", default-features = false } encoding_rs = { version = "0.8.33", default-features = false, features = ["serde"] } -enum_dispatch = { version = "0.3.12", default-features = false } +enum_dispatch = { version = "0.3.13", default-features = false } exitcode = { version = "1.1.2", default-features = false } flate2 = { version = "1.0.28", default-features = false, features = ["default"] } futures-util = { version = "0.3.29", default-features = false } From 4b4068fa3d0a1372b3d6a90c4b5ae73da3fc0f02 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:57:23 +0000 Subject: [PATCH 0218/1491] chore(deps): Bump tokio from 1.36.0 to 1.37.0 (#20208) Bumps [tokio](https://github.com/tokio-rs/tokio) from 1.36.0 to 1.37.0. - [Release notes](https://github.com/tokio-rs/tokio/releases) - [Commits](https://github.com/tokio-rs/tokio/compare/tokio-1.36.0...tokio-1.37.0) --- updated-dependencies: - dependency-name: tokio dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 4 ++-- lib/file-source/Cargo.toml | 2 +- lib/k8s-e2e-tests/Cargo.toml | 2 +- lib/k8s-test-framework/Cargo.toml | 2 +- lib/vector-api-client/Cargo.toml | 2 +- lib/vector-buffers/Cargo.toml | 2 +- lib/vector-common/Cargo.toml | 4 ++-- lib/vector-core/Cargo.toml | 2 +- lib/vector-stream/Cargo.toml | 2 +- 10 files changed, 13 insertions(+), 13 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index b41faa537a2da..07fb81c7f9cb0 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -9120,9 +9120,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.36.0" +version = "1.37.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931" +checksum = "1adbebffeca75fcfd058afa480fb6c0b81e165a0323f9c9d39c9697e37c46787" dependencies = [ "backtrace", "bytes 1.6.0", diff --git a/Cargo.toml b/Cargo.toml index 27288b105e362..f843a0fffc33d 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -158,7 +158,7 @@ loki-logproto = { path = "lib/loki-logproto", optional = true } async-stream = { version = "0.3.5", default-features = false } async-trait = { version = "0.1.79", default-features = false } futures = { version = "0.3.30", default-features = false, features = ["compat", "io-compat"], package = "futures" } -tokio = { version = "1.36.0", default-features = false, features = ["full"] } +tokio = { version = "1.37.0", default-features = false, features = ["full"] } tokio-openssl = { version = "0.6.4", default-features = false } tokio-stream = { version = "0.1.15", default-features = false, features = ["net", "sync", "time"] } tokio-util = { version = "0.7", default-features = false, features = ["io", "time"] } @@ -382,7 +382,7 @@ reqwest = { version = "0.11", features = ["json"] } rstest = {version = "0.18.2"} tempfile = "3.10.1" test-generator = "0.3.1" -tokio = { version = "1.36.0", features = ["test-util"] } +tokio = { version = "1.37.0", features = ["test-util"] } tokio-test = "0.4.4" tower-test = "0.4.0" vector-lib = { path = "lib/vector-lib", default-features = false, features = ["vrl", "test"] } diff --git a/lib/file-source/Cargo.toml b/lib/file-source/Cargo.toml index 4bb22d44889f1..df919bae7b19a 100644 --- a/lib/file-source/Cargo.toml +++ b/lib/file-source/Cargo.toml @@ -69,7 +69,7 @@ default-features = false features = [] [dependencies.tokio] -version = "1.36.0" +version = "1.37.0" default-features = false features = ["full"] diff --git a/lib/k8s-e2e-tests/Cargo.toml b/lib/k8s-e2e-tests/Cargo.toml index d9e389312a8e0..7efad957d2cd0 100644 --- a/lib/k8s-e2e-tests/Cargo.toml +++ b/lib/k8s-e2e-tests/Cargo.toml @@ -14,7 +14,7 @@ k8s-test-framework = { version = "0.1", path = "../k8s-test-framework" } regex = "1" reqwest = { version = "0.11.26", features = ["json"] } serde_json.workspace = true -tokio = { version = "1.36.0", features = ["full"] } +tokio = { version = "1.37.0", features = ["full"] } indoc = "2.0.5" env_logger = "0.10" tracing = { version = "0.1", features = ["log"] } diff --git a/lib/k8s-test-framework/Cargo.toml b/lib/k8s-test-framework/Cargo.toml index eabd09b063c90..18154156896e9 100644 --- a/lib/k8s-test-framework/Cargo.toml +++ b/lib/k8s-test-framework/Cargo.toml @@ -11,5 +11,5 @@ license = "MPL-2.0" k8s-openapi = { version = "0.16.0", default-features = false, features = ["v1_19"] } serde_json.workspace 
= true tempfile = "3" -tokio = { version = "1.36.0", features = ["full"] } +tokio = { version = "1.37.0", features = ["full"] } log = "0.4" diff --git a/lib/vector-api-client/Cargo.toml b/lib/vector-api-client/Cargo.toml index 095cd5a512f34..b7a845a19c1b7 100644 --- a/lib/vector-api-client/Cargo.toml +++ b/lib/vector-api-client/Cargo.toml @@ -17,7 +17,7 @@ anyhow = { version = "1.0.81", default-features = false, features = ["std"] } # Tokio / Futures futures = { version = "0.3", default-features = false, features = ["compat", "io-compat"] } -tokio = { version = "1.36.0", default-features = false, features = ["macros", "rt", "sync"] } +tokio = { version = "1.37.0", default-features = false, features = ["macros", "rt", "sync"] } tokio-stream = { version = "0.1.15", default-features = false, features = ["sync"] } # GraphQL diff --git a/lib/vector-buffers/Cargo.toml b/lib/vector-buffers/Cargo.toml index 35e7076926a86..f496343e95f84 100644 --- a/lib/vector-buffers/Cargo.toml +++ b/lib/vector-buffers/Cargo.toml @@ -26,7 +26,7 @@ rkyv = { version = "0.7.44", default-features = false, features = ["size_32", "s serde.workspace = true snafu = { version = "0.7.5", default-features = false, features = ["std"] } tokio-util = { version = "0.7.0", default-features = false } -tokio = { version = "1.36.0", default-features = false, features = ["rt", "macros", "rt-multi-thread", "sync", "fs", "io-util", "time"] } +tokio = { version = "1.37.0", default-features = false, features = ["rt", "macros", "rt-multi-thread", "sync", "fs", "io-util", "time"] } tracing = { version = "0.1.34", default-features = false, features = ["attributes"] } vector-config = { path = "../vector-config", default-features = false } vector-config-common = { path = "../vector-config-common", default-features = false } diff --git a/lib/vector-common/Cargo.toml b/lib/vector-common/Cargo.toml index 6f6a6517aeddb..280503dfc9753 100644 --- a/lib/vector-common/Cargo.toml +++ b/lib/vector-common/Cargo.toml @@ -58,7 +58,7 @@ serde_json.workspace = true smallvec = { version = "1", default-features = false } snafu = { version = "0.7", optional = true } stream-cancel = { version = "0.8.2", default-features = false } -tokio = { version = "1.36.0", default-features = false, features = ["macros", "time"] } +tokio = { version = "1.37.0", default-features = false, features = ["macros", "time"] } tracing = { version = "0.1.34", default-features = false } vrl.workspace = true vector-config = { path = "../vector-config" } @@ -67,6 +67,6 @@ vector-config-macros = { path = "../vector-config-macros" } [dev-dependencies] futures = { version = "0.3.30", default-features = false, features = ["async-await", "std"] } -tokio = { version = "1.36.0", default-features = false, features = ["rt", "time"] } +tokio = { version = "1.37.0", default-features = false, features = ["rt", "time"] } quickcheck = "1" quickcheck_macros = "1" diff --git a/lib/vector-core/Cargo.toml b/lib/vector-core/Cargo.toml index d8c3c2435409c..4c0402f48a15f 100644 --- a/lib/vector-core/Cargo.toml +++ b/lib/vector-core/Cargo.toml @@ -47,7 +47,7 @@ serde_with = { version = "3.7.0", default-features = false, features = ["std", " smallvec = { version = "1", default-features = false, features = ["serde", "const_generics"] } snafu = { version = "0.7.5", default-features = false } socket2 = { version = "0.5.6", default-features = false } -tokio = { version = "1.36.0", default-features = false, features = ["net"] } +tokio = { version = "1.37.0", default-features = false, features = ["net"] } 
tokio-openssl = { version = "0.6.4", default-features = false } tokio-stream = { version = "0.1", default-features = false, features = ["time"], optional = true } tokio-util = { version = "0.7.0", default-features = false, features = ["time"] } diff --git a/lib/vector-stream/Cargo.toml b/lib/vector-stream/Cargo.toml index 0f132cf16c93f..ca01a7e0aa55c 100644 --- a/lib/vector-stream/Cargo.toml +++ b/lib/vector-stream/Cargo.toml @@ -10,7 +10,7 @@ async-stream = { version = "0.3.5", default-features = false } futures = { version = "0.3.30", default-features = false, features = ["std"] } futures-util = { version = "0.3.29", default-features = false, features = ["std"] } pin-project.workspace = true -tokio = { version = "1.36.0", default-features = false, features = ["net"] } +tokio = { version = "1.37.0", default-features = false, features = ["net"] } tokio-util = { version = "0.7.0", default-features = false, features = ["time"] } tower = { version = "0.4", default-features = false, features = ["util"] } tracing = { version = "0.1.34", default-features = false } From 333ed14f71c1acaaeb0936f4a3cdefd9e89518f9 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 14:57:39 +0000 Subject: [PATCH 0219/1491] chore(deps): Bump syn from 2.0.55 to 2.0.57 (#20219) Bumps [syn](https://github.com/dtolnay/syn) from 2.0.55 to 2.0.57. - [Release notes](https://github.com/dtolnay/syn/releases) - [Commits](https://github.com/dtolnay/syn/compare/2.0.55...2.0.57) --- updated-dependencies: - dependency-name: syn dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 94 +++++++++++++++++++++++++++--------------------------- 1 file changed, 47 insertions(+), 47 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 07fb81c7f9cb0..4b466f469af8f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -468,7 +468,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "strum 0.26.1", - "syn 2.0.55", + "syn 2.0.57", "thiserror", ] @@ -651,7 +651,7 @@ checksum = "30c5ef0ede93efbf733c1a727f3b6b5a1060bbedd5600183e66f6e4be4af0ec5" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -691,7 +691,7 @@ checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -708,7 +708,7 @@ checksum = "a507401cad91ec6a857ed5513a2073c82a9b9048762b885bb98655b306964681" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -1479,7 +1479,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9990737a6d5740ff51cdbbc0f0503015cb30c390f6623968281eb214a520cfc0" dependencies = [ "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -1609,7 +1609,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", "syn_derive", ] @@ -2041,7 +2041,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -2569,7 +2569,7 @@ checksum = "f46882e17999c6cc590af592290432be3bce0428cb0d5f8b6715e4dc7b383eb3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -2641,7 +2641,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 
1.0.35", "strsim 0.10.0", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -2674,7 +2674,7 @@ checksum = "a668eda54683121533a393014d8692171709ff57a7d61f187b6e782719f8933f" dependencies = [ "darling_core 0.20.8", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -2779,7 +2779,7 @@ checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -3060,7 +3060,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -3072,7 +3072,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -3092,7 +3092,7 @@ checksum = "5c785274071b1b420972453b306eeca06acf4633829db4223b58a2a8c5953bc4" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -3494,7 +3494,7 @@ checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -5291,7 +5291,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -5411,7 +5411,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "regex", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -5861,7 +5861,7 @@ dependencies = [ "proc-macro-crate 1.3.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -5873,7 +5873,7 @@ dependencies = [ "proc-macro-crate 2.0.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -6061,7 +6061,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -6346,7 +6346,7 @@ dependencies = [ "pest_meta", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -6434,7 +6434,7 @@ checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -6711,7 +6711,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2 1.0.79", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -6914,7 +6914,7 @@ dependencies = [ "prost 0.12.3", "prost-types 0.12.3", "regex", - "syn 2.0.55", + "syn 2.0.57", "tempfile", "which 4.4.2", ] @@ -6942,7 +6942,7 @@ dependencies = [ "itertools 0.11.0", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -7716,7 +7716,7 @@ dependencies = [ "regex", "relative-path", "rustc_version 0.4.0", - "syn 2.0.55", + "syn 2.0.57", "unicode-ident", ] @@ -8165,7 +8165,7 @@ checksum = "7eb0b34b42edc17f6b7cac84a52a1c5f0e1bb2227e997ca9011ea3dd34e8610b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8176,7 +8176,7 @@ checksum = "330f01ce65a3a5fe59a60c82f3c9a024b573b8a6e875bd233fe5f934e71d54e3" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8238,7 +8238,7 @@ checksum = "3081f5ffbb02284dda55132aa26daecedd7372a42417bbbab6f14ab7d6bb9145" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", 
- "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8311,7 +8311,7 @@ dependencies = [ "darling 0.20.8", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8590,7 +8590,7 @@ dependencies = [ "heck 0.4.1", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8777,7 +8777,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8790,7 +8790,7 @@ dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", "rustversion", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -8833,9 +8833,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.55" +version = "2.0.57" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "002a1b3dbf967edfafc32655d0f377ab0bb7b994aa1d32c8cc7e9b8bf3ebb8f0" +checksum = "11a6ae1e52eb25aab8f3fb9fca13be982a373b8f1157ca14b897a825ba4a2d35" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", @@ -8851,7 +8851,7 @@ dependencies = [ "proc-macro-error", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -9019,7 +9019,7 @@ checksum = "c61f3ba182994efc43764a46c018c347bc492c79f024e705f46567b418f6d4f7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -9167,7 +9167,7 @@ checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -9414,7 +9414,7 @@ dependencies = [ "proc-macro2 1.0.79", "prost-build 0.12.3", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -9517,7 +9517,7 @@ checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -9751,7 +9751,7 @@ checksum = "f03ca4cb38206e2bef0700092660bb74d696f808514dae47fa1467cbfe26e96e" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -9781,7 +9781,7 @@ checksum = "ac73887f47b9312552aa90ef477927ff014d63d1920ca8037c6c1951eab64bb1" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] @@ -10341,7 +10341,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_json", - "syn 2.0.55", + "syn 2.0.57", "tracing 0.1.40", ] @@ -10354,7 +10354,7 @@ dependencies = [ "quote 1.0.35", "serde", "serde_derive_internals", - "syn 2.0.55", + "syn 2.0.57", "vector-config", "vector-config-common", ] @@ -10771,7 +10771,7 @@ dependencies = [ "once_cell", "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", "wasm-bindgen-shared", ] @@ -10805,7 +10805,7 @@ checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -11260,7 +11260,7 @@ checksum = "b3c129550b3e6de3fd0ba67ba5c81818f9805e58b8d7fee80a3a59d2c9fc601a" dependencies = [ "proc-macro2 1.0.79", "quote 1.0.35", - "syn 2.0.55", + "syn 2.0.57", ] [[package]] From 7d705e0c1045f062e80bd2fb5215c0911bf3bf7d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Michael=20=E3=82=B5=E3=82=A4=E3=83=88=E3=83=BC=20=E4=B8=AD?= =?UTF-8?q?=E6=9D=91=20Bashurov?= Date: Mon, 1 Apr 2024 18:34:02 +0300 Subject: [PATCH 0220/1491] docs: fix type cardinality docs (#20209) --- src/transforms/tag_cardinality_limit/config.rs | 2 +- .../transforms/base/tag_cardinality_limit.cue 
| 2 +- .../components/transforms/tag_cardinality_limit.cue | 12 +++++------- 3 files changed, 7 insertions(+), 9 deletions(-) diff --git a/src/transforms/tag_cardinality_limit/config.rs b/src/transforms/tag_cardinality_limit/config.rs index 84ae1fbee2b14..6e52396656427 100644 --- a/src/transforms/tag_cardinality_limit/config.rs +++ b/src/transforms/tag_cardinality_limit/config.rs @@ -47,7 +47,7 @@ pub enum Mode { /// This mode has lower memory requirements than `exact`, but may occasionally allow metric /// events to pass through the transform even when they contain new tags that exceed the /// configured limit. The rate at which this happens can be controlled by changing the value of - /// `cache_size_per_tag`. + /// `cache_size_per_key`. Probabilistic(BloomFilterConfig), } diff --git a/website/cue/reference/components/transforms/base/tag_cardinality_limit.cue b/website/cue/reference/components/transforms/base/tag_cardinality_limit.cue index 916778a9af807..8e961d886ba92 100644 --- a/website/cue/reference/components/transforms/base/tag_cardinality_limit.cue +++ b/website/cue/reference/components/transforms/base/tag_cardinality_limit.cue @@ -42,7 +42,7 @@ base: components: transforms: tag_cardinality_limit: configuration: { This mode has lower memory requirements than `exact`, but may occasionally allow metric events to pass through the transform even when they contain new tags that exceed the configured limit. The rate at which this happens can be controlled by changing the value of - `cache_size_per_tag`. + `cache_size_per_key`. """ } } diff --git a/website/cue/reference/components/transforms/tag_cardinality_limit.cue b/website/cue/reference/components/transforms/tag_cardinality_limit.cue index a00867f11c97a..4535f2d248051 100644 --- a/website/cue/reference/components/transforms/tag_cardinality_limit.cue +++ b/website/cue/reference/components/transforms/tag_cardinality_limit.cue @@ -61,10 +61,8 @@ components: transforms: tag_cardinality_limit: { because it exceeded the `value_limit`. """ configuration: { - fields: { - value_limit: 1 - limit_exceeded_action: "drop_tag" - } + value_limit: 1 + limit_exceeded_action: "drop_tag" } input: [ {metric: { @@ -150,15 +148,15 @@ components: transforms: tag_cardinality_limit: { ```text (number of distinct field names in the tags for your metrics * average length of the field names for the tags) + (number of distinct field names in the tags of - -your metrics * `cache_size_per_tag`) + -your metrics * `cache_size_per_key`) ``` - The `cache_size_per_tag` option controls the size of the bloom filter used + The `cache_size_per_key` option controls the size of the bloom filter used for storing the set of acceptable values for any single key. The larger the bloom filter the lower the false positive rate, which in our case means the less likely we are to allow a new tag value that would otherwise violate a configured limit. If you want to know the exact false positive rate for a given - `cache_size_per_tag` and `value_limit`, there are many free on-line bloom filter + `cache_size_per_key` and `value_limit`, there are many free on-line bloom filter calculators that can answer this. 
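For reference, the standard approximation such calculators implement (the usual textbook identity, assuming independent and uniformly distributed hash functions; stated here for convenience rather than quoted from the docs being patched) is

$$p \approx \left(1 - e^{-kn/m}\right)^{k}, \qquad k_{\text{opt}} = \frac{m}{n}\ln 2,$$

using the 'n', 'p', 'k', 'm' notation that the next sentence defines.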
The formula is generally presented in terms of 'n', 'p', 'k', and 'm' where 'n' is the number of items in the filter (`value_limit` in our case), 'p' is the probability of false positives (what we From f2f16b61825be20c9d9e15328d33880c6addaa9a Mon Sep 17 00:00:00 2001 From: Dylan Werner-Meier Date: Mon, 1 Apr 2024 17:37:14 +0200 Subject: [PATCH 0221/1491] docs(kubernetes platform): fix example kustomization file (#20085) Update kubernetes.md Fix typo. kustomize rejects this base URL with an error message. Use the github format instead --- .../content/en/docs/setup/installation/platforms/kubernetes.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/website/content/en/docs/setup/installation/platforms/kubernetes.md b/website/content/en/docs/setup/installation/platforms/kubernetes.md index d52137b5bd5d9..998aa0d6a135d 100644 --- a/website/content/en/docs/setup/installation/platforms/kubernetes.md +++ b/website/content/en/docs/setup/installation/platforms/kubernetes.md @@ -53,7 +53,7 @@ namespace: vector bases: # Include Vector recommended base (from git). - - github.com/vectordotdev/vector/tree/master/distribution/kubernetes/vector-agent + - github.com/vectordotdev/vector/distribution/kubernetes/vector-agent?ref=master images: # Override the Vector image to pin the version used. From 9e7f6e3a7b095bcea285bd02740c5bc5605e0f08 Mon Sep 17 00:00:00 2001 From: neuronull Date: Mon, 1 Apr 2024 12:50:44 -0600 Subject: [PATCH 0222/1491] fix(splunk_hec_logs sink): don't attempt to remove timestamp if auto extract is enabled (#20213) * fix(splunk_hec_logs sink): don't attempt to remove timestamp if auto extract is enabled * typo * fix integration test expectation * spellcheck --- .../splunk_hec_logs_auto_extract_ts.fix.md | 3 + src/sinks/splunk_hec/logs/config.rs | 1 + .../splunk_hec/logs/integration_tests.rs | 10 +++- src/sinks/splunk_hec/logs/sink.rs | 55 ++++++++++--------- src/sinks/splunk_hec/logs/tests.rs | 36 ++++++++++-- 5 files changed, 73 insertions(+), 32 deletions(-) create mode 100644 changelog.d/splunk_hec_logs_auto_extract_ts.fix.md diff --git a/changelog.d/splunk_hec_logs_auto_extract_ts.fix.md b/changelog.d/splunk_hec_logs_auto_extract_ts.fix.md new file mode 100644 index 0000000000000..6ec0df5afa6a5 --- /dev/null +++ b/changelog.d/splunk_hec_logs_auto_extract_ts.fix.md @@ -0,0 +1,3 @@ +Previously, when the `auto_extract_timestamp` setting in the `splunk_hec_logs` Sink was enabled, the sink was attempting to remove the existing event timestamp. This would throw a warning that the timestamp type was invalid. + +This has been fixed to correctly not attempt to remove the timestamp from the event if `auto_extract_timestamp` is enabled, since this setting indicates that Vector should let Splunk do that. 
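Before the full diff below, a minimal, self-contained Rust sketch of the guard this patch introduces in `process_log` may help; the enum is reduced to the two variants used here, and everything else in the real sink is omitted:

```rust
/// Reduced sketch of the sink's endpoint selector (the real enum lives in
/// the splunk_hec sink's configuration module).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EndpointTarget {
    Event,
    Raw,
}

/// Mirrors the condition added by this patch: the HEC `time` field is only
/// computed (and the event's timestamp field only removed) when the sink
/// targets the event endpoint AND Splunk has not been asked to extract the
/// timestamp itself via `auto_extract_timestamp`.
fn should_set_hec_time(target: EndpointTarget, auto_extract_timestamp: bool) -> bool {
    target == EndpointTarget::Event && !auto_extract_timestamp
}

fn main() {
    // The fixed case: auto extraction enabled, so Vector leaves the event's
    // timestamp alone instead of removing it and warning about its type.
    assert!(!should_set_hec_time(EndpointTarget::Event, true));
    // Default behavior is unchanged: Vector still extracts the timestamp.
    assert!(should_set_hec_time(EndpointTarget::Event, false));
    // The raw endpoint never sets `time`, as before.
    assert!(!should_set_hec_time(EndpointTarget::Raw, false));
}
```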
diff --git a/src/sinks/splunk_hec/logs/config.rs b/src/sinks/splunk_hec/logs/config.rs index 31a6d2b5d7931..67921b3eb790f 100644 --- a/src/sinks/splunk_hec/logs/config.rs +++ b/src/sinks/splunk_hec/logs/config.rs @@ -274,6 +274,7 @@ impl HecLogsSinkConfig { timestamp_nanos_key: self.timestamp_nanos_key.clone(), timestamp_key: self.timestamp_key.path.clone(), endpoint_target: self.endpoint_target, + auto_extract_timestamp: self.auto_extract_timestamp.unwrap_or_default(), }; Ok(VectorSink::from_event_streamsink(sink)) diff --git a/src/sinks/splunk_hec/logs/integration_tests.rs b/src/sinks/splunk_hec/logs/integration_tests.rs index fb47f68052755..63523683bcb7c 100644 --- a/src/sinks/splunk_hec/logs/integration_tests.rs +++ b/src/sinks/splunk_hec/logs/integration_tests.rs @@ -519,8 +519,16 @@ async fn splunk_auto_extracted_timestamp() { let entry = find_entry(&message).await; + // we should not have removed the timestamp from the event in this case, because that only + // happens when we set the `_time` field in the HEC metadata, which we do by extracting the + // timestamp from the event data in vector. Instead, when auto_extract_timestamp is true, + // Splunk will determine the timestamp from the *message* field in the event. + // Thus, we expect the `timestamp` field to still be present. assert_eq!( - format!("{{\"message\":\"{}\"}}", message), + format!( + "{{\"message\":\"{}\",\"timestamp\":\"2020-03-05T00:00:00Z\"}}", + message + ), entry["_raw"].as_str().unwrap() ); assert_eq!( diff --git a/src/sinks/splunk_hec/logs/sink.rs b/src/sinks/splunk_hec/logs/sink.rs index 898b4b5d9cb66..b2eaa4eec8e25 100644 --- a/src/sinks/splunk_hec/logs/sink.rs +++ b/src/sinks/splunk_hec/logs/sink.rs @@ -28,6 +28,7 @@ pub struct HecLogsSink<S> { pub timestamp_nanos_key: Option<String>, pub timestamp_key: Option<OwnedTargetPath>, pub endpoint_target: EndpointTarget, + pub auto_extract_timestamp: bool, } pub struct HecLogData<'a> { @@ -39,6 +40,7 @@ pub struct HecLogData<'a> { pub timestamp_nanos_key: Option<&'a String>, pub timestamp_key: Option<OwnedTargetPath>, pub endpoint_target: EndpointTarget, + pub auto_extract_timestamp: bool, } impl<S> HecLogsSink<S> where @@ -58,6 +60,7 @@ where timestamp_nanos_key: self.timestamp_nanos_key.as_ref(), timestamp_key: self.timestamp_key.clone(), endpoint_target: self.endpoint_target, + auto_extract_timestamp: self.auto_extract_timestamp, }; let batch_settings = self.batch_settings; @@ -233,34 +236,34 @@ pub fn process_log(event: Event, data: &HecLogData) -> HecProcessedEvent { let host = data.host_key.as_ref().and_then(|key| log.get(key)).cloned(); - let timestamp = match data.endpoint_target { - EndpointTarget::Event => { - data.timestamp_key.as_ref().and_then(|timestamp_key| { - match log.remove(timestamp_key) { - Some(Value::Timestamp(ts)) => { - // set nanos in log if valid timestamp in event and timestamp_nanos_key is configured - if let Some(key) = data.timestamp_nanos_key { - log.try_insert( - event_path!(key), - ts.timestamp_subsec_nanos() % 1_000_000, - ); - } - Some((ts.timestamp_millis() as f64) / 1000f64) - } - Some(value) => { - emit!(SplunkEventTimestampInvalidType { - r#type: value.kind_str() - }); - None - } - None => { - emit!(SplunkEventTimestampMissing {}); - None + // only extract the timestamp if this is the Event endpoint, and if the setting + // `auto_extract_timestamp` is false (because that indicates that we should leave + // the timestamp in the event as-is, and let Splunk do the extraction).
+ let timestamp = if EndpointTarget::Event == data.endpoint_target && !data.auto_extract_timestamp + { + data.timestamp_key.as_ref().and_then(|timestamp_key| { + match log.remove(timestamp_key) { + Some(Value::Timestamp(ts)) => { + // set nanos in log if valid timestamp in event and timestamp_nanos_key is configured + if let Some(key) = data.timestamp_nanos_key { + log.try_insert(event_path!(key), ts.timestamp_subsec_nanos() % 1_000_000); + } + Some((ts.timestamp_millis() as f64) / 1000f64) } - }) - } - EndpointTarget::Raw => None, + Some(value) => { + emit!(SplunkEventTimestampInvalidType { + r#type: value.kind_str() + }); + None + } + None => { + emit!(SplunkEventTimestampMissing {}); + None + } + } + }) + } else { + None }; let fields = data diff --git a/src/sinks/splunk_hec/logs/tests.rs b/src/sinks/splunk_hec/logs/tests.rs index d78b0cc6f4e0a..d584601f21a02 100644 --- a/src/sinks/splunk_hec/logs/tests.rs +++ b/src/sinks/splunk_hec/logs/tests.rs @@ -53,6 +53,7 @@ struct HecEventText { fn get_processed_event_timestamp( timestamp: Option<Value>, timestamp_key: Option<OwnedTargetPath>, + auto_extract_timestamp: bool, ) -> HecProcessedEvent { let mut event = Event::Log(LogEvent::from("hello world")); event @@ -94,6 +95,7 @@ fn get_processed_event_timestamp( timestamp_nanos_key: timestamp_nanos_key.as_ref(), timestamp_key, endpoint_target: EndpointTarget::Event, + auto_extract_timestamp, }, ) } @@ -104,6 +106,7 @@ fn get_processed_event() -> HecProcessedEvent { Utc.timestamp_nanos(1638366107111456123), )), config_timestamp_key_target_path().path, + false, ) } @@ -261,8 +264,10 @@ fn splunk_encode_log_event_json_timestamps() { fn get_hec_data_for_timestamp_test( timestamp: Option<Value>, timestamp_key: Option<OwnedTargetPath>, + auto_extract_timestamp: bool, ) -> HecEventJson { - let processed_event = get_processed_event_timestamp(timestamp, timestamp_key); + let processed_event = + get_processed_event_timestamp(timestamp, timestamp_key, auto_extract_timestamp); let encoder = hec_encoder(JsonSerializerConfig::default().into()); let mut bytes = Vec::new(); encoder @@ -271,20 +276,41 @@ fn splunk_encode_log_event_json_timestamps() { serde_json::from_slice::<HecEventJson>(&bytes).unwrap() } - let timestamp = OwnedTargetPath::event(owned_value_path!("timestamp")); + let timestamp_key = OwnedTargetPath::event(owned_value_path!("timestamp")); + let dont_auto_extract = false; + let do_auto_extract = true; // no timestamp_key is provided - let mut hec_data = get_hec_data_for_timestamp_test(None, None); + let mut hec_data = get_hec_data_for_timestamp_test(None, None, dont_auto_extract); assert_eq!(hec_data.time, None); // timestamp_key is provided but timestamp is not valid type hec_data = get_hec_data_for_timestamp_test( Some(vrl::value::Value::Integer(0)), - Some(timestamp.clone()), + Some(timestamp_key.clone()), + false, ); assert_eq!(hec_data.time, None); // timestamp_key is provided but no timestamp in the event - let hec_data = get_hec_data_for_timestamp_test(None, Some(timestamp)); + hec_data = + get_hec_data_for_timestamp_test(None, Some(timestamp_key.clone()), dont_auto_extract); + assert_eq!(hec_data.time, None); + + // timestamp_key is provided and timestamp is valid + hec_data = get_hec_data_for_timestamp_test( + Some(Value::Timestamp(Utc::now())), + Some(timestamp_key.clone()), + dont_auto_extract, + ); + + assert!(hec_data.time.is_some()); + + // timestamp_key is provided and timestamp is valid, but auto_extract_timestamp is set + hec_data = get_hec_data_for_timestamp_test( + Some(Value::Timestamp(Utc::now())), + Some(timestamp_key.clone()), +
do_auto_extract, + ); assert_eq!(hec_data.time, None); } From 044e29daf4b57832481224703c1c6e085c05da80 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 1 Apr 2024 19:47:25 +0000 Subject: [PATCH 0223/1491] chore(deps): Bump chrono from 0.4.34 to 0.4.37 (#20195) * chore(deps): Bump chrono from 0.4.34 to 0.4.37 Bumps [chrono](https://github.com/chronotope/chrono) from 0.4.34 to 0.4.37. - [Release notes](https://github.com/chronotope/chrono/releases) - [Changelog](https://github.com/chronotope/chrono/blob/main/CHANGELOG.md) - [Commits](https://github.com/chronotope/chrono/compare/v0.4.34...v0.4.37) --- updated-dependencies: - dependency-name: chrono dependency-type: direct:production update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] * clippy Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- lib/codecs/src/decoding/format/gelf.rs | 11 +++++------ lib/codecs/src/encoding/format/protobuf.rs | 7 ++----- lib/vector-core/src/event/test/common.rs | 6 ++---- src/sources/gcp_pubsub.rs | 10 +++------- 6 files changed, 15 insertions(+), 25 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4b466f469af8f..0f2ff8f17e727 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1871,9 +1871,9 @@ dependencies = [ [[package]] name = "chrono" -version = "0.4.34" +version = "0.4.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5bc015644b92d5890fab7489e49d21f879d5c990186827d42ec511919404f38b" +checksum = "8a0d04d43504c61aa6c7531f1871dd0d418d91130162063b789da00fd7057a5e" dependencies = [ "android-tzdata", "iana-time-zone", diff --git a/Cargo.toml b/Cargo.toml index f843a0fffc33d..b494014b5494a 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -129,7 +129,7 @@ members = [ ] [workspace.dependencies] -chrono = { version = "0.4.34", default-features = false, features = ["clock", "serde"] } +chrono = { version = "0.4.37", default-features = false, features = ["clock", "serde"] } clap = { version = "4.5.4", default-features = false, features = ["derive", "error-context", "env", "help", "std", "string", "usage", "wrap_help"] } indexmap = { version = "2.2.6", default-features = false, features = ["serde", "std"] } pin-project = { version = "1.1.5", default-features = false } diff --git a/lib/codecs/src/decoding/format/gelf.rs b/lib/codecs/src/decoding/format/gelf.rs index 578d01f0ffc99..66613cee9c679 100644 --- a/lib/codecs/src/decoding/format/gelf.rs +++ b/lib/codecs/src/decoding/format/gelf.rs @@ -1,5 +1,5 @@ use bytes::Bytes; -use chrono::{NaiveDateTime, Utc}; +use chrono::{DateTime, Utc}; use derivative::Derivative; use lookup::{event_path, owned_value_path}; use serde::{Deserialize, Serialize}; @@ -133,12 +133,12 @@ impl GelfDeserializer { if let Some(timestamp_key) = log_schema().timestamp_key_target_path() { if let Some(timestamp) = parsed.timestamp { - let naive = NaiveDateTime::from_timestamp_opt( + let dt = DateTime::from_timestamp( f64::trunc(timestamp) as i64, f64::fract(timestamp) as u32, ) .expect("invalid timestamp"); - log.insert(timestamp_key, naive.and_utc()); + log.insert(timestamp_key, dt); // per GELF spec- add timestamp if not provided } else { log.insert(timestamp_key, Utc::now()); @@ -239,7 +239,6 @@ impl Deserializer for GelfDeserializer { mod tests { use super::*; use bytes::Bytes; - use 
chrono::NaiveDateTime; use lookup::event_path; use serde_json::json; use similar_asserts::assert_eq; @@ -303,8 +302,8 @@ mod tests { ))) ); // Vector does not use the nanos - let naive = NaiveDateTime::from_timestamp_opt(1385053862, 0).expect("invalid timestamp"); - assert_eq!(log.get(TIMESTAMP), Some(&Value::Timestamp(naive.and_utc()))); + let dt = DateTime::from_timestamp(1385053862, 0).expect("invalid timestamp"); + assert_eq!(log.get(TIMESTAMP), Some(&Value::Timestamp(dt))); assert_eq!(log.get(LEVEL), Some(&Value::Integer(1))); assert_eq!( log.get(FACILITY), diff --git a/lib/codecs/src/encoding/format/protobuf.rs b/lib/codecs/src/encoding/format/protobuf.rs index 6c0fe8eae9e5e..ce2a04e684bb9 100644 --- a/lib/codecs/src/encoding/format/protobuf.rs +++ b/lib/codecs/src/encoding/format/protobuf.rs @@ -223,7 +223,7 @@ impl Encoder<Event> for ProtobufSerializer { mod tests { use super::*; use bytes::Bytes; - use chrono::{DateTime, NaiveDateTime, Utc}; + use chrono::DateTime; use ordered_float::NotNan; use prost_reflect::MapKey; use std::collections::{BTreeMap, HashMap}; @@ -381,10 +381,7 @@ mod tests { &test_message_descriptor("Timestamp"), Value::Object(BTreeMap::from([( "morning".into(), - Value::Timestamp(DateTime::from_naive_utc_and_offset( - NaiveDateTime::from_timestamp_opt(8675, 309).unwrap(), - Utc, - )), + Value::Timestamp(DateTime::from_timestamp(8675, 309).unwrap()), )])), ) .unwrap(); diff --git a/lib/vector-core/src/event/test/common.rs b/lib/vector-core/src/event/test/common.rs index d258e1fa3b7ff..431a4f3eee417 100644 --- a/lib/vector-core/src/event/test/common.rs +++ b/lib/vector-core/src/event/test/common.rs @@ -1,6 +1,6 @@ use std::{collections::BTreeSet, iter}; -use chrono::{DateTime, NaiveDateTime, Utc}; +use chrono::{DateTime, Utc}; use quickcheck::{empty_shrinker, Arbitrary, Gen}; use vrl::value::{ObjectMap, Value}; @@ -50,9 +50,7 @@ fn datetime(g: &mut Gen) -> DateTime<Utc> { // are. We just sort of arbitrarily restrict things. let secs = i64::arbitrary(g) % 32_000; let nanosecs = u32::arbitrary(g) % 32_000; - NaiveDateTime::from_timestamp_opt(secs, nanosecs) - .expect("invalid timestamp") - .and_utc() + DateTime::from_timestamp(secs, nanosecs).expect("invalid timestamp") } impl Arbitrary for Event { diff --git a/src/sources/gcp_pubsub.rs b/src/sources/gcp_pubsub.rs index 01412f0afec9d..8e7bb880ef590 100644 --- a/src/sources/gcp_pubsub.rs +++ b/src/sources/gcp_pubsub.rs @@ -4,7 +4,7 @@ use std::{ time::Duration, }; -use chrono::NaiveDateTime; +use chrono::DateTime; use derivative::Derivative; use futures::{stream, stream::FuturesUnordered, FutureExt, Stream, StreamExt, TryFutureExt}; use http::uri::{InvalidUri, Scheme, Uri}; @@ -675,9 +675,7 @@ impl PubsubSource { "gcp_pubsub", &message.data, message.publish_time.map(|dt| { - NaiveDateTime::from_timestamp_opt(dt.seconds, dt.nanos as u32) - .expect("invalid timestamp") - .and_utc() + DateTime::from_timestamp(dt.seconds, dt.nanos as u32).expect("invalid timestamp") }), batch, log_namespace, @@ -1004,9 +1002,7 @@ mod integration_tests { fn now_trunc() -> DateTime<Utc> { let start = Utc::now().timestamp(); // Truncate the milliseconds portion, the hard way.
- NaiveDateTime::from_timestamp_opt(start, 0) - .expect("invalid timestamp") - .and_utc() + DateTime::from_timestamp(start, 0).expect("invalid timestamp") } struct Tester { From 1fb257efb4fc9e9fc28304281292b5fab5601614 Mon Sep 17 00:00:00 2001 From: neuronull Date: Tue, 2 Apr 2024 07:45:46 -0600 Subject: [PATCH 0224/1491] fix(datadog_logs sink): reconstruct `ddtags` if not already in format expected by DD logs intake (#20198) * chore(datadog_agent): align ddtags parsed output with DD logs intake * changelog * feedback- changelog * component docs * tags reconstruct in dd logs * upgrade guide note * spell check * upgrade/changelog notes about dd logs * rm extraneous upgrade guide * feedback bruce * changelog * Revert "component docs" This reverts commit 2096c73dbf8c8b96e7a63ec33c1dd18424102b2d. * Revert "feedback- changelog" This reverts commit dda95120231ca1aab4df6c3e44c19a92b2ebdd4a. * Revert "changelog" This reverts commit a2789818f8d068b16879beb4ffe64844650ee10a. * Revert "chore(datadog_agent): align ddtags parsed output with DD logs intake" This reverts commit 4c657c60d163e186f6340d57c2bb3170140bdbdf. * Revert "upgrade guide note" This reverts commit 10cbbf9f3e7fbecca3b8ac5cd0977d27c930292e. * use correct attribute name * one more ddtag rename * feedback pavlos and const usage * fix agent unit tests * feedback bruce- constify ddtags * better path compare * feedback bruce- itertools ftw * Compare the owned path to the field name without creating an `OwnedTargetPath` * Revert "fix agent unit tests" This reverts commit f2d21608a4c2832c4bdc597400c6402f22eb3723. * restore dd agent --------- Co-authored-by: Bruce Guenter --- changelog.d/dd_logs_reconstruct_ddtags.fix.md | 1 + lib/vector-core/src/schema/meaning.rs | 4 + src/common/datadog.rs | 2 + src/sinks/datadog/logs/sink.rs | 211 +++++++++++++++--- src/sources/datadog_agent/logs.rs | 5 +- 5 files changed, 195 insertions(+), 28 deletions(-) create mode 100644 changelog.d/dd_logs_reconstruct_ddtags.fix.md diff --git a/changelog.d/dd_logs_reconstruct_ddtags.fix.md b/changelog.d/dd_logs_reconstruct_ddtags.fix.md new file mode 100644 index 0000000000000..32dff3c444f8e --- /dev/null +++ b/changelog.d/dd_logs_reconstruct_ddtags.fix.md @@ -0,0 +1 @@ +The `datadog_logs` sink was not re-constructing ddtags that may have been parsed upstream by the `datadog_agent` source's `parse_ddtags` setting. The sink log encoding was fixed to re-assemble the tags into a unified string that the Datadog logs intake expects. diff --git a/lib/vector-core/src/schema/meaning.rs b/lib/vector-core/src/schema/meaning.rs index ab766b0986924..ed8efe1e05a31 100644 --- a/lib/vector-core/src/schema/meaning.rs +++ b/lib/vector-core/src/schema/meaning.rs @@ -12,6 +12,10 @@ pub const TIMESTAMP: &str = "timestamp"; /// The hostname of the machine where the event was generated. pub const HOST: &str = "host"; +/// The tags of an event, generally a key-value paired list. 
+pub const TAGS: &str = "tags"; + pub const SOURCE: &str = "source"; pub const SEVERITY: &str = "severity"; +pub const STATUS: &str = "status"; pub const TRACE_ID: &str = "trace_id"; diff --git a/src/common/datadog.rs b/src/common/datadog.rs index 5077453861fbd..936cc87131016 100644 --- a/src/common/datadog.rs +++ b/src/common/datadog.rs @@ -9,6 +9,8 @@ use vector_lib::{event::DatadogMetricOriginMetadata, sensitive_string::Sensitive pub(crate) const DD_US_SITE: &str = "datadoghq.com"; pub(crate) const DD_EU_SITE: &str = "datadoghq.eu"; +pub(crate) const DDTAGS: &str = "ddtags"; + /// DatadogSeriesMetric #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct DatadogSeriesMetric { diff --git a/src/sinks/datadog/logs/sink.rs b/src/sinks/datadog/logs/sink.rs index 653bab3681b46..de49e5edf829a 100644 --- a/src/sinks/datadog/logs/sink.rs +++ b/src/sinks/datadog/logs/sink.rs @@ -1,15 +1,21 @@ use std::{collections::VecDeque, fmt::Debug, io, sync::Arc}; +use itertools::Itertools; use snafu::Snafu; use vector_lib::{ internal_event::{ComponentEventsDropped, UNINTENTIONAL}, lookup::event_path, + schema::meaning, }; +use vrl::path::{OwnedSegment, OwnedTargetPath, PathPrefix}; use super::{config::MAX_PAYLOAD_BYTES, service::LogApiRequest}; -use crate::sinks::{ - prelude::*, - util::{http::HttpJsonBatchSizer, Compressor}, +use crate::{ + common::datadog::DDTAGS, + sinks::{ + prelude::*, + util::{http::HttpJsonBatchSizer, Compressor}, + }, }; #[derive(Default)] struct EventPartitioner; @@ -102,6 +108,48 @@ fn normalize_event(event: &mut Event) { log.rename_key(host_path, event_path!("hostname")); } + if let Some(ddtags_path) = log.find_key_by_meaning(meaning::TAGS) { + let ddtags_path = ddtags_path.clone(); + + // first, if the value is an array we need to reconstruct it to a comma delimited string for DD logs intake. + if let Some(Value::Array(tags_arr)) = log.get(&ddtags_path) { + if !tags_arr.is_empty() { + let all_tags: String = tags_arr + .iter() + .filter_map(|tag_kv| { + tag_kv + .as_bytes() + .map(|bytes| String::from_utf8_lossy(bytes)) + }) + .join(","); + + log.insert(&ddtags_path, all_tags); + } + } + + // now, the tags attribute must be at the event root so we will move it there if + // needed and move any conflicting field if any. + if !path_is_field(&ddtags_path, DDTAGS) { + let desired_path = event_path!(DDTAGS); + + // if an existing attribute exists here already, move it so to not overwrite it. + // yes, technically the rename path could exist, but technically that could always be the case. + if log.contains(desired_path) { + let rename_attr = format!("_RESERVED_{}", DDTAGS); + let rename_path = event_path!(rename_attr.as_str()); + warn!( + message = "Semantic meaning is defined, but the event path already exists. Renaming to not overwrite.", + meaning = meaning::TAGS, + renamed = &rename_attr, + internal_log_rate_limit = true, + ); + log.rename_key(desired_path, rename_path); + } + + log.rename_key(&ddtags_path, desired_path); + } + } + if let Some(timestamp_path) = log.timestamp_path().cloned().as_ref() { if let Some(Value::Timestamp(ts)) = log.remove(timestamp_path) { log.insert( @@ -112,6 +160,16 @@ fn normalize_event(event: &mut Event) { } } +// Test if the named path consists of the single named field. This is rather a hack and should +// hypothetically be solvable in the `vrl` crate with an implementation of +// `PartialEq>`. 
The alternative is doing a comparison against another +// `OwnedTargetPath`, but the naïve implementation of that requires multiple allocations and copies +// just to test equality. +fn path_is_field(path: &OwnedTargetPath, field: &str) -> bool { + path.prefix == PathPrefix::Event + && matches!(&path.path.segments[..], [OwnedSegment::Field(f)] if f.as_str() == field) +} + #[derive(Debug, Snafu)] pub enum RequestBuildError { #[snafu(display("Encoded payload is greater than the max limit."))] @@ -323,15 +381,41 @@ where #[cfg(test)] mod tests { + use std::sync::Arc; + use chrono::Utc; use vector_lib::{ - config::LegacyKey, - event::{Event, LogEvent}, + config::{LegacyKey, LogNamespace}, + event::{Event, EventMetadata, LogEvent}, + schema::{meaning, Definition}, + }; + use vrl::{ + core::Value, + event_path, metadata_path, owned_value_path, + value::{kind::Collection, Kind}, }; - use vrl::{event_path, owned_value_path, path}; use super::normalize_event; + fn assert_normalized_log_has_expected_attrs(log: &LogEvent) { + assert!(log.contains(event_path!("message"))); + + assert!(log.contains(event_path!("timestamp"))); + + assert!(log + .get(event_path!("timestamp")) + .expect("should have timestamp") + .is_integer()); + + assert!(log.contains(event_path!("hostname"))); + + assert!(log.contains(event_path!("ddtags"))); + assert_eq!( + log.get(event_path!("ddtags")).expect("should have tags"), + &Value::Bytes("key1:value1,key2:value2".into()) + ); + } + #[test] fn normalize_event_doesnt_require() { let mut log = LogEvent::default(); @@ -348,33 +432,108 @@ mod tests { } #[test] - fn normalize_event_normalizes() { - let mut log = LogEvent::from("hello"); + fn normalize_event_normalizes_legacy_namespace() { + let metadata = EventMetadata::default().with_schema_definition(&Arc::new( + Definition::new_with_default_metadata( + Kind::object(Collection::empty()), + [LogNamespace::Legacy], + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("ddtags"))), + &owned_value_path!("ddtags"), + Kind::bytes(), + Some(meaning::TAGS), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("hostname"))), + &owned_value_path!("hostname"), + Kind::bytes(), + Some(meaning::HOST), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("timestamp"))), + &owned_value_path!("timestamp"), + Kind::timestamp(), + Some(meaning::TIMESTAMP), + ), + )); + + let mut log = LogEvent::new_with_metadata(metadata); + log.insert(event_path!("message"), "the_message"); let namespace = log.namespace(); - namespace.insert_standard_vector_source_metadata(&mut log, "this_source", Utc::now()); + namespace.insert_standard_vector_source_metadata(&mut log, "datadog_agent", Utc::now()); - let legacy_key = Some(owned_value_path!("host")); - let legacy_key = legacy_key.as_ref().map(LegacyKey::Overwrite); - namespace.insert_source_metadata( - "this_source", - &mut log, - legacy_key, - path!("host"), - "the_host", - ); + log.insert(event_path!("hostname"), "the_host"); + + let tags = vec![ + Value::Bytes("key1:value1".into()), + Value::Bytes("key2:value2".into()), + ]; + log.insert(event_path!("ddtags"), tags); + + assert!(log.namespace() == LogNamespace::Legacy); let mut event = Event::Log(log); normalize_event(&mut event); - let log = event.as_log(); + assert_normalized_log_has_expected_attrs(event.as_log()); + } - assert!(log.contains(event_path!("message"))); - assert!(log.contains(event_path!("timestamp"))); - 
assert!(log - .get_timestamp() - .expect("should have timestamp") - .is_integer()); - assert!(log.contains(event_path!("hostname"))); + #[test] + fn normalize_event_normalizes_vector_namespace() { + let metadata = EventMetadata::default().with_schema_definition(&Arc::new( + Definition::new_with_default_metadata(Kind::bytes(), [LogNamespace::Vector]) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("ddtags"))), + &owned_value_path!("ddtags"), + Kind::bytes(), + Some(meaning::TAGS), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("hostname"))), + &owned_value_path!("hostname"), + Kind::bytes(), + Some(meaning::HOST), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("timestamp"))), + &owned_value_path!("timestamp"), + Kind::timestamp(), + Some(meaning::TIMESTAMP), + ), + )); + + let mut log = LogEvent::new_with_metadata(metadata); + log.insert(event_path!("message"), "the_message"); + + // insert an arbitrary metadata field such that the log becomes Vector namespaced + log.insert(metadata_path!("vector", "foo"), "bar"); + + let namespace = log.namespace(); + namespace.insert_standard_vector_source_metadata(&mut log, "datadog_agent", Utc::now()); + + let tags = vec![ + Value::Bytes("key1:value1".into()), + Value::Bytes("key2:value2".into()), + ]; + log.insert(metadata_path!("datadog_agent", "ddtags"), tags); + + log.insert(metadata_path!("datadog_agent", "hostname"), "the_host"); + + log.insert(metadata_path!("datadog_agent", "timestamp"), Utc::now()); + + assert!(log.namespace() == LogNamespace::Vector); + + let mut event = Event::Log(log); + normalize_event(&mut event); + + assert_normalized_log_has_expected_attrs(event.as_log()); } } diff --git a/src/sources/datadog_agent/logs.rs b/src/sources/datadog_agent/logs.rs index 398febe5a07b3..50ee4aebb2d14 100644 --- a/src/sources/datadog_agent/logs.rs +++ b/src/sources/datadog_agent/logs.rs @@ -12,6 +12,7 @@ use vector_lib::{config::LegacyKey, EstimatedJsonEncodedSizeOf}; use vrl::core::Value; use warp::{filters::BoxedFilter, path as warp_path, path::FullPath, reply::Response, Filter}; +use crate::common::datadog::DDTAGS; use crate::{ event::Event, internal_events::DatadogAgentJsonParseError, @@ -160,8 +161,8 @@ pub(crate) fn decode_log_body( namespace.insert_source_metadata( source_name, log, - Some(LegacyKey::InsertIfEmpty(path!("ddtags"))), - path!("ddtags"), + Some(LegacyKey::InsertIfEmpty(path!(DDTAGS))), + path!(DDTAGS), ddtags, ); From 562f7b780ad339f933d49ee119b2646467a401f5 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 06:46:05 -0700 Subject: [PATCH 0225/1491] chore(deps): Bump async-compression from 0.4.6 to 0.4.7 (#20221) Bumps [async-compression](https://github.com/Nullus157/async-compression) from 0.4.6 to 0.4.7. - [Release notes](https://github.com/Nullus157/async-compression/releases) - [Changelog](https://github.com/Nullus157/async-compression/blob/main/CHANGELOG.md) - [Commits](https://github.com/Nullus157/async-compression/compare/async-compression-v0.4.6...async-compression-v0.4.7) --- updated-dependencies: - dependency-name: async-compression dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> --- Cargo.lock | 4 ++-- Cargo.toml | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0f2ff8f17e727..0fedd88643a31 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -359,9 +359,9 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.6" +version = "0.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a116f46a969224200a0a97f29cfd4c50e7534e4b4826bd23ea2c3c533039c82c" +checksum = "86a9249d1447a85f95810c620abea82e001fe58a31713fcce614caf52499f905" dependencies = [ "flate2", "futures-core", diff --git a/Cargo.toml b/Cargo.toml index b494014b5494a..40884b19ae207 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -254,7 +254,7 @@ greptimedb-client = { git = "https://github.com/GreptimeTeam/greptimedb-ingester # External libs arc-swap = { version = "1.7", default-features = false, optional = true } -async-compression = { version = "0.4.6", default-features = false, features = ["tokio", "gzip", "zstd"], optional = true } +async-compression = { version = "0.4.7", default-features = false, features = ["tokio", "gzip", "zstd"], optional = true } apache-avro = { version = "0.16.0", default-features = false, optional = true } axum = { version = "0.6.20", default-features = false } base64 = { version = "0.22.0", default-features = false, optional = true } From 667331435254c84ebe8a3d2b5173cebfcbcc7507 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 2 Apr 2024 08:51:25 -0700 Subject: [PATCH 0226/1491] chore(deps): Bump hostname from 0.3.1 to 0.4.0 (#20222) * chore(deps): Bump hostname from 0.3.1 to 0.4.0 Bumps [hostname](https://github.com/svartalf/hostname) from 0.3.1 to 0.4.0. - [Release notes](https://github.com/svartalf/hostname/releases) - [Changelog](https://github.com/svartalf/hostname/blob/master/CHANGELOG.md) - [Commits](https://github.com/svartalf/hostname/compare/v0.3.1...v0.4.0) --- updated-dependencies: - dependency-name: hostname dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] * regenerate licenses Signed-off-by: Jesse Szwedko --------- Signed-off-by: dependabot[bot] Signed-off-by: Jesse Szwedko Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Co-authored-by: Jesse Szwedko --- Cargo.lock | 40 +++++++++++++++++++++++++++++++++++----- Cargo.toml | 2 +- LICENSE-3rdparty.csv | 11 +---------- 3 files changed, 37 insertions(+), 16 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 0fedd88643a31..559465dea3d8e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -4089,6 +4089,17 @@ dependencies = [ "winapi", ] +[[package]] +name = "hostname" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f9c7c7c8ac16c798734b8a24560c1362120597c40d5e1459f09498f8f6c8f2ba" +dependencies = [ + "cfg-if", + "libc", + "windows", +] + [[package]] name = "http" version = "0.2.9" @@ -4399,7 +4410,7 @@ dependencies = [ "iana-time-zone-haiku", "js-sys", "wasm-bindgen", - "windows-core", + "windows-core 0.51.1", ] [[package]] @@ -7556,7 +7567,7 @@ version = "0.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "52e44394d2086d010551b14b53b1f24e31647570cd1deb0379e2c21b329aba00" dependencies = [ - "hostname", + "hostname 0.3.1", "quick-error", ] @@ -8867,7 +8878,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "7434e95bcccce1215d30f4bf84fe8c00e8de1b9be4fb736d747ca53d36e7f96f" dependencies = [ "error-chain", - "hostname", + "hostname 0.3.1", "libc", "log", "time", @@ -10090,7 +10101,7 @@ dependencies = [ "heim", "hex", "hickory-proto", - "hostname", + "hostname 0.4.0", "http 0.2.9", "http-body 0.4.5", "http-serde", @@ -10588,7 +10599,7 @@ dependencies = [ "grok", "hex", "hmac", - "hostname", + "hostname 0.3.1", "iana-time-zone", "idna 0.5.0", "indexmap 2.2.6", @@ -10941,6 +10952,16 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e48a53791691ab099e5e2ad123536d0fff50652600abaf43bbf952894110d0be" +dependencies = [ + "windows-core 0.52.0", + "windows-targets 0.52.0", +] + [[package]] name = "windows-core" version = "0.51.1" @@ -10950,6 +10971,15 @@ dependencies = [ "windows-targets 0.48.5", ] +[[package]] +name = "windows-core" +version = "0.52.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "33ab640c8d7e35bf8ba19b884ba838ceb4fba93a4e8c65a9059d08afcfc683d9" +dependencies = [ + "windows-targets 0.52.0", +] + [[package]] name = "windows-service" version = "0.6.0" diff --git a/Cargo.toml b/Cargo.toml index 40884b19ae207..69bddf4814881 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -282,7 +282,7 @@ h2 = { version = "0.4.3", default-features = false, optional = true } hash_hasher = { version = "2.0.0", default-features = false } hashbrown = { version = "0.14.3", default-features = false, optional = true, features = ["ahash"] } headers = { version = "0.3.9", default-features = false } -hostname = { version = "0.3.1", default-features = false } +hostname = { version = "0.4.0", default-features = false } http = { version = "0.2.9", default-features = false } http-serde = "1.1.3" http-body = { version = "0.4.5", default-features = false } diff --git a/LICENSE-3rdparty.csv b/LICENSE-3rdparty.csv index 3387e033dc954..ea34d1ecd2dcc 100644 --- a/LICENSE-3rdparty.csv +++ 
b/LICENSE-3rdparty.csv @@ -650,17 +650,8 @@ widestring,https://github.com/starkat99/widestring-rs,MIT OR Apache-2.0,Kathryn widestring,https://github.com/starkat99/widestring-rs,MIT OR Apache-2.0,The widestring Authors winapi,https://github.com/retep998/winapi-rs,MIT OR Apache-2.0,Peter Atashian winapi-util,https://github.com/BurntSushi/winapi-util,Unlicense OR MIT,Andrew Gallant -windows-core,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft +windows,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft windows-service,https://github.com/mullvad/windows-service-rs,MIT OR Apache-2.0,Mullvad VPN -windows-sys,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows-targets,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_aarch64_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_aarch64_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_i686_gnu,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_i686_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_x86_64_gnu,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_x86_64_gnullvm,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft -windows_x86_64_msvc,https://github.com/microsoft/windows-rs,MIT OR Apache-2.0,Microsoft winnow,https://github.com/winnow-rs/winnow,MIT,The winnow Authors winreg,https://github.com/gentoo90/winreg-rs,MIT,Igor Shaula woothee,https://github.com/woothee/woothee-rust,Apache-2.0,hhatto From 38f4868a9e35dade00098ff71bf5c3c294c335d0 Mon Sep 17 00:00:00 2001 From: neuronull Date: Tue, 2 Apr 2024 16:12:28 -0600 Subject: [PATCH 0227/1491] chore(datadog_logs sink): properly encode all semantically defined DD reserved attributes (#20226) * chore(datadog_logs sink): properly encode all semantically defined DD reserved attributes * fix comment * feedback and caught one other thing * fix bug * fix unit test * grr --- lib/vector-core/src/schema/meaning.rs | 1 - scripts/integration/datadog-logs/test.yaml | 2 +- src/common/datadog.rs | 16 +- src/sinks/datadog/logs/sink.rs | 177 ++++++++++++++------- src/sources/datadog_agent/mod.rs | 16 +- 5 files changed, 144 insertions(+), 68 deletions(-) diff --git a/lib/vector-core/src/schema/meaning.rs b/lib/vector-core/src/schema/meaning.rs index ed8efe1e05a31..450cfc7c9442b 100644 --- a/lib/vector-core/src/schema/meaning.rs +++ b/lib/vector-core/src/schema/meaning.rs @@ -17,5 +17,4 @@ pub const TAGS: &str = "tags"; pub const SOURCE: &str = "source"; pub const SEVERITY: &str = "severity"; -pub const STATUS: &str = "status"; pub const TRACE_ID: &str = "trace_id"; diff --git a/scripts/integration/datadog-logs/test.yaml b/scripts/integration/datadog-logs/test.yaml index 30a99f8a87ae7..7937db5d87158 100644 --- a/scripts/integration/datadog-logs/test.yaml +++ b/scripts/integration/datadog-logs/test.yaml @@ -1,7 +1,7 @@ features: - datadog-logs-integration-tests -test_filter: '::datadog::logs::' +test_filter: '::datadog::logs::integration_tests::' runner: env: diff --git a/src/common/datadog.rs b/src/common/datadog.rs index 936cc87131016..698c51cc38e7f 100644 --- a/src/common/datadog.rs +++ b/src/common/datadog.rs @@ -4,13 +4,27 @@ #![allow(dead_code)] #![allow(unreachable_pub)] use serde::{Deserialize, Serialize}; -use vector_lib::{event::DatadogMetricOriginMetadata, sensitive_string::SensitiveString}; +use vector_lib::{ + event::DatadogMetricOriginMetadata, 
schema::meaning, sensitive_string::SensitiveString, +}; pub(crate) const DD_US_SITE: &str = "datadoghq.com"; pub(crate) const DD_EU_SITE: &str = "datadoghq.eu"; pub(crate) const DDTAGS: &str = "ddtags"; +/// Mapping of the semantic meaning of well known Datadog reserved attributes +/// to the field name that Datadog intake expects. +// https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/?s=severity#reserved-attributes +pub(crate) const DD_RESERVED_SEMANTIC_ATTRS: [(&str, &str); 6] = [ + (meaning::SEVERITY, "status"), // status is intentionally semantically defined as severity + (meaning::TIMESTAMP, "timestamp"), + (meaning::HOST, "hostname"), + (meaning::SERVICE, "service"), + (meaning::SOURCE, "ddsource"), + (meaning::TAGS, DDTAGS), +]; + /// DatadogSeriesMetric #[derive(Debug, Clone, PartialEq, Deserialize, Serialize)] pub struct DatadogSeriesMetric { diff --git a/src/sinks/datadog/logs/sink.rs b/src/sinks/datadog/logs/sink.rs index de49e5edf829a..56e40811d18b9 100644 --- a/src/sinks/datadog/logs/sink.rs +++ b/src/sinks/datadog/logs/sink.rs @@ -5,13 +5,12 @@ use snafu::Snafu; use vector_lib::{ internal_event::{ComponentEventsDropped, UNINTENTIONAL}, lookup::event_path, - schema::meaning, }; use vrl::path::{OwnedSegment, OwnedTargetPath, PathPrefix}; use super::{config::MAX_PAYLOAD_BYTES, service::LogApiRequest}; use crate::{ - common::datadog::DDTAGS, + common::datadog::{DDTAGS, DD_RESERVED_SEMANTIC_ATTRS}, sinks::{ prelude::*, util::{http::HttpJsonBatchSizer, Compressor}, @@ -100,63 +99,72 @@ pub struct LogSink { fn normalize_event(event: &mut Event) { let log = event.as_mut_log(); - if let Some(message_path) = log.message_path().cloned().as_ref() { - log.rename_key(message_path, event_path!("message")); + // Upstream Sources may have semantically defined Datadog reserved attributes outside of their + // expected location by DD logs intake (root of the event). Move them if needed. + for (meaning, expected_field_name) in DD_RESERVED_SEMANTIC_ATTRS { + // check if there is a semantic meaning for the reserved attribute + if let Some(current_path) = log.find_key_by_meaning(meaning).cloned() { + // move it to the desired location + position_reserved_attr_event_root(log, ¤t_path, expected_field_name, meaning); + } } - if let Some(host_path) = log.host_path().cloned().as_ref() { - log.rename_key(host_path, event_path!("hostname")); - } + // if the tags value is an array we need to reconstruct it to a comma delimited string for DD logs intake. + // NOTE: we don't access by semantic meaning here because in the prior step + // we ensured reserved attributes are in expected locations. + let ddtags_path = event_path!(DDTAGS); + if let Some(Value::Array(tags_arr)) = log.get(ddtags_path) { + if !tags_arr.is_empty() { + let all_tags: String = tags_arr + .iter() + .filter_map(|tag_kv| { + tag_kv + .as_bytes() + .map(|bytes| String::from_utf8_lossy(bytes)) + }) + .join(","); - if let Some(ddtags_path) = log.find_key_by_meaning(meaning::TAGS) { - let ddtags_path = ddtags_path.clone(); - - // first, if the value is an array we need to reconstruct it to a comma delimited string for DD logs intake. 
- if let Some(Value::Array(tags_arr)) = log.get(&ddtags_path) { - if !tags_arr.is_empty() { - let all_tags: String = tags_arr - .iter() - .filter_map(|tag_kv| { - tag_kv - .as_bytes() - .map(|bytes| String::from_utf8_lossy(bytes)) - }) - .join(","); - - log.insert(&ddtags_path, all_tags); - } + log.insert(ddtags_path, all_tags); } + } - // now, the tags attribute must be at the event root so we will move it there if - // needed and move any conflicting field if any. - if !path_is_field(&ddtags_path, DDTAGS) { - let desired_path = event_path!(DDTAGS); - - // if an existing attribute exists here already, move it so to not overwrite it. - // yes, technically the rename path could exist, but technically that could always be the case. - if log.contains(desired_path) { - let rename_attr = format!("_RESERVED_{}", DDTAGS); - let rename_path = event_path!(rename_attr.as_str()); - warn!( - message = "Semantic meaning is defined, but the event path already exists. Renaming to not overwrite.", - meaning = meaning::TAGS, - renamed = &rename_attr, - internal_log_rate_limit = true, - ); - log.rename_key(desired_path, rename_path); - } - - log.rename_key(&ddtags_path, desired_path); - } + // ensure the timestamp is in expected format + // NOTE: we don't access by semantic meaning here because in the prior step + // we ensured reserved attributes are in expected locations. + let ts_path = event_path!("timestamp"); + if let Some(Value::Timestamp(ts)) = log.remove(ts_path) { + log.insert(ts_path, Value::Integer(ts.timestamp_millis())); } +} - if let Some(timestamp_path) = log.timestamp_path().cloned().as_ref() { - if let Some(Value::Timestamp(ts)) = log.remove(timestamp_path) { - log.insert( - event_path!("timestamp"), - Value::Integer(ts.timestamp_millis()), +// If an expected reserved attribute is not located in the event root, rename it and handle +// any potential conflicts by preserving the conflicting one with a _RESERVED_ prefix. +fn position_reserved_attr_event_root( + log: &mut LogEvent, + current_path: &OwnedTargetPath, + expected_field_name: &str, + meaning: &str, +) { + // the path that DD archives expects this reserved attribute to be in. + let desired_path = event_path!(expected_field_name); + + // if not already be at the expected location + if !path_is_field(current_path, expected_field_name) { + // if an existing attribute exists here already, move it so to not overwrite it. + // yes, technically the rename path could exist, but technically that could always be the case. + if log.contains(desired_path) { + let rename_attr = format!("_RESERVED_{}", meaning); + let rename_path = event_path!(rename_attr.as_str()); + warn!( + message = "Semantic meaning is defined, but the event path already exists. 
Renaming to not overwrite.", + meaning = meaning, + renamed = &rename_attr, + internal_log_rate_limit = true, ); + log.rename_key(desired_path, rename_path); } + + log.rename_key(current_path, desired_path); } } @@ -398,18 +406,22 @@ mod tests { use super::normalize_event; fn assert_normalized_log_has_expected_attrs(log: &LogEvent) { - assert!(log.contains(event_path!("message"))); - - assert!(log.contains(event_path!("timestamp"))); - assert!(log .get(event_path!("timestamp")) .expect("should have timestamp") .is_integer()); - assert!(log.contains(event_path!("hostname"))); + for attr in [ + "message", + "timestamp", + "hostname", + "ddtags", + "service", + "status", + ] { + assert!(log.contains(event_path!(attr)), "missing {}", attr); + } - assert!(log.contains(event_path!("ddtags"))); assert_eq!( log.get(event_path!("ddtags")).expect("should have tags"), &Value::Bytes("key1:value1,key2:value2".into()) @@ -458,6 +470,27 @@ mod tests { &owned_value_path!("timestamp"), Kind::timestamp(), Some(meaning::TIMESTAMP), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("severity"))), + &owned_value_path!("severity"), + Kind::bytes(), + Some(meaning::SEVERITY), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("service"))), + &owned_value_path!("service"), + Kind::bytes(), + Some(meaning::SERVICE), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("source"))), + &owned_value_path!("source"), + Kind::bytes(), + Some(meaning::SOURCE), ), )); @@ -467,13 +500,16 @@ mod tests { namespace.insert_standard_vector_source_metadata(&mut log, "datadog_agent", Utc::now()); - log.insert(event_path!("hostname"), "the_host"); - let tags = vec![ Value::Bytes("key1:value1".into()), Value::Bytes("key2:value2".into()), ]; + log.insert(event_path!("ddtags"), tags); + log.insert(event_path!("hostname"), "the_host"); + log.insert(event_path!("service"), "the_service"); + log.insert(event_path!("source"), "the_source"); + log.insert(event_path!("severity"), "the_severity"); assert!(log.namespace() == LogNamespace::Legacy); @@ -507,6 +543,27 @@ mod tests { &owned_value_path!("timestamp"), Kind::timestamp(), Some(meaning::TIMESTAMP), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("severity"))), + &owned_value_path!("severity"), + Kind::bytes(), + Some(meaning::SEVERITY), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("service"))), + &owned_value_path!("service"), + Kind::bytes(), + Some(meaning::SERVICE), + ) + .with_source_metadata( + "datadog_agent", + Some(LegacyKey::InsertIfEmpty(owned_value_path!("source"))), + &owned_value_path!("source"), + Kind::bytes(), + Some(meaning::SOURCE), ), )); @@ -526,8 +583,10 @@ mod tests { log.insert(metadata_path!("datadog_agent", "ddtags"), tags); log.insert(metadata_path!("datadog_agent", "hostname"), "the_host"); - log.insert(metadata_path!("datadog_agent", "timestamp"), Utc::now()); + log.insert(metadata_path!("datadog_agent", "service"), "the_service"); + log.insert(metadata_path!("datadog_agent", "source"), "the_source"); + log.insert(metadata_path!("datadog_agent", "severity"), "the_severity"); assert!(log.namespace() == LogNamespace::Vector); diff --git a/src/sources/datadog_agent/mod.rs b/src/sources/datadog_agent/mod.rs index ef406630be6c7..a955c9376e4de 100644 --- a/src/sources/datadog_agent/mod.rs +++ 
b/src/sources/datadog_agent/mod.rs @@ -40,6 +40,7 @@ use vector_lib::configurable::configurable_component; use vector_lib::event::{BatchNotifier, BatchStatus}; use vector_lib::internal_event::{EventsReceived, Registered}; use vector_lib::lookup::owned_value_path; +use vector_lib::schema::meaning; use vector_lib::tls::MaybeTlsIncomingStream; use vrl::path::OwnedTargetPath; use vrl::value::kind::Collection; @@ -238,40 +239,43 @@ impl SourceConfig for DatadogAgentConfig { let definition = self .decoding .schema_definition(global_log_namespace.merge(self.log_namespace)) + // NOTE: "status" is intentionally semantically mapped to "severity", + // since that is what DD designates as the semantic meaning of status + // https://docs.datadoghq.com/logs/log_configuration/attributes_naming_convention/?s=severity#reserved-attributes .with_source_metadata( Self::NAME, Some(LegacyKey::InsertIfEmpty(owned_value_path!("status"))), &owned_value_path!("status"), Kind::bytes(), - Some("severity"), + Some(meaning::SEVERITY), ) .with_source_metadata( Self::NAME, Some(LegacyKey::InsertIfEmpty(owned_value_path!("timestamp"))), &owned_value_path!("timestamp"), Kind::timestamp(), - Some("timestamp"), + Some(meaning::TIMESTAMP), ) .with_source_metadata( Self::NAME, Some(LegacyKey::InsertIfEmpty(owned_value_path!("hostname"))), &owned_value_path!("hostname"), Kind::bytes(), - Some("host"), + Some(meaning::HOST), ) .with_source_metadata( Self::NAME, Some(LegacyKey::InsertIfEmpty(owned_value_path!("service"))), &owned_value_path!("service"), Kind::bytes(), - Some("service"), + Some(meaning::SERVICE), ) .with_source_metadata( Self::NAME, Some(LegacyKey::InsertIfEmpty(owned_value_path!("ddsource"))), &owned_value_path!("ddsource"), Kind::bytes(), - Some("source"), + Some(meaning::SOURCE), ) .with_source_metadata( Self::NAME, @@ -282,7 +286,7 @@ impl SourceConfig for DatadogAgentConfig { } else { Kind::bytes() }, - Some("tags"), + Some(meaning::TAGS), ) .with_standard_vector_source_metadata(); From cb8f3def4fb63f9d72582701d5e96cfb2f63eff9 Mon Sep 17 00:00:00 2001 From: Matt Searle Date: Wed, 3 Apr 2024 11:32:29 +0100 Subject: [PATCH 0228/1491] feat(#19183): add namespace input to chronicle sink (#19398) * feat(#19183): add namespace input to chronicle sink * docs(#19183): add changelog reference for the enhancement * style(#19183): fix linting error in the changelog * docs(#19183): fix docs check error * Update src/sinks/gcp/chronicle_unstructured.rs Co-authored-by: Stephen Wakely * feature(#19183): make chronicle namespace optional * fix linting errors --------- Co-authored-by: Stephen Wakely --- ...chronicle_namespace_support.enhancement.md | 3 ++ src/sinks/gcp/chronicle_unstructured.rs | 28 ++++++++++++++++--- .../sinks/base/gcp_chronicle_unstructured.cue | 5 ++++ 3 files changed, 32 insertions(+), 4 deletions(-) create mode 100644 changelog.d/19183_chronicle_namespace_support.enhancement.md diff --git a/changelog.d/19183_chronicle_namespace_support.enhancement.md b/changelog.d/19183_chronicle_namespace_support.enhancement.md new file mode 100644 index 0000000000000..813f5adb9ad72 --- /dev/null +++ b/changelog.d/19183_chronicle_namespace_support.enhancement.md @@ -0,0 +1,3 @@ +Google Chronicle Unstructured Log sink now supports adding a namespace to the log events for indexing within Chronicle. 
+ +authors: ChocPanda diff --git a/src/sinks/gcp/chronicle_unstructured.rs b/src/sinks/gcp/chronicle_unstructured.rs index 53003b886032c..f0e35e0150573 100644 --- a/src/sinks/gcp/chronicle_unstructured.rs +++ b/src/sinks/gcp/chronicle_unstructured.rs @@ -7,6 +7,7 @@ use goauth::scopes::Scope; use http::{header::HeaderValue, Request, StatusCode, Uri}; use hyper::Body; use indoc::indoc; +use serde::Serialize; use serde_json::json; use snafu::Snafu; use std::io; @@ -127,6 +128,10 @@ pub struct ChronicleUnstructuredConfig { #[configurable(metadata(docs::examples = "c8c65bfa-5f2c-42d4-9189-64bb7b939f2c"))] pub customer_id: String, + /// User-configured environment namespace to identify the data domain the logs originated from. + #[configurable(metadata(docs::examples = "production"))] + pub namespace: Option, + #[serde(flatten)] pub auth: GcpAuthConfig, @@ -167,6 +172,7 @@ impl GenerateConfig for ChronicleUnstructuredConfig { toml::from_str(indoc! {r#" credentials_path = "/path/to/credentials.json" customer_id = "customer_id" + namespace = "namespace" log_type = "log_type" encoding.codec = "text" "#}) @@ -300,9 +306,19 @@ impl MetaDescriptive for ChronicleRequest { } } +#[derive(Clone, Debug, Serialize)] +struct ChronicleRequestBody { + customer_id: String, + #[serde(skip_serializing_if = "Option::is_none")] + namespace: Option, + log_type: String, + entries: Vec, +} + #[derive(Clone, Debug)] struct ChronicleEncoder { customer_id: String, + namespace: Option, encoder: codecs::Encoder<()>, transformer: codecs::Transformer, } @@ -347,10 +363,11 @@ impl Encoder<(String, Vec)> for ChronicleEncoder { }) .collect::>(); - let json = json!({ - "customer_id": self.customer_id, - "log_type": partition_key, - "entries": events, + let json = json!(ChronicleRequestBody { + customer_id: self.customer_id.clone(), + namespace: self.namespace.clone(), + log_type: partition_key, + entries: events, }); let size = as_tracked_write::<_, _, io::Error>(writer, &json, |writer, json| { @@ -434,6 +451,7 @@ impl ChronicleRequestBuilder { let encoder = crate::codecs::Encoder::<()>::new(serializer); let encoder = ChronicleEncoder { customer_id: config.customer_id.clone(), + namespace: config.namespace.clone(), encoder, transformer, }; @@ -535,6 +553,7 @@ mod integration_tests { indoc! { r#" endpoint = "{}" customer_id = "customer id" + namespace = "namespace" credentials_path = "{}" log_type = "{}" encoding.codec = "text" @@ -622,6 +641,7 @@ mod integration_tests { #[derive(Clone, Debug, Deserialize, Serialize)] pub struct Log { customer_id: String, + namespace: String, log_type: String, log_text: String, ts_rfc3339: String, diff --git a/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue b/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue index 3bddfbd844955..bedd487decbaa 100644 --- a/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue +++ b/website/cue/reference/components/sinks/base/gcp_chronicle_unstructured.cue @@ -362,6 +362,11 @@ base: components: sinks: gcp_chronicle_unstructured: configuration: { syntax: "template" } } + namespace: { + description: "User-configured environment namespace to identify the data domain the logs originated from." + required: false + type: string: examples: ["production"] + } region: { description: "The GCP region to use." 
required: false From f1fdfd0a3c00227544d250654ee567925c0a0a79 Mon Sep 17 00:00:00 2001 From: slamp Date: Wed, 3 Apr 2024 21:24:01 +0200 Subject: [PATCH 0229/1491] docs: Correct docker.md so the command can be executed (#20227) Update docker.md so the command can be executed It was missing --name for the docker run command --- .../en/docs/setup/installation/platforms/docker.md | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/website/content/en/docs/setup/installation/platforms/docker.md b/website/content/en/docs/setup/installation/platforms/docker.md index 050b703c27a09..b2669173f5148 100644 --- a/website/content/en/docs/setup/installation/platforms/docker.md +++ b/website/content/en/docs/setup/installation/platforms/docker.md @@ -63,6 +63,7 @@ docker run \ -d \ -v $PWD/vector.yaml:/etc/vector/vector.yaml:ro \ -p 8686:8686 \ + --name vector timberio/vector:{{< version >}}-debian ``` @@ -71,19 +72,19 @@ Make sure to substitute out `debian` if you're using a different distribution. ### Stop ```shell -docker stop timberio/vector +docker stop vector ``` ### Reload ```shell -docker kill --signal=HUP timberio/vector +docker kill --signal=HUP vector ``` ### Restart ```shell -docker restart -f $(docker ps -aqf "name=vector") +docker restart $(docker ps -aqf "name=vector") ``` ### Observe @@ -97,13 +98,13 @@ docker logs -f $(docker ps -aqf "name=vector") To access metrics from your Vector image: ```shell -vector top +docker exec -ti $(docker ps -aqf "name=vector") vector top ``` ### Uninstall ```shell -docker rm timberio/vector +docker rm vector ``` [docker]: https://docker.com From 1eb18e26c61e4eeb0df425c1987be60aa40b4501 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Fl=C3=A1vio=20Cruz?= Date: Thu, 4 Apr 2024 10:58:25 -0400 Subject: [PATCH 0230/1491] Use new VRL protobuf library to remove duplication in the protobuf codec (#20074) * Try to use the new VRL protobuf library * Run fmt/clippy * Some more changes * Fix tests * Add changelog entry * Address comment --- .../20074_protobuf_decoder.breaking.md | 1 + lib/codecs/src/common/mod.rs | 1 - lib/codecs/src/common/protobuf.rs | 36 -- lib/codecs/src/decoding/format/protobuf.rs | 116 +---- lib/codecs/src/encoding/format/protobuf.rs | 404 +----------------- .../tests/data/protobuf/protos/test.desc | Bin 1258 -> 0 bytes .../tests/data/protobuf/protos/test.proto | 61 --- 7 files changed, 26 insertions(+), 593 deletions(-) create mode 100644 changelog.d/20074_protobuf_decoder.breaking.md delete mode 100644 lib/codecs/src/common/protobuf.rs delete mode 100644 lib/codecs/tests/data/protobuf/protos/test.desc delete mode 100644 lib/codecs/tests/data/protobuf/protos/test.proto diff --git a/changelog.d/20074_protobuf_decoder.breaking.md b/changelog.d/20074_protobuf_decoder.breaking.md new file mode 100644 index 0000000000000..22e812c3fd7b4 --- /dev/null +++ b/changelog.d/20074_protobuf_decoder.breaking.md @@ -0,0 +1 @@ +The `protobuf` decoder will no longer set fields on the decoded event that are not set in the incoming byte stream. Previously it would set the default value for the field even if it wasn't in the event. This change ensures that the encoder will return the exact same bytes for the same given event. diff --git a/lib/codecs/src/common/mod.rs b/lib/codecs/src/common/mod.rs index 64493b9fe0d1b..e2d14804f7d3b 100644 --- a/lib/codecs/src/common/mod.rs +++ b/lib/codecs/src/common/mod.rs @@ -1,4 +1,3 @@ //! A collection of common utility features used by both encoding and decoding logic. 
pub mod length_delimited; -pub mod protobuf; diff --git a/lib/codecs/src/common/protobuf.rs b/lib/codecs/src/common/protobuf.rs deleted file mode 100644 index 5b557deb3e0a5..0000000000000 --- a/lib/codecs/src/common/protobuf.rs +++ /dev/null @@ -1,36 +0,0 @@ -use prost_reflect::{DescriptorPool, MessageDescriptor}; -use std::path::Path; - -/// Load a `MessageDescriptor` from a specific message type from the given descriptor set file. -/// -/// The path should point to the output of `protoc -o ...` -pub fn get_message_descriptor( - descriptor_set_path: &Path, - message_type: &str, -) -> vector_common::Result<MessageDescriptor> { - let b = std::fs::read(descriptor_set_path).map_err(|e| { - format!("Failed to open protobuf desc file '{descriptor_set_path:?}': {e}",) - })?; - let pool = DescriptorPool::decode(b.as_slice()).map_err(|e| { - format!("Failed to parse protobuf desc file '{descriptor_set_path:?}': {e}") - })?; - pool.get_message_by_name(message_type).ok_or_else(|| { - format!("The message type '{message_type}' could not be found in '{descriptor_set_path:?}'") - .into() - }) -} - -#[cfg(test)] -mod tests { - use super::*; - use std::path::PathBuf; - - #[test] - fn test_get_message_descriptor() { - let path = PathBuf::from(std::env::var_os("CARGO_MANIFEST_DIR").unwrap()) - .join("tests/data/protobuf/protos/test.desc"); - let message_descriptor = get_message_descriptor(&path, "test.Integers").unwrap(); - assert_eq!("Integers", message_descriptor.name()); - assert_eq!(4, message_descriptor.fields().count()); - } -} diff --git a/lib/codecs/src/decoding/format/protobuf.rs b/lib/codecs/src/decoding/format/protobuf.rs index 4f83432ebe7f4..42f8665891dab 100644 --- a/lib/codecs/src/decoding/format/protobuf.rs +++ b/lib/codecs/src/decoding/format/protobuf.rs @@ -3,8 +3,7 @@ use std::path::PathBuf; use bytes::Bytes; use chrono::Utc; use derivative::Derivative; -use ordered_float::NotNan; -use prost_reflect::{DynamicMessage, MessageDescriptor, ReflectMessage}; +use prost_reflect::{DynamicMessage, MessageDescriptor}; use smallvec::{smallvec, SmallVec}; use vector_config::configurable_component; use vector_core::event::LogEvent; @@ -13,9 +12,7 @@ use vector_core::{ event::Event, schema, }; -use vrl::value::{Kind, ObjectMap}; - -use crate::common::protobuf::get_message_descriptor; +use vrl::value::Kind; use super::Deserializer; @@ -98,7 +95,8 @@ impl Deserializer for ProtobufDeserializer { let dynamic_message = DynamicMessage::decode(self.message_descriptor.clone(), bytes) .map_err(|error| format!("Error parsing protobuf: {:?}", error))?; - let proto_vrl = to_vrl(&prost_reflect::Value::Message(dynamic_message), None)?; + let proto_vrl = + vrl::protobuf::proto_to_value(&prost_reflect::Value::Message(dynamic_message), None)?; let mut event = Event::Log(LogEvent::from(proto_vrl)); let event = match log_namespace { LogNamespace::Vector => event, @@ -121,101 +119,14 @@ impl Deserializer for ProtobufDeserializer { impl TryFrom<&ProtobufDeserializerConfig> for ProtobufDeserializer { type Error = vector_common::Error; fn try_from(config: &ProtobufDeserializerConfig) -> vector_common::Result<Self> { - let message_descriptor = - get_message_descriptor(&config.protobuf.desc_file, &config.protobuf.message_type)?; + let message_descriptor = vrl::protobuf::get_message_descriptor( + &config.protobuf.desc_file, + &config.protobuf.message_type, + )?; Ok(Self::new(message_descriptor)) } } -fn to_vrl( - prost_reflect_value: &prost_reflect::Value, - field_descriptor: Option<&prost_reflect::FieldDescriptor>, -) -> vector_common::Result<vrl::value::Value> { - let
vrl_value = match prost_reflect_value { - prost_reflect::Value::Bool(v) => vrl::value::Value::from(*v), - prost_reflect::Value::I32(v) => vrl::value::Value::from(*v), - prost_reflect::Value::I64(v) => vrl::value::Value::from(*v), - prost_reflect::Value::U32(v) => vrl::value::Value::from(*v), - prost_reflect::Value::U64(v) => vrl::value::Value::from(*v), - prost_reflect::Value::F32(v) => vrl::value::Value::Float( - NotNan::new(f64::from(*v)).map_err(|_e| "Float number cannot be Nan")?, - ), - prost_reflect::Value::F64(v) => { - vrl::value::Value::Float(NotNan::new(*v).map_err(|_e| "F64 number cannot be Nan")?) - } - prost_reflect::Value::String(v) => vrl::value::Value::from(v.as_str()), - prost_reflect::Value::Bytes(v) => vrl::value::Value::from(v.clone()), - prost_reflect::Value::EnumNumber(v) => { - if let Some(field_descriptor) = field_descriptor { - let kind = field_descriptor.kind(); - let enum_desc = kind.as_enum().ok_or_else(|| { - format!( - "Internal error while parsing protobuf enum. Field descriptor: {:?}", - field_descriptor - ) - })?; - vrl::value::Value::from( - enum_desc - .get_value(*v) - .ok_or_else(|| { - format!("The number {} cannot be in '{}'", v, enum_desc.name()) - })? - .name(), - ) - } else { - Err("Expected valid field descriptor")? - } - } - prost_reflect::Value::Message(v) => { - let mut obj_map = ObjectMap::new(); - for field_desc in v.descriptor().fields() { - let field_value = v.get_field(&field_desc); - let out = to_vrl(field_value.as_ref(), Some(&field_desc))?; - obj_map.insert(field_desc.name().into(), out); - } - vrl::value::Value::from(obj_map) - } - prost_reflect::Value::List(v) => { - let vec = v - .iter() - .map(|o| to_vrl(o, field_descriptor)) - .collect::, vector_common::Error>>()?; - vrl::value::Value::from(vec) - } - prost_reflect::Value::Map(v) => { - if let Some(field_descriptor) = field_descriptor { - let kind = field_descriptor.kind(); - let message_desc = kind.as_message().ok_or_else(|| { - format!( - "Internal error while parsing protobuf field descriptor: {:?}", - field_descriptor - ) - })?; - vrl::value::Value::from( - v.iter() - .map(|kv| { - Ok(( - kv.0.as_str() - .ok_or_else(|| { - format!( - "Internal error while parsing protobuf map. Field descriptor: {:?}", - field_descriptor - ) - })? - .into(), - to_vrl(kv.1, Some(&message_desc.map_entry_value_field()))?, - )) - }) - .collect::>()?, - ) - } else { - Err("Expected valid field descriptor")? - } - } - }; - Ok(vrl_value) -} - #[cfg(test)] mod tests { // TODO: add test for bad file path & invalid message_type @@ -237,7 +148,8 @@ mod tests { validate_log: fn(&LogEvent), ) { let input = Bytes::from(protobuf_bin_message); - let message_descriptor = get_message_descriptor(&protobuf_desc_path, message_type).unwrap(); + let message_descriptor = + vrl::protobuf::get_message_descriptor(&protobuf_desc_path, message_type).unwrap(); let deserializer = ProtobufDeserializer::new(message_descriptor); for namespace in [LogNamespace::Legacy, LogNamespace::Vector] { @@ -315,7 +227,11 @@ mod tests { let protobuf_desc_path = test_data_dir().join("protos/test_protobuf.desc"); let message_type = "test_protobuf.Person"; let validate_log = |log: &LogEvent| { - assert_eq!(log["name"], "".into()); + // No field will be set. 
+            assert!(!log.contains("name"));
+            assert!(!log.contains("id"));
+            assert!(!log.contains("email"));
+            assert!(!log.contains("phones"));
         };
 
         parse_and_validate(
@@ -329,7 +245,7 @@ mod tests {
     #[test]
     fn deserialize_error_invalid_protobuf() {
         let input = Bytes::from("{ foo");
-        let message_descriptor = get_message_descriptor(
+        let message_descriptor = vrl::protobuf::get_message_descriptor(
             &test_data_dir().join("protos/test_protobuf.desc"),
             "test_protobuf.Person",
         )
diff --git a/lib/codecs/src/encoding/format/protobuf.rs b/lib/codecs/src/encoding/format/protobuf.rs
index ce2a04e684bb9..1313124d0bcb7 100644
--- a/lib/codecs/src/encoding/format/protobuf.rs
+++ b/lib/codecs/src/encoding/format/protobuf.rs
@@ -1,10 +1,7 @@
-use crate::common::protobuf::get_message_descriptor;
 use crate::encoding::BuildError;
 use bytes::BytesMut;
-use chrono::Timelike;
 use prost::Message;
-use prost_reflect::{DynamicMessage, FieldDescriptor, Kind, MapKey, MessageDescriptor};
-use std::collections::HashMap;
+use prost_reflect::MessageDescriptor;
 use std::path::PathBuf;
 use tokio_util::codec::Encoder;
 use vector_core::{
@@ -24,8 +21,10 @@ pub struct ProtobufSerializerConfig {
 impl ProtobufSerializerConfig {
     /// Build the `ProtobufSerializer` from this configuration.
     pub fn build(&self) -> Result<ProtobufSerializer, BuildError> {
-        let message_descriptor =
-            get_message_descriptor(&self.protobuf.desc_file, &self.protobuf.message_type)?;
+        let message_descriptor = vrl::protobuf::get_message_descriptor(
+            &self.protobuf.desc_file,
+            &self.protobuf.message_type,
+        )?;
         Ok(ProtobufSerializer { message_descriptor })
     }
 
@@ -64,133 +63,6 @@ pub struct ProtobufSerializer {
     message_descriptor: MessageDescriptor,
 }
 
-/// Convert a single raw vector `Value` into a protobuf `Value`.
-///
-/// Unlike `convert_value`, this ignores any field metadata such as cardinality.
-fn convert_value_raw(
-    value: Value,
-    kind: &prost_reflect::Kind,
-) -> Result<prost_reflect::Value, vector_common::Error> {
-    let kind_str = value.kind_str().to_owned();
-    match (value, kind) {
-        (Value::Boolean(b), Kind::Bool) => Ok(prost_reflect::Value::Bool(b)),
-        (Value::Bytes(b), Kind::Bytes) => Ok(prost_reflect::Value::Bytes(b)),
-        (Value::Bytes(b), Kind::String) => Ok(prost_reflect::Value::String(
-            String::from_utf8_lossy(&b).into_owned(),
-        )),
-        (Value::Bytes(b), Kind::Enum(descriptor)) => {
-            let string = String::from_utf8_lossy(&b).into_owned();
-            if let Some(d) = descriptor
-                .values()
-                .find(|v| v.name().eq_ignore_ascii_case(&string))
-            {
-                Ok(prost_reflect::Value::EnumNumber(d.number()))
-            } else {
-                Err(format!(
-                    "Enum `{}` has no value that matches string '{}'",
-                    descriptor.full_name(),
-                    string
-                )
-                .into())
-            }
-        }
-        (Value::Float(f), Kind::Double) => Ok(prost_reflect::Value::F64(f.into_inner())),
-        (Value::Float(f), Kind::Float) => Ok(prost_reflect::Value::F32(f.into_inner() as f32)),
-        (Value::Integer(i), Kind::Int32) => Ok(prost_reflect::Value::I32(i as i32)),
-        (Value::Integer(i), Kind::Int64) => Ok(prost_reflect::Value::I64(i)),
-        (Value::Integer(i), Kind::Sint32) => Ok(prost_reflect::Value::I32(i as i32)),
-        (Value::Integer(i), Kind::Sint64) => Ok(prost_reflect::Value::I64(i)),
-        (Value::Integer(i), Kind::Sfixed32) => Ok(prost_reflect::Value::I32(i as i32)),
-        (Value::Integer(i), Kind::Sfixed64) => Ok(prost_reflect::Value::I64(i)),
-        (Value::Integer(i), Kind::Uint32) => Ok(prost_reflect::Value::U32(i as u32)),
-        (Value::Integer(i), Kind::Uint64) => Ok(prost_reflect::Value::U64(i as u64)),
-        (Value::Integer(i), Kind::Fixed32) => Ok(prost_reflect::Value::U32(i as u32)),
-        (Value::Integer(i), Kind::Fixed64) => Ok(prost_reflect::Value::U64(i as u64)),
-        (Value::Integer(i), Kind::Enum(_)) => Ok(prost_reflect::Value::EnumNumber(i as i32)),
-        (Value::Object(o), Kind::Message(message_descriptor)) => {
-            if message_descriptor.is_map_entry() {
-                let value_field = message_descriptor
-                    .get_field_by_name("value")
-                    .ok_or("Internal error with proto map processing")?;
-                let mut map: HashMap<MapKey, prost_reflect::Value> = HashMap::new();
-                for (key, val) in o.into_iter() {
-                    match convert_value(&value_field, val) {
-                        Ok(prost_val) => {
-                            map.insert(MapKey::String(key.into()), prost_val);
-                        }
-                        Err(e) => return Err(e),
-                    }
-                }
-                Ok(prost_reflect::Value::Map(map))
-            } else {
-                // if it's not a map, it's an actual message
-                Ok(prost_reflect::Value::Message(encode_message(
-                    message_descriptor,
-                    Value::Object(o),
-                )?))
-            }
-        }
-        (Value::Regex(r), Kind::String) => Ok(prost_reflect::Value::String(r.as_str().to_owned())),
-        (Value::Regex(r), Kind::Bytes) => Ok(prost_reflect::Value::Bytes(r.as_bytes())),
-        (Value::Timestamp(t), Kind::Int64) => Ok(prost_reflect::Value::I64(t.timestamp_micros())),
-        (Value::Timestamp(t), Kind::Message(descriptor))
-            if descriptor.full_name() == "google.protobuf.Timestamp" =>
-        {
-            let mut message = DynamicMessage::new(descriptor.clone());
-            message.try_set_field_by_name("seconds", prost_reflect::Value::I64(t.timestamp()))?;
-            message
-                .try_set_field_by_name("nanos", prost_reflect::Value::I32(t.nanosecond() as i32))?;
-            Ok(prost_reflect::Value::Message(message))
-        }
-        _ => Err(format!("Cannot encode vector `{kind_str}` into protobuf `{kind:?}`",).into()),
-    }
-}
-
-/// Convert a vector `Value` into a protobuf `Value`.
-fn convert_value(
-    field_descriptor: &FieldDescriptor,
-    value: Value,
-) -> Result<prost_reflect::Value, vector_common::Error> {
-    if let Value::Array(a) = value {
-        if field_descriptor.cardinality() == prost_reflect::Cardinality::Repeated {
-            let repeated: Result<Vec<prost_reflect::Value>, vector_common::Error> = a
-                .into_iter()
-                .map(|v| convert_value_raw(v, &field_descriptor.kind()))
-                .collect();
-            Ok(prost_reflect::Value::List(repeated?))
-        } else {
-            Err("Cannot encode vector array into a non-repeated protobuf field".into())
-        }
-    } else {
-        convert_value_raw(value, &field_descriptor.kind())
-    }
-}
-
-/// Convert a vector object (`Value`) into a protobuf message.
-///
-/// This function can only operate on `Value::Object`s,
-/// since they are the only field-based vector Value
-/// and protobuf messages are defined as a collection of fields and values.
-fn encode_message(
-    message_descriptor: &MessageDescriptor,
-    value: Value,
-) -> Result<DynamicMessage, vector_common::Error> {
-    let mut message = DynamicMessage::new(message_descriptor.clone());
-    if let Value::Object(map) = value {
-        for field in message_descriptor.fields() {
-            match map.get(field.name()) {
-                None | Some(Value::Null) => message.clear_field(&field),
-                Some(value) => {
-                    message.try_set_field(&field, convert_value(&field, value.clone())?)?
-                }
-            }
-        }
-        Ok(message)
-    } else {
-        Err("ProtobufSerializer only supports serializing objects".into())
-    }
-}
-
 impl ProtobufSerializer {
     /// Creates a new `ProtobufSerializer`.
     pub fn new(message_descriptor: MessageDescriptor) -> Self {
@@ -208,9 +80,11 @@ impl Encoder<Event> for ProtobufSerializer {
 
     fn encode(&mut self, event: Event, buffer: &mut BytesMut) -> Result<(), Self::Error> {
         let message = match event {
-            Event::Log(log) => encode_message(&self.message_descriptor, log.into_parts().0),
+            Event::Log(log) => {
+                vrl::protobuf::encode_message(&self.message_descriptor, log.into_parts().0)
+            }
             Event::Metric(_) => unimplemented!(),
-            Event::Trace(trace) => encode_message(
+            Event::Trace(trace) => vrl::protobuf::encode_message(
                 &self.message_descriptor,
                 Value::Object(trace.into_parts().0),
             ),
@@ -218,263 +92,3 @@ impl Encoder<Event> for ProtobufSerializer {
         message.encode(buffer).map_err(Into::into)
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use bytes::Bytes;
-    use chrono::DateTime;
-    use ordered_float::NotNan;
-    use prost_reflect::MapKey;
-    use std::collections::{BTreeMap, HashMap};
-
-    macro_rules! mfield {
-        ($m:expr, $f:expr) => {
-            $m.get_field_by_name($f).unwrap().into_owned()
-        };
-    }
-
-    fn test_data_dir() -> PathBuf {
-        PathBuf::from(std::env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("tests/data/protobuf")
-    }
-
-    fn test_message_descriptor(message_type: &str) -> MessageDescriptor {
-        let path = PathBuf::from(std::env::var_os("CARGO_MANIFEST_DIR").unwrap())
-            .join("tests/data/protobuf/protos/test.desc");
-        get_message_descriptor(&path, &format!("test.{message_type}")).unwrap()
-    }
-
-    #[test]
-    fn test_config_input_type() {
-        let config = ProtobufSerializerConfig {
-            protobuf: ProtobufSerializerOptions {
-                desc_file: test_data_dir().join("test_protobuf.desc"),
-                message_type: "test_protobuf.Person".into(),
-            },
-        };
-        assert_eq!(config.input_type(), DataType::Log);
-    }
-
-    #[test]
-    fn test_encode_integers() {
-        let message = encode_message(
-            &test_message_descriptor("Integers"),
-            Value::Object(BTreeMap::from([
-                ("i32".into(), Value::Integer(-1234)),
-                ("i64".into(), Value::Integer(-9876)),
-                ("u32".into(), Value::Integer(1234)),
-                ("u64".into(), Value::Integer(9876)),
-            ])),
-        )
-        .unwrap();
-        assert_eq!(Some(-1234), mfield!(message, "i32").as_i32());
-        assert_eq!(Some(-9876), mfield!(message, "i64").as_i64());
-        assert_eq!(Some(1234), mfield!(message, "u32").as_u32());
-        assert_eq!(Some(9876), mfield!(message, "u64").as_u64());
-    }
-
-    #[test]
-    fn test_encode_floats() {
-        let message = encode_message(
-            &test_message_descriptor("Floats"),
-            Value::Object(BTreeMap::from([
-                ("d".into(), Value::Float(NotNan::new(11.0).unwrap())),
-                ("f".into(), Value::Float(NotNan::new(2.0).unwrap())),
-            ])),
-        )
-        .unwrap();
-        assert_eq!(Some(11.0), mfield!(message, "d").as_f64());
-        assert_eq!(Some(2.0), mfield!(message, "f").as_f32());
-    }
-
-    #[test]
-    fn test_encode_bytes() {
-        let bytes = Bytes::from(vec![0, 1, 2, 3]);
-        let message = encode_message(
-            &test_message_descriptor("Bytes"),
-            Value::Object(BTreeMap::from([
-                ("text".into(), Value::Bytes(Bytes::from("vector"))),
-                ("binary".into(), Value::Bytes(bytes.clone())),
-            ])),
-        )
-        .unwrap();
-        assert_eq!(Some("vector"), mfield!(message, "text").as_str());
-        assert_eq!(Some(&bytes), mfield!(message, "binary").as_bytes());
-    }
-
-    #[test]
-    fn test_encode_map() {
-        let message = encode_message(
-            &test_message_descriptor("Map"),
-            Value::Object(BTreeMap::from([
-                (
-                    "names".into(),
-                    Value::Object(BTreeMap::from([
-                        ("forty-four".into(), Value::Integer(44)),
-                        ("one".into(), Value::Integer(1)),
-                    ])),
-                ),
-                (
-                    "people".into(),
-                    Value::Object(BTreeMap::from([(
-                        "mark".into(),
-                        Value::Object(BTreeMap::from([
-                            ("nickname".into(), Value::Bytes(Bytes::from("jeff"))),
-                            ("age".into(), Value::Integer(22)),
-                        ])),
-                    )])),
-                ),
-            ])),
-        )
-        .unwrap();
-        // the simpler string->primitive map
-        assert_eq!(
-            Some(&HashMap::from([
-                (
-                    MapKey::String("forty-four".into()),
-                    prost_reflect::Value::I32(44),
-                ),
-                (MapKey::String("one".into()), prost_reflect::Value::I32(1),),
-            ])),
-            mfield!(message, "names").as_map()
-        );
-        // the not-simpler string->message map
-        let people = mfield!(message, "people").as_map().unwrap().to_owned();
-        assert_eq!(1, people.len());
-        assert_eq!(
-            Some("jeff"),
-            mfield!(
-                people[&MapKey::String("mark".into())].as_message().unwrap(),
-                "nickname"
-            )
-            .as_str()
-        );
-        assert_eq!(
-            Some(22),
-            mfield!(
-                people[&MapKey::String("mark".into())].as_message().unwrap(),
-                "age"
-            )
-            .as_u32()
-        );
-    }
-
-    #[test]
-    fn test_encode_enum() {
-        let message = encode_message(
-            &test_message_descriptor("Enum"),
-            Value::Object(BTreeMap::from([
-                ("breakfast".into(), Value::Bytes(Bytes::from("tomato"))),
-                ("dinner".into(), Value::Bytes(Bytes::from("OLIVE"))),
-                ("lunch".into(), Value::Integer(0)),
-            ])),
-        )
-        .unwrap();
-        assert_eq!(Some(2), mfield!(message, "breakfast").as_enum_number());
-        assert_eq!(Some(0), mfield!(message, "lunch").as_enum_number());
-        assert_eq!(Some(1), mfield!(message, "dinner").as_enum_number());
-    }
-
-    #[test]
-    fn test_encode_timestamp() {
-        let message = encode_message(
-            &test_message_descriptor("Timestamp"),
-            Value::Object(BTreeMap::from([(
-                "morning".into(),
-                Value::Timestamp(DateTime::from_timestamp(8675, 309).unwrap()),
-            )])),
-        )
-        .unwrap();
-        let timestamp = mfield!(message, "morning").as_message().unwrap().clone();
-        assert_eq!(Some(8675), mfield!(timestamp, "seconds").as_i64());
-        assert_eq!(Some(309), mfield!(timestamp, "nanos").as_i32());
-    }
-
-    #[test]
-    fn test_encode_repeated_primitive() {
-        let message = encode_message(
-            &test_message_descriptor("RepeatedPrimitive"),
-            Value::Object(BTreeMap::from([(
-                "numbers".into(),
-                Value::Array(vec![
-                    Value::Integer(8),
-                    Value::Integer(6),
-                    Value::Integer(4),
-                ]),
-            )])),
-        )
-        .unwrap();
-        let list = mfield!(message, "numbers").as_list().unwrap().to_vec();
-        assert_eq!(3, list.len());
-        assert_eq!(Some(8), list[0].as_i64());
-        assert_eq!(Some(6), list[1].as_i64());
-        assert_eq!(Some(4), list[2].as_i64());
-    }
-
-    #[test]
-    fn test_encode_repeated_message() {
-        let message = encode_message(
-            &test_message_descriptor("RepeatedMessage"),
-            Value::Object(BTreeMap::from([(
-                "messages".into(),
-                Value::Array(vec![
-                    Value::Object(BTreeMap::from([(
-                        "text".into(),
-                        Value::Bytes(Bytes::from("vector")),
-                    )])),
-                    Value::Object(BTreeMap::from([("index".into(), Value::Integer(4444))])),
-                    Value::Object(BTreeMap::from([
-                        ("text".into(), Value::Bytes(Bytes::from("protobuf"))),
-                        ("index".into(), Value::Integer(1)),
-                    ])),
-                ]),
-            )])),
-        )
-        .unwrap();
-        let list = mfield!(message, "messages").as_list().unwrap().to_vec();
-        assert_eq!(3, list.len());
-        assert_eq!(
-            Some("vector"),
-            mfield!(list[0].as_message().unwrap(), "text").as_str()
-        );
-        assert!(!list[0].as_message().unwrap().has_field_by_name("index"));
-        assert!(!list[1].as_message().unwrap().has_field_by_name("t4ext"));
-        assert_eq!(
-            Some(4444),
-            mfield!(list[1].as_message().unwrap(), "index").as_u32()
-        );
-        assert_eq!(
-            Some("protobuf"),
-            mfield!(list[2].as_message().unwrap(), "text").as_str()
-        );
-        assert_eq!(
-            Some(1),
-            mfield!(list[2].as_message().unwrap(), "index").as_u32()
-        );
-    }
-
-    fn run_encoding_on_decoding_test_data(
-        filename: &str,
-        message_type: &str,
-    ) -> Result<DynamicMessage, vector_common::Error> {
-        let protos_dir = PathBuf::from(std::env::var_os("CARGO_MANIFEST_DIR").unwrap())
-            .join("tests/data/protobuf/protos");
-        let descriptor_set_path = protos_dir.join(filename);
-        let message_descriptor =
-            get_message_descriptor(&descriptor_set_path, message_type).unwrap();
-        encode_message(
-            &message_descriptor,
-            Value::Object(BTreeMap::from([
-                ("name".into(), Value::Bytes(Bytes::from("rope"))),
-                ("id".into(), Value::Integer(9271)),
-            ])),
-        )
-    }
-
-    #[test]
-    fn test_encode_decoding_protobuf_test_data() {
-        // just check for the side-effect of success
-        run_encoding_on_decoding_test_data("test_protobuf.desc", "test_protobuf.Person").unwrap();
-        run_encoding_on_decoding_test_data("test_protobuf3.desc", "test_protobuf3.Person").unwrap();
-    }
-}
diff --git a/lib/codecs/tests/data/protobuf/protos/test.desc b/lib/codecs/tests/data/protobuf/protos/test.desc
deleted file mode 100644
index f12bfa7d889b8237c0cbb34d81ce79b716907f2a..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001

literal 1258
zcmah|&2G~`5boNslT1nzmIB2GKo+2aR98x)sDe`>r9Wy*tyKhwOXb8HW5ropwo{r1
zK;kWsIC9{^Gw}}GV0UdNR#b`G%s1c6&Nn*)e-S+K!q5xoeiVmkIGRuPQ-4O2)R{$=
zgm5z>WF9A&Z=gObICuxD2_1*bO?J>zw5yWB#oM4UhlR5gUDM=M*mt9OhYJ?3H?4L^Ro-vhXuo^&5@;AoN{PkL~^s1OQ}cW1h+u-586__MuaJmZb#}?InwP2
zWX?G%pTr#^Oo?z#g(DS?bUWrV(2s)9NfW#Y$d&aFL9Xy7vX(;7#5@44zu-+`47}Hy
zRPyRX+V}zJBcD0(Lh?6B?wQ{dP%oSazY<@ak=2;HkBI&$V4v}#NjZ!1Gq`oMpHrlPS0MTiwnE4AqWx(b$Y(ae#Q)fDH
zl5DnHMWP5>NAcWG$%=ycBf*@FKS@ouakDVq1>N--qp@6q+lYDGdQ_O#5=YUmfqo@P#hw2HW7cM3

diff --git a/lib/codecs/tests/data/protobuf/protos/test.proto b/lib/codecs/tests/data/protobuf/protos/test.proto
deleted file mode 100644
index 8e3275b7e5394..0000000000000
--- a/lib/codecs/tests/data/protobuf/protos/test.proto
+++ /dev/null
@@ -1,61 +0,0 @@
-// Remember to recompile `test.desc` when you update this file:
-// protoc -I . -o test.desc test.proto google/protobuf/timestamp.proto
-
-syntax = "proto3";
-
-package test;
-
-import "google/protobuf/timestamp.proto";
-
-message Integers {
-  int32 i32 = 1;
-  int64 i64 = 2;
-  uint32 u32 = 3;
-  uint64 u64 = 4;
-}
-
-message Floats {
-  double d = 1;
-  float f = 2;
-}
-
-message Bytes {
-  string text = 1;
-  bytes binary = 2;
-}
-
-message Map {
-  message Person {
-    string nickname = 1;
-    uint32 age = 2;
-  };
-  map<string, int32> names = 1;
-  map<string, Person> people = 2;
-}
-
-message Enum {
-  enum Fruit {
-    APPLE = 0;
-    OLIVE = 1;
-    TOMATO = 2;
-  }
-  Fruit breakfast = 1;
-  Fruit lunch = 2;
-  Fruit dinner = 3;
-}
-
-message Timestamp {
-  google.protobuf.Timestamp morning = 1;
-}
-
-message RepeatedPrimitive {
-  repeated int64 numbers = 1;
-}
-
-message RepeatedMessage {
-  message EmbeddedMessage {
-    optional string text = 1;
-    optional uint32 index = 2;
-  }
-  repeated EmbeddedMessage messages = 1;
-}
From 7b85728c474abc2ff691624ac253ff1777d450b7 Mon Sep 17 00:00:00 2001
From: neuronull
Date: Thu, 4 Apr 2024 11:11:13 -0600
Subject: [PATCH 0231/1491] chore(splunk_hec_logs sink): support log namespaced host and timestamp attributes (#20211)

- Currently the splunk_hec_logs sink has default paths for the `timestamp_key`
  and `host_key` configurable settings which statically point to the Legacy
  (global log schema) locations for these keys.

- In order to support log namespacing for these settings, the logic for
  determining the default case is moved from the configuration serialization
  down into the sink's encoding of the event. That is necessary because
  whether an event uses the Vector or the Legacy log namespace can only be
  determined at runtime, per event.

- Note that this necessitated a small change to the Humio sinks as well, to
  preserve existing behavior, simply because those sinks are wrappers over
  Splunk HEC.
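The following minimal, self-contained Rust sketch illustrates the per-event fallback described above. It is not part of the patch: `LogNamespace` and the string paths are simplified stand-ins for Vector's real types, and `resolve_host_path` is an illustrative helper name. `None` for `configured` plays the same role as the `Option`-wrapped `OptionalTargetPath` introduced in the diff below.

#[derive(Clone, Copy)]
enum LogNamespace {
    Legacy,
    Vector,
}

// `configured` is the user's `host_key` setting (`None` means "use the default"),
// `global_host_key` stands in for the global `log_schema.host_key` option, and
// `semantic_host` for the path carrying the semantic meaning "host", if defined.
fn resolve_host_path<'a>(
    configured: Option<&'a str>,
    namespace: LogNamespace,
    global_host_key: &'a str,
    semantic_host: Option<&'a str>,
) -> Option<&'a str> {
    match (configured, namespace) {
        // An explicit setting always wins, regardless of namespace.
        (Some(path), _) => Some(path),
        // Legacy-namespaced events fall back to the global log schema key.
        (None, LogNamespace::Legacy) => Some(global_host_key),
        // Vector-namespaced events fall back to the semantic meaning, if any.
        (None, LogNamespace::Vector) => semantic_host,
    }
}

fn main() {
    // A Legacy event with no override resolves to the global key...
    assert_eq!(
        resolve_host_path(None, LogNamespace::Legacy, "host", Some("hostname")),
        Some("host")
    );
    // ...while a Vector-namespaced event resolves to the semantic meaning.
    assert_eq!(
        resolve_host_path(None, LogNamespace::Vector, "host", Some("hostname")),
        Some("hostname")
    );
}

Because the namespace is a property of each event, this decision cannot happen during config deserialization, which is why the defaults move into the encoder.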
---
 src/sinks/humio/logs.rs                       |  11 +-
 src/sinks/humio/metrics.rs                    |   3 +-
 src/sinks/splunk_hec/common/util.rs           |  12 --
 src/sinks/splunk_hec/logs/config.rs           |  33 ++--
 .../splunk_hec/logs/integration_tests.rs      |  14 +-
 src/sinks/splunk_hec/logs/sink.rs             |  75 ++++++--
 src/sinks/splunk_hec/logs/tests.rs            | 166 +++++++++++++-----
 src/sources/splunk_hec/mod.rs                 |   7 +-
 .../components/sinks/base/humio_logs.cue      |   7 +-
 .../components/sinks/base/humio_metrics.cue   |   3 +-
 .../components/sinks/base/splunk_hec_logs.cue |  13 +-
 11 files changed, 231 insertions(+), 113 deletions(-)

diff --git a/src/sinks/humio/logs.rs b/src/sinks/humio/logs.rs
index dd1f2eda4c7e7..d81adb6a885de 100644
--- a/src/sinks/humio/logs.rs
+++ b/src/sinks/humio/logs.rs
@@ -71,7 +71,8 @@ pub struct HumioLogsConfig {
     /// Overrides the name of the log field used to retrieve the hostname to send to Humio.
     ///
-    /// By default, the [global `log_schema.host_key` option][global_host_key] is used.
+    /// By default, the [global `log_schema.host_key` option][global_host_key] is used if log
+    /// events use the Legacy namespace; otherwise the field with the semantic meaning "host" is used, if defined.
     ///
     /// [global_host_key]: https://vector.dev/docs/reference/configuration/global-options/#log_schema.host_key
     #[serde(default = "config_host_key_target_path")]
@@ -128,8 +129,10 @@ pub struct HumioLogsConfig {
     pub acknowledgements: AcknowledgementsConfig,
 
     /// Overrides the name of the log field used to retrieve the timestamp to send to Humio.
+    /// When set to `""`, a timestamp is not set in the events sent to Humio.
     ///
-    /// By default, the [global `log_schema.timestamp_key` option][global_timestamp_key] is used.
+    /// By default, the [global `log_schema.timestamp_key` option][global_timestamp_key] is used
+    /// if log events use the Legacy namespace; otherwise the field with the semantic meaning "timestamp" is used, if defined.
     ///
     /// [global_timestamp_key]: https://vector.dev/docs/reference/configuration/global-options/#log_schema.timestamp_key
     #[serde(default = "config_timestamp_key_target_path")]
@@ -188,7 +191,7 @@ impl HumioLogsConfig {
         HecLogsSinkConfig {
             default_token: self.token.clone(),
             endpoint: self.endpoint.clone(),
-            host_key: self.host_key.clone(),
+            host_key: Some(self.host_key.clone()),
             indexed_fields: self.indexed_fields.clone(),
             index: self.index.clone(),
             sourcetype: self.event_type.clone(),
@@ -203,7 +206,7 @@ impl HumioLogsConfig {
                 indexer_acknowledgements_enabled: false,
                 ..Default::default()
             },
-            timestamp_key: config_timestamp_key_target_path(),
+            timestamp_key: Some(config_timestamp_key_target_path()),
             endpoint_target: EndpointTarget::Event,
             auto_extract_timestamp: None,
         }
diff --git a/src/sinks/humio/metrics.rs b/src/sinks/humio/metrics.rs
index e9d6fc51249ad..b25527f573802 100644
--- a/src/sinks/humio/metrics.rs
+++ b/src/sinks/humio/metrics.rs
@@ -84,7 +84,8 @@ pub struct HumioMetricsConfig {
     /// Overrides the name of the log field used to retrieve the hostname to send to Humio.
     ///
-    /// By default, the [global `log_schema.host_key` option][global_host_key] is used.
+    /// By default, the [global `log_schema.host_key` option][global_host_key] is used if log
+    /// events use the Legacy namespace; otherwise the field with the semantic meaning "host" is used, if defined.
     ///
     /// [global_host_key]: https://vector.dev/docs/reference/configuration/global-options/#log_schema.host_key
     #[serde(default = "config_host_key")]
diff --git a/src/sinks/splunk_hec/common/util.rs b/src/sinks/splunk_hec/common/util.rs
index a48bd2c83252a..e362f377f7532 100644
--- a/src/sinks/splunk_hec/common/util.rs
+++ b/src/sinks/splunk_hec/common/util.rs
@@ -135,12 +135,6 @@ pub fn build_uri(
     uri.parse::<Uri>()
 }
 
-pub fn config_host_key_target_path() -> OptionalTargetPath {
-    OptionalTargetPath {
-        path: crate::config::log_schema().host_key_target_path().cloned(),
-    }
-}
-
 pub fn config_host_key() -> OptionalValuePath {
     OptionalValuePath {
         path: crate::config::log_schema().host_key().cloned(),
@@ -155,12 +149,6 @@ pub fn config_timestamp_key_target_path() -> OptionalTargetPath {
     }
 }
 
-pub fn config_timestamp_key() -> OptionalValuePath {
-    OptionalValuePath {
-        path: crate::config::log_schema().timestamp_key().cloned(),
-    }
-}
-
 pub fn render_template_string<'a>(
     template: &Template,
     event: impl Into<EventRef<'a>>,
diff --git a/src/sinks/splunk_hec/logs/config.rs b/src/sinks/splunk_hec/logs/config.rs
index 67921b3eb790f..844d073245bad 100644
--- a/src/sinks/splunk_hec/logs/config.rs
+++ b/src/sinks/splunk_hec/logs/config.rs
@@ -12,8 +12,7 @@ use crate::{
     prelude::*,
     splunk_hec::common::{
         acknowledgements::HecClientAcknowledgementsConfig,
-        build_healthcheck, build_http_batch_service, config_host_key_target_path,
-        config_timestamp_key_target_path, create_client,
+        build_healthcheck, build_http_batch_service, create_client,
         service::{HecService, HttpRequestBuilder},
         EndpointTarget, SplunkHecDefaultBatchSettings,
     },
@@ -53,12 +52,15 @@ pub struct HecLogsSinkConfig {
     /// Overrides the name of the log field used to retrieve the hostname to send to Splunk HEC.
     ///
-    /// By default, the [global `log_schema.host_key` option][global_host_key] is used.
+    /// By default, the [global `log_schema.host_key` option][global_host_key] is used if log
+    /// events use the Legacy namespace; otherwise the field with the semantic meaning "host" is used, if defined.
     ///
     /// [global_host_key]: https://vector.dev/docs/reference/configuration/global-options/#log_schema.host_key
+    // NOTE: The `OptionalTargetPath` is wrapped in an `Option` in order to distinguish between a true
+    // `None` type and an empty string. This is necessary because `OptionalTargetPath` deserializes an
+    // empty string to a `None` path internally.
     #[configurable(metadata(docs::advanced))]
-    #[serde(default = "config_host_key_target_path")]
-    pub host_key: OptionalTargetPath,
+    pub host_key: Option<OptionalTargetPath>,
 
     /// Fields to be [added to Splunk index][splunk_field_index_docs].
     ///
@@ -124,13 +126,16 @@ pub struct HecLogsSinkConfig {
     /// Overrides the name of the log field used to retrieve the timestamp to send to Splunk HEC.
     /// When set to `""`, a timestamp is not set in the events sent to Splunk HEC.
     ///
-    /// By default, the [global `log_schema.timestamp_key` option][global_timestamp_key] is used.
+    /// By default, the [global `log_schema.timestamp_key` option][global_timestamp_key] is used
+    /// if log events use the Legacy namespace; otherwise the field with the semantic meaning "timestamp" is used, if defined.
     ///
     /// [global_timestamp_key]: https://vector.dev/docs/reference/configuration/global-options/#log_schema.timestamp_key
     #[configurable(metadata(docs::advanced))]
-    #[serde(default = "crate::sinks::splunk_hec::common::config_timestamp_key_target_path")]
     #[configurable(metadata(docs::examples = "timestamp", docs::examples = ""))]
-    pub timestamp_key: OptionalTargetPath,
+    // NOTE: The `OptionalTargetPath` is wrapped in an `Option` in order to distinguish between a true
+    // `None` type and an empty string. This is necessary because `OptionalTargetPath` deserializes an
+    // empty string to a `None` path internally.
+    pub timestamp_key: Option<OptionalTargetPath>,
 
     /// Passes the `auto_extract_timestamp` option to Splunk.
     ///
@@ -158,7 +163,7 @@ impl GenerateConfig for HecLogsSinkConfig {
         toml::Value::try_from(Self {
             default_token: "${VECTOR_SPLUNK_HEC_TOKEN}".to_owned().into(),
             endpoint: "endpoint".to_owned(),
-            host_key: config_host_key_target_path(),
+            host_key: None,
             indexed_fields: vec![],
             index: None,
             sourcetype: None,
@@ -170,7 +175,7 @@ impl GenerateConfig for HecLogsSinkConfig {
             tls: None,
             acknowledgements: Default::default(),
             timestamp_nanos_key: None,
-            timestamp_key: config_timestamp_key_target_path(),
+            timestamp_key: None,
             auto_extract_timestamp: None,
             endpoint_target: EndpointTarget::Event,
         })
@@ -270,9 +275,9 @@ impl HecLogsSinkConfig {
                 .iter()
                 .map(|config_path| config_path.0.clone())
                 .collect(),
-            host_key: self.host_key.path.clone(),
+            host_key: self.host_key.clone(),
             timestamp_nanos_key: self.timestamp_nanos_key.clone(),
-            timestamp_key: self.timestamp_key.path.clone(),
+            timestamp_key: self.timestamp_key.clone(),
             endpoint_target: self.endpoint_target,
             auto_extract_timestamp: self.auto_extract_timestamp.unwrap_or_default(),
         };
@@ -305,7 +310,7 @@ mod tests {
         let config = Self {
             endpoint: endpoint.clone(),
             default_token: "i_am_an_island".to_string().into(),
-            host_key: config_host_key_target_path(),
+            host_key: None,
             indexed_fields: vec![],
             index: None,
             sourcetype: None,
@@ -327,7 +332,7 @@ mod tests {
                 ..Default::default()
             },
             timestamp_nanos_key: None,
-            timestamp_key: config_timestamp_key_target_path(),
+            timestamp_key: None,
             auto_extract_timestamp: None,
             endpoint_target: EndpointTarget::Raw,
         };
diff --git a/src/sinks/splunk_hec/logs/integration_tests.rs b/src/sinks/splunk_hec/logs/integration_tests.rs
index 63523683bcb7c..6f07435097143 100644
--- a/src/sinks/splunk_hec/logs/integration_tests.rs
+++ b/src/sinks/splunk_hec/logs/integration_tests.rs
@@ -118,7 +118,7 @@ async fn config(
     HecLogsSinkConfig {
         default_token: get_token().await.into(),
         endpoint: splunk_hec_address(),
-        host_key: OptionalTargetPath::event("host"),
+        host_key: Some(OptionalTargetPath::event("host")),
         indexed_fields,
         index: None,
         sourcetype: None,
@@ -130,7 +130,7 @@ async fn config(
         tls: None,
         acknowledgements: Default::default(),
         timestamp_nanos_key: None,
-        timestamp_key: Default::default(),
+        timestamp_key: None,
         auto_extract_timestamp: None,
         endpoint_target: EndpointTarget::Event,
     }
@@ -409,7 +409,7 @@ async fn splunk_configure_hostname() {
     let cx = SinkContext::default();
 
     let config = HecLogsSinkConfig {
-        host_key: OptionalTargetPath::event("roast"),
+        host_key: Some(OptionalTargetPath::event("roast")),
         ..config(JsonSerializerConfig::default().into(), vec!["asdf".into()]).await
     };
@@ -488,11 +488,11 @@ async fn splunk_auto_extracted_timestamp() {
 
     let config = HecLogsSinkConfig {
         auto_extract_timestamp: Some(true),
-        timestamp_key: OptionalTargetPath {
+        timestamp_key: Some(OptionalTargetPath {
             path: Some(OwnedTargetPath::event(lookup::owned_value_path!(
                 "timestamp"
             ))),
-        },
+        }),
         ..config(JsonSerializerConfig::default().into(), vec![]).await
     };
@@ -551,11 +551,11 @@ async fn splunk_non_auto_extracted_timestamp() {
 
     let config = HecLogsSinkConfig {
         auto_extract_timestamp: Some(false),
-        timestamp_key: OptionalTargetPath {
+        timestamp_key: Some(OptionalTargetPath {
             path: Some(OwnedTargetPath::event(lookup::owned_value_path!(
                 "timestamp"
             ))),
-        },
+        }),
         ..config(JsonSerializerConfig::default().into(), vec![]).await
     };
diff --git a/src/sinks/splunk_hec/logs/sink.rs b/src/sinks/splunk_hec/logs/sink.rs
index b2eaa4eec8e25..56c5a538765fe 100644
--- a/src/sinks/splunk_hec/logs/sink.rs
+++ b/src/sinks/splunk_hec/logs/sink.rs
@@ -13,8 +13,16 @@ use crate::{
         util::processed_event::ProcessedEvent,
     },
 };
-use vector_lib::lookup::{event_path, OwnedTargetPath, OwnedValuePath, PathPrefix};
+use vector_lib::{
+    config::{log_schema, LogNamespace},
+    lookup::{event_path, lookup_v2::OptionalTargetPath, OwnedValuePath, PathPrefix},
+    schema::meaning,
+};
+use vrl::path::OwnedTargetPath;
 
+// NOTE: The `OptionalTargetPath`s are wrapped in an `Option` in order to distinguish between a true
+// `None` type and an empty string. This is necessary because `OptionalTargetPath` deserializes an
+// empty string to a `None` path internally.
 pub struct HecLogsSink<S> {
     pub context: SinkContext,
     pub service: S,
     pub source: Option